diff --git a/lib/parser/sequence.js b/lib/parser/sequence.js
index b2a32a71..52ef19e1 100644
--- a/lib/parser/sequence.js
+++ b/lib/parser/sequence.js
@@ -1,6 +1,6 @@
 var List = require('../utils/list');
 var TYPE = require('../tokenizer').TYPE;
-var WHITESPACE = TYPE.Whitespace;
+var WHITESPACE = TYPE.WhiteSpace;
 var COMMENT = TYPE.Comment;
 
 module.exports = function readSequence(recognizer) {
diff --git a/lib/syntax/atrule/supports.js b/lib/syntax/atrule/supports.js
index dbeffa32..22d2874e 100644
--- a/lib/syntax/atrule/supports.js
+++ b/lib/syntax/atrule/supports.js
@@ -1,7 +1,7 @@
 var List = require('../../utils/list');
 var TYPE = require('../../tokenizer').TYPE;
 
-var WHITESPACE = TYPE.Whitespace;
+var WHITESPACE = TYPE.WhiteSpace;
 var COMMENT = TYPE.Comment;
 var IDENTIFIER = TYPE.Identifier;
 var LEFTPARENTHESIS = TYPE.LeftParenthesis;
diff --git a/lib/syntax/node/Block.js b/lib/syntax/node/Block.js
index dc38c30d..7181f0dd 100644
--- a/lib/syntax/node/Block.js
+++ b/lib/syntax/node/Block.js
@@ -1,7 +1,7 @@
 var List = require('../../utils/list');
 var TYPE = require('../../tokenizer').TYPE;
 
-var WHITESPACE = TYPE.Whitespace;
+var WHITESPACE = TYPE.WhiteSpace;
 var COMMENT = TYPE.Comment;
 var SEMICOLON = TYPE.Semicolon;
 var COMMERCIALAT = TYPE.CommercialAt;
diff --git a/lib/syntax/node/DeclarationList.js b/lib/syntax/node/DeclarationList.js
index f4f667e7..94007687 100644
--- a/lib/syntax/node/DeclarationList.js
+++ b/lib/syntax/node/DeclarationList.js
@@ -1,7 +1,7 @@
 var List = require('../../utils/list');
 var TYPE = require('../../tokenizer').TYPE;
 
-var WHITESPACE = TYPE.Whitespace;
+var WHITESPACE = TYPE.WhiteSpace;
 var COMMENT = TYPE.Comment;
 var SEMICOLON = TYPE.Semicolon;
 
diff --git a/lib/syntax/node/MediaQuery.js b/lib/syntax/node/MediaQuery.js
index b80adda7..0287913d 100644
--- a/lib/syntax/node/MediaQuery.js
+++ b/lib/syntax/node/MediaQuery.js
@@ -1,7 +1,7 @@
 var List = require('../../utils/list');
 var TYPE = require('../../tokenizer').TYPE;
 
-var WHITESPACE = TYPE.Whitespace;
+var WHITESPACE = TYPE.WhiteSpace;
 var COMMENT = TYPE.Comment;
 var IDENTIFIER = TYPE.Identifier;
 var LEFTPARENTHESIS = TYPE.LeftParenthesis;
diff --git a/lib/syntax/node/Raw.js b/lib/syntax/node/Raw.js
index f0d97af3..5b5c21f1 100644
--- a/lib/syntax/node/Raw.js
+++ b/lib/syntax/node/Raw.js
@@ -1,6 +1,6 @@
 var TYPE = require('../../tokenizer').TYPE;
 
-var WHITESPACE = TYPE.Whitespace;
+var WHITESPACE = TYPE.WhiteSpace;
 var LEFTPARENTHESIS = TYPE.LeftParenthesis;
 var RIGHTPARENTHESIS = TYPE.RightParenthesis;
 var LEFTCURLYBRACKET = TYPE.LeftCurlyBracket;
diff --git a/lib/syntax/node/StyleSheet.js b/lib/syntax/node/StyleSheet.js
index a6652582..f08a6f01 100644
--- a/lib/syntax/node/StyleSheet.js
+++ b/lib/syntax/node/StyleSheet.js
@@ -1,7 +1,7 @@
 var List = require('../../utils/list');
 var TYPE = require('../../tokenizer').TYPE;
 
-var WHITESPACE = TYPE.Whitespace;
+var WHITESPACE = TYPE.WhiteSpace;
 var COMMENT = TYPE.Comment;
 var EXCLAMATIONMARK = TYPE.ExclamationMark;
 var COMMERCIALAT = TYPE.CommercialAt;
diff --git a/lib/syntax/node/Value.js b/lib/syntax/node/Value.js
index 5883cd82..d249cb77 100644
--- a/lib/syntax/node/Value.js
+++ b/lib/syntax/node/Value.js
@@ -1,7 +1,7 @@
 var endsWith = require('../../tokenizer').endsWith;
 var TYPE = require('../../tokenizer').TYPE;
 
-var WHITESPACE = TYPE.Whitespace;
+var WHITESPACE = TYPE.WhiteSpace;
 var COMMENT = TYPE.Comment;
 var LEFTPARENTHESIS = TYPE.LeftParenthesis;
 var COLON = TYPE.Colon;
diff --git a/lib/syntax/node/WhiteSpace.js b/lib/syntax/node/WhiteSpace.js
index 4e378218..3d21552a 100644
--- a/lib/syntax/node/WhiteSpace.js
+++ b/lib/syntax/node/WhiteSpace.js
@@ -1,4 +1,4 @@
-var WHITESPACE = require('../../tokenizer').TYPE.Whitespace;
+var WHITESPACE = require('../../tokenizer').TYPE.WhiteSpace;
 var SPACE = Object.freeze({
     type: 'WhiteSpace',
     loc: null,
diff --git a/lib/tokenizer/Tokenizer.js b/lib/tokenizer/Tokenizer.js
index 21870e31..12754625 100644
--- a/lib/tokenizer/Tokenizer.js
+++ b/lib/tokenizer/Tokenizer.js
@@ -13,14 +13,14 @@ var firstCharOffset = utils.firstCharOffset;
 var cmpStr = utils.cmpStr;
 var isNumber = utils.isNumber;
 var findLastNonSpaceLocation = utils.findLastNonSpaceLocation;
-var findWhitespaceEnd = utils.findWhitespaceEnd;
+var findWhiteSpaceEnd = utils.findWhiteSpaceEnd;
 var findCommentEnd = utils.findCommentEnd;
 var findStringEnd = utils.findStringEnd;
 var findNumberEnd = utils.findNumberEnd;
 var findIdentifierEnd = utils.findIdentifierEnd;
 
 var NULL = 0;
-var WHITESPACE = TYPE.Whitespace;
+var WHITESPACE = TYPE.WhiteSpace;
 var IDENTIFIER = TYPE.Identifier;
 var NUMBER = TYPE.Number;
 var STRING = TYPE.String;
@@ -102,7 +102,7 @@ function tokenLayout(tokenizer, source, startPos) {
 
         switch (type) {
             case WHITESPACE:
-                offset = findWhitespaceEnd(source, offset + 1);
+                offset = findWhiteSpaceEnd(source, offset + 1);
                 break;
 
             case PUNCTUATOR:
diff --git a/lib/tokenizer/const.js b/lib/tokenizer/const.js
index 0a5309ee..291129ba 100644
--- a/lib/tokenizer/const.js
+++ b/lib/tokenizer/const.js
@@ -17,7 +17,7 @@ var R = 13;
 var SPACE = 32;
 
 var TYPE = {
-    Whitespace: WHITESPACE,
+    WhiteSpace: WHITESPACE,
     Identifier: IDENTIFIER,
     Number: NUMBER,
     String: STRING,
diff --git a/lib/tokenizer/utils.js b/lib/tokenizer/utils.js
index 9ec29dc3..25de1efe 100644
--- a/lib/tokenizer/utils.js
+++ b/lib/tokenizer/utils.js
@@ -102,7 +102,7 @@ function findLastNonSpaceLocation(scanner) {
     return scanner.getLocation(i + 1);
 }
 
-function findWhitespaceEnd(source, offset) {
+function findWhiteSpaceEnd(source, offset) {
     for (; offset < source.length; offset++) {
         var code = source.charCodeAt(offset);
 
@@ -235,7 +235,7 @@ module.exports = {
     endsWith: endsWith,
 
     findLastNonSpaceLocation: findLastNonSpaceLocation,
-    findWhitespaceEnd: findWhitespaceEnd,
+    findWhiteSpaceEnd: findWhiteSpaceEnd,
     findCommentEnd: findCommentEnd,
     findStringEnd: findStringEnd,
     findDecimalNumberEnd: findDecimalNumberEnd,
diff --git a/test/tokenizer.js b/test/tokenizer.js
index 3ae192e6..155157c0 100644
--- a/test/tokenizer.js
+++ b/test/tokenizer.js
@@ -6,12 +6,12 @@ describe('parser/tokenizer', function() {
     var tokens = [
         { offset: 0, type: 'FullStop' },
         { offset: 1, type: 'Identifier' },
-        { offset: 5, type: 'Whitespace' },
+        { offset: 5, type: 'WhiteSpace' },
         { offset: 6, type: 'LeftCurlyBracket' },
-        { offset: 7, type: 'Whitespace' },
+        { offset: 7, type: 'WhiteSpace' },
         { offset: 10, type: 'Identifier' },
         { offset: 14, type: 'Colon' },
-        { offset: 15, type: 'Whitespace' },
+        { offset: 15, type: 'WhiteSpace' },
         { offset: 16, type: 'Identifier' },
         { offset: 19, type: 'LeftParenthesis' },
         { offset: 20, type: 'Identifier' },
@@ -21,7 +21,7 @@
         { offset: 28, type: 'Identifier' },
         { offset: 31, type: 'RightParenthesis' },
         { offset: 32, type: 'Semicolon' },
-        { offset: 33, type: 'Whitespace' },
+        { offset: 33, type: 'WhiteSpace' },
         { offset: 34, type: 'RightCurlyBracket' }
     ];
    var types = tokens.map(function(token) {
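// A minimal sketch (not part of the patch) of what the rename means for code
// that reads the tokenizer's TYPE map; the require path is illustrative and
// assumes the in-repo layout shown in the diff above.
var TYPE = require('./lib/tokenizer').TYPE;

var WHITESPACE = TYPE.WhiteSpace; // key exposed after this patch
var OLD = TYPE.Whitespace;        // pre-patch key, no longer defined

console.log(WHITESPACE !== undefined); // true
console.log(OLD);                      // undefined, so consumers must update the casing
// The utils helper is renamed the same way: findWhitespaceEnd -> findWhiteSpaceEnd.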