diff --git a/Makefile b/Makefile
index 8ed5e88..4ea072e 100644
--- a/Makefile
+++ b/Makefile
@@ -9,6 +9,7 @@ test-css: test-csslexer test-cssparser
 test-html: test-htmllexer test-htmlparser
 test-jsx: test-jsxlexer test-jsxlexer2 test-jsxparser test-jsxparser2
 test-csx: test-csxlexer test-csxlexer2 test-csxparser test-csxparser2
+test-axml: test-axmllexer test-axmlparser
 
 test-api:
 	@mocha tests/api.js -R spec
@@ -61,6 +62,12 @@ test-csxparser:
 test-csxparser2:
 	@mocha tests/csxparser2.js -R spec
 
+test-axmllexer:
+	@mocha tests/axmllexer.js -R spec
+
+test-axmlparser:
+	@mocha tests/axmlparser.js -R spec
+
 test-walk:
 	@mocha tests/walk.js -R spec
 
@@ -68,4 +75,4 @@ coveralls:
 	@mocha tests/api.js tests/jslexer.js tests/jsparser.js tests/jscontext.js tests/es6parser.js tests/csslexer.js tests/cssparser.js tests/htmllexer.js tests/htmlparser.js tests/jsxlexer.js tests/jsxlexer2.js tests/jsxparser.js tests/jsxparser2 tests/walk.js --require blanket --reporter mocha-lcov-reporter | ./node_modules/coveralls/bin/coveralls.js
 
 test-cov:
-	@mocha tests/api.js tests/jslexer.js tests/jsparser.js tests/jscontext.js tests/es6parser.js tests/csslexer.js tests/cssparser.js tests/htmllexer.js tests/htmlparser.js tests/jsxlexer.js tests/jsxlexer2.js tests/jsxparser.js tests/jsxparser2 tests/walk.js --require blanket -R html-cov > tests/covrage.html
\ No newline at end of file
+	@mocha tests/api.js tests/jslexer.js tests/jsparser.js tests/jscontext.js tests/es6parser.js tests/csslexer.js tests/cssparser.js tests/htmllexer.js tests/htmlparser.js tests/jsxlexer.js tests/jsxlexer2.js tests/jsxparser.js tests/jsxparser2 tests/walk.js --require blanket -R html-cov > tests/coverage.html
diff --git a/package.json b/package.json
index 20b073c..8dc185e 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "homunculus",
-  "version": "1.4.3",
+  "version": "1.5.0",
   "description": "A lexer&parser by Javascript",
   "maintainers": [
     {
@@ -19,16 +19,19 @@
     "src/lexer/Lexer",
     "src/lexer/CssLexer",
     "src/lexer/HtmlLexer",
+    "src/lexer/AxmlLexer",
     "src/lexer/JSXLexer",
     "src/lexer/Token",
     "src/lexer/CssToken",
     "src/lexer/HtmlToken",
+    "src/lexer/AxmlToken",
     "src/lexer/JSXToken",
     "src/lexer/match",
     "src/lexer/rule/Rule",
     "src/lexer/rule/EcmascriptRule",
     "src/lexer/rule/CssRule",
     "src/lexer/rule/HtmlRule",
+    "src/lexer/rule/AxmlRule",
     "src/parser",
     "src/util/walk"
   ]
@@ -47,7 +50,8 @@
     "es5",
     "es6",
     "jsx",
-    "csx"
+    "csx",
+    "axml"
   ],
   "author": "army8735",
   "license": "MIT",
diff --git a/tests/axmllexer.js b/tests/axmllexer.js
new file mode 100644
index 0000000..2c2175a
--- /dev/null
+++ b/tests/axmllexer.js
@@ -0,0 +1,139 @@
+var homunculus = require('../');
+
+var expect = require('expect.js');
+var path = require('path');
+var fs = require('fs');
+
+var Token = homunculus.getClass('token', 'axml');
+
+function join(tokens) {
+  var arr = tokens.map(function(token) {
+    return token.content();
+  });
+  return arr;
+}
+function type(tokens, tag) {
+  var arr = tokens.map(function(token) {
+    return tag ? Token.type(token.type()) : token.type();
+  });
+  return arr;
+}
+
+describe('axmllexer', function() {
+  describe('simple test', function() {
+    describe('comment', function() {
+      it('normal', function() {
+        var lexer = homunculus.getLexer('axml');
+        var tokens = lexer.parse('<!--comment-->');
+        expect(join(tokens)).to.eql(['<!--comment-->']);
+        expect(type(tokens)).to.eql([6]);
+      });
+    });
+    describe('doctype', function() {
+      it('normal', function() {
+        var lexer = homunculus.getLexer('axml');
+        var tokens = lexer.parse('<!DOCTYPE>');
+        expect(join(tokens)).to.eql(['<', '!DOCTYPE', '>']);
+        expect(type(tokens)).to.eql([26, 27, 26]);
+      });
+      it('ignore case', function() {
+        var lexer = homunculus.getLexer('axml');
+        var tokens = lexer.parse('<!doctype>');
+        expect(join(tokens)).to.eql(['<', '!doctype', '>']);
+        expect(type(tokens)).to.eql([26, 27, 26]);
+      });
+    });
+    describe('markdown', function() {
+      it('normal', function() {
+        var lexer = homunculus.getLexer('axml');
+        var tokens = lexer.parse('<html></html>');
+        expect(join(tokens)).to.eql(['<', 'html', '>', '</', 'html', '>']);
+        expect(type(tokens)).to.eql([26, 24, 26, 26, 24, 26]);
+      });
+      it('selfclose', function() {
+        var lexer = homunculus.getLexer('axml');
+        var tokens = lexer.parse('<img/>');
+        expect(join(tokens)).to.eql(['<', 'img', '/>']);
+        expect(type(tokens)).to.eql([26, 24, 26]);
+      });
+      it('without />', function() {
+        var lexer = homunculus.getLexer('axml');
+        var tokens = lexer.parse('<img>');
+        expect(join(tokens)).to.eql(['<', 'img', '>']);
+        expect(type(tokens)).to.eql([26, 24, 26]);
+      });
+      it('custom mark', function() {
+        var lexer = homunculus.getLexer('axml');
+        var tokens = lexer.parse('<custom>');
+        expect(join(tokens)).to.eql(['<', 'custom', '>']);
+        expect(type(tokens)).to.eql([26, 24, 26]);
+      });
+      it('attribute', function() {
+        var lexer = homunculus.getLexer('axml');
+        var tokens = lexer.parse('<a href="#">');
+        expect(join(tokens)).to.eql(['<', 'a', ' ', 'href', '=', '"#"', '>']);
+        expect(type(tokens)).to.eql([26, 24, 1, 15, 8, 7, 26]);
+      });
+      it('custom attribute', function() {
+        var lexer = homunculus.getLexer('axml');
+        var tokens = lexer.parse('<a custom=1>');
+        expect(join(tokens)).to.eql(['<', 'a', ' ', 'custom', '=', '1', '>']);
+        expect(type(tokens)).to.eql([26, 24, 1, 15, 8, 4, 26]);
+      });
+      it('data attribute', function() {
+        var lexer = homunculus.getLexer('axml');
+        var tokens = lexer.parse('<a data-b=1>');
+        expect(join(tokens)).to.eql(['<', 'a', ' ', 'data-b', '=', '1', '>']);
+        expect(type(tokens)).to.eql([26, 24, 1, 15, 8, 4, 26]);
+      });
+      it('attr without quote', function() {
+        var lexer = homunculus.getLexer('axml');
+        var tokens = lexer.parse('<input checked=checked>');
+        expect(join(tokens)).to.eql(['<', 'input', ' ', 'checked', '=', 'checked', '>']);
+        expect(type(tokens)).to.eql([26, 24, 1, 15, 8, 15, 26]);
+      });
+      it('attr without value', function() {
+        var lexer = homunculus.getLexer('axml');
+        var tokens = lexer.parse('<input checked>');
+        expect(join(tokens)).to.eql(['<', 'input', ' ', 'checked', '>']);
+        expect(type(tokens)).to.eql([26, 24, 1, 15, 26]);
+      });
+      it('text', function() {
+        var lexer = homunculus.getLexer('axml');
+        var tokens = lexer.parse('<div>text</div> <p/>');
+        expect(join(tokens)).to.eql(['<', 'div', '>', 'text', '</', 'div', '>', ' ', '<', 'p', '/>']);
+        expect(type(tokens)).to.eql([26, 24, 26, 25, 26, 24, 26, 25, 26, 24, 26]);
+      });
+      it('error', function() {
+        var lexer = homunculus.getLexer('axml');
+        expect(function() {
+          lexer.parse('<div></1div>');
+        }).to.throwError();
+      });
+      it('style with mark as text', function() {
+        var lexer = homunculus.getLexer('axml');
+        var tokens = lexer.parse('<style><div></style>');
+        expect(join(tokens)).to.eql(['<', 'style', '>', '<div>', '</', 'style', '>']);
+        expect(type(tokens)).to.eql([26, 24, 26, 25, 26, 24, 26]);
+      });
+      it('script no end', function() {
+        var lexer = homunculus.getLexer('axml');
+        var tokens = lexer.parse('