Fix errors in attribute and () selectors, and when they are nested inside each other
army8735 committed May 13, 2015
1 parent aa0c7a1 commit c985af0
Showing 10 changed files with 1,756 additions and 31 deletions.
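
The selector nesting this commit targets, sampled from the test cases added below: attribute brackets both next to and inside a pseudo-class's parentheses, e.g.

[disabled]:not(:first)[checked]:not([data="123"]) {}

Before this change the lexer emitted ':not(:first)' as a single pseudo token (see the updated 'pseudo' test in tests/csslexer.js), and the selector loop in Parser.js had no dedicated branch for '(', so mixing '[...]' and '(...)' failed.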
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "homunculus",
"version": "0.9.2",
"version": "0.9.3",
"description": "A lexer&parser by Javascript",
"maintainers": [
{
2 changes: 0 additions & 2 deletions src/lexer/CssLexer.js
@@ -334,15 +334,13 @@ var CssLexer = Lexer.extend(function(rule) {
}
}
}
this.sel = false;
break;
case ')':
if(this.media || this.import || this.doc) {
this.value = true;
}
this.url = false;
this.parenthese = false;
this.sel = false;
this.var = false;
//a unit may follow ')', e.g. margin:(1+2)px
this.number = true;
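
A minimal usage sketch (not part of the commit) of the new tokenization, assuming the module is loaded by its package.json name, that lexer.parse() returns an array of Token objects, and that tokens expose the content() accessor used in Parser.js below:

var homunculus = require('homunculus');      // assumed entry point, per the package.json "name"
var lexer = homunculus.getLexer('css');      // as in tests/csslexer.js
var tokens = lexer.parse('p:not(:first){}');
// ':not(:first)' is now split into separate tokens rather than one pseudo token;
// the updated 'pseudo' test expects ['p', ':not', '(', ':first', ')', '{', '}'].
console.log(tokens.map(function(t) { return t.content(); }));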
2 changes: 1 addition & 1 deletion src/lexer/rule/CssRule.js

Some generated files are not rendered by default.

34 changes: 24 additions & 10 deletions src/parser/css/Parser.js
@@ -589,22 +589,17 @@ var Parser = IParser.extend(function(lexer) {
else {
var s = this.look.content().toLowerCase();
if(s == '[' && this.look.type() != Token.HACK) {
node.add(this.match());
while(this.look && this.look.content() != ']') {
node.add(this.match([Token.ATTR, Token.SIGN, Token.VARS, Token.NUMBER, Token.UNITS, Token.STRING]));
}
node.add(this.match(']'));
this.bracket1(node);
}
else {
node.add(this.match([Token.SELECTOR, Token.PSEUDO, Token.HACK, Token.VARS]));
}
while(this.look && [',', ';', '{', '}'].indexOf(this.look.content()) == -1) {
if(this.look.content() == '[' && this.look.type() != Token.HACK) {
node.add(this.match());
while(this.look && this.look.content() != ']') {
node.add(this.match([Token.ATTR, Token.SIGN, Token.VARS, Token.NUMBER, Token.UNITS, Token.STRING]));
}
node.add(this.match(']'));
this.bracket1(node);
}
else if(this.look.content() == '(') {
this.bracket2(node);
}
else {
node.add(this.match([Token.SELECTOR, Token.PSEUDO, Token.SIGN, Token.HACK, Token.VARS]));
@@ -613,6 +608,25 @@ var Parser = IParser.extend(function(lexer) {
}
return node;
},
bracket1: function(node) {
node.add(this.match());
while(this.look && [']', '(', ')'].indexOf(this.look.content()) == -1) {
node.add(this.match([Token.ATTR, Token.SIGN, Token.VARS, Token.NUMBER, Token.UNITS, Token.STRING]));
}
node.add(this.match(']'));
},
bracket2: function(node) {
node.add(this.match());
while(this.look && this.look.content() != ')') {
if(this.look.content() == '[') {
this.bracket1(node);
}
if(this.look && this.look.content() == ')') {
break;
}
node.add(this.match([Token.SELECTOR, Token.PSEUDO, Token.VARS, Token.NUMBER, Token.UNITS]));
}
},
block: function(kf) {
var node = new Node(Node.BLOCK);
node.add(this.match('{'));
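
A minimal usage sketch (not part of the commit) of the refactored selector parsing. bracket1() consumes an attribute block '[...]'; bracket2() walks a parenthesised block '(...)' and recurses into bracket1() when an attribute selector is nested inside it. Assuming the module is loaded by its package.json name and getParser('css') as used in tests/cssparser.js:

var homunculus = require('homunculus');      // assumed entry point, per the package.json "name"
var parser = homunculus.getParser('css');    // as in tests/cssparser.js
// Selector taken from the 'pseudo complex' test added below: attribute
// brackets appear both next to and inside pseudo-class parentheses,
// which the old selector loop could not handle.
var node = parser.parse('[disabled]:not(:first)[checked]:not([data="123"]){}');
// node is the SHEET root; its SELECTOR child now lists every bracket,
// parenthesis and name as an individual token (see the expected tree below).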
32 changes: 28 additions & 4 deletions tests/csslexer.js
@@ -163,8 +163,8 @@ describe('csslexer', function() {
it('pseudo', function() {
var lexer = homunculus.getLexer('css');
var tokens = lexer.parse('a:hover{}p:not(:first){}');
expect(join(tokens)).to.eql(['a', ':hover', '{', '}', 'p', ':not(:first)', '{', '}']);
expect(type(tokens)).to.eql([21, 19, 8, 8, 21, 19, 8, 8]);
expect(join(tokens)).to.eql(['a', ':hover', '{', '}', 'p', ':not', '(', ':first', ')', '{', '}']);
expect(type(tokens)).to.eql([21, 19, 8, 8, 21, 19, 8, 19, 8, 8, 8]);
});
it('pseudo at start', function() {
var lexer = homunculus.getLexer('css');
@@ -178,6 +178,30 @@
expect(join(tokens)).to.eql(['p', ',', ':root', '{', '}']);
expect(type(tokens)).to.eql([21, 8, 19, 8, 8]);
});
it('pseudo pseudo', function() {
var lexer = homunculus.getLexer('css');
var tokens = lexer.parse(':root:not(:first)');
expect(join(tokens)).to.eql([':root', ':not', '(', ':first', ')']);
expect(type(tokens)).to.eql([19, 19, 8, 19, 8]);
});
it('pseudo attr', function() {
var lexer = homunculus.getLexer('css');
var tokens = lexer.parse(':not([data="123"])');
expect(join(tokens)).to.eql([':not', '(', '[', 'data', '=', '"123"', ']', ')']);
expect(type(tokens)).to.eql([19, 8, 8, 22, 8, 7, 8, 8]);
});
it('pseudo before attr', function() {
var lexer = homunculus.getLexer('css');
var tokens = lexer.parse(':not([data="123"])[disabled]');
expect(join(tokens)).to.eql([':not', '(', '[', 'data', '=', '"123"', ']', ')', '[', 'disabled', ']']);
expect(type(tokens)).to.eql([19, 8, 8, 22, 8, 7, 8, 8, 8, 22, 8]);
});
it('pseudo after attr', function() {
var lexer = homunculus.getLexer('css');
var tokens = lexer.parse('[disabled]:not(:first)[checked]:not([data="123"])');
expect(join(tokens)).to.eql(['[', 'disabled', ']', ':not', '(', ':first', ')', '[', 'checked', ']', ':not', '(', '[', 'data', '=', '"123"', ']', ')']);
expect(type(tokens)).to.eql([8, 22, 8, 19, 8, 19, 8, 8, 22, 8, 19, 8, 8, 22, 8, 7, 8, 8]);
});
it('* and pseudo', function() {
var lexer = homunculus.getLexer('css');
var tokens = lexer.parse('*:first-child+html');
@@ -187,8 +211,8 @@
it('pseudo with ()', function() {
var lexer = homunculus.getLexer('css');
var tokens = lexer.parse('body:not(:-moz-handler-blocked)');
expect(join(tokens)).to.eql(['body', ':not(:-moz-handler-blocked)']);
expect(type(tokens)).to.eql([21, 19]);
expect(join(tokens)).to.eql(['body', ':not', '(', ':-moz-handler-blocked', ')']);
expect(type(tokens)).to.eql([21, 19, 8, 19, 8]);
});
it('attr 1', function() {
var lexer = homunculus.getLexer('css');
13 changes: 13 additions & 0 deletions tests/cssparser.js
@@ -601,6 +601,11 @@ describe('cssparser', function() {
var node = parser.parse(':hover{margin:0}');
expect(tree(node)).to.eql([CssNode.SHEET,[CssNode.STYLESET,[CssNode.SELECTORS,[CssNode.SELECTOR,[":hover"]],CssNode.BLOCK,["{",CssNode.STYLE,[CssNode.KEY,["margin"],":",CssNode.VALUE,["0"]],"}"]]]]);
});
it('pseudo complex', function() {
var parser = homunculus.getParser('css');
var node = parser.parse('[disabled]:not(:first)[checked]:not([data="123"]){}');
expect(tree(node)).to.eql([CssNode.SHEET,[CssNode.STYLESET,[CssNode.SELECTORS,[CssNode.SELECTOR,["[","disabled","]",":not","(",":first",")","[","checked","]",":not","(","[","data","=","\"123\"","]",")"]],CssNode.BLOCK,["{","}"]]]]);
});
it('hack', function() {
var parser = homunculus.getParser('css');
var node = parser.parse('p{[;width:0;];}');
@@ -1406,6 +1411,14 @@ describe('cssparser', function() {
var str = jion(node, ignore);
expect(str).to.eql(code);
});
it('pure', function() {
var parser = homunculus.getParser('css');
var code = fs.readFileSync(path.join(__dirname, './lib/pure.css'), { encoding: 'utf-8' });
var node = parser.parse(code);
var ignore = parser.ignore();
var str = jion(node, ignore);
expect(str).to.eql(code);
});
});
describe('other test', function() {
it('node #parent,#prev,#next', function() {