Skip to content

Commit

Permalink
(puppetlabsGH-6) Correctly tokenize chaining arrows
Browse files Browse the repository at this point in the history
Previously chaining arrows were not accounted for, which broke tokenization
after them.  This commit adds the correct patterns for the arrows.  The class
regex was changed because using `^` is incorrect; it should really look for
word boundaries.  This commit also adds tests for the chaining arrows.
  • Loading branch information
glennsarti committed Oct 25, 2018
1 parent 835547c commit 251d103
Show file tree
Hide file tree
Showing 6 changed files with 89 additions and 6 deletions.
10 changes: 9 additions & 1 deletion generated-syntaxes/puppet.tmLanguage.atom.cson
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
'name': 'comment.block.puppet'
}
{
'begin': '^\\s*(node|class)\\s+((?:[-_A-Za-z0-9"\'.]+::)*[-_A-Za-z0-9"\'.]+)\\s*'
'begin': '\\b(node|class)\\s+((?:[-_A-Za-z0-9"\'.]+::)*[-_A-Za-z0-9"\'.]+)\\s*'
'captures':
'1':
'name': 'storage.type.puppet'
Expand Down Expand Up @@ -228,6 +228,14 @@
'match': '=>'
'name': 'punctuation.separator.key-value.puppet'
}
{
'match': '->'
'name': 'keyword.control.orderarrow.puppet'
}
{
'match': '~>'
'name': 'keyword.control.notifyarrow.puppet'
}
]
'repository':
'constants':
Expand Down
10 changes: 9 additions & 1 deletion generated-syntaxes/puppet.tmLanguage.cson
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ patterns: [
name: "comment.block.puppet"
}
{
begin: "^\\s*(node|class)\\s+((?:[-_A-Za-z0-9\"\\'.]+::)*[-_A-Za-z0-9\"\\'.]+)\\s*"
begin: "\\b(node|class)\\s+((?:[-_A-Za-z0-9\"\\'.]+::)*[-_A-Za-z0-9\"\\'.]+)\\s*"
captures:
"1":
name: "storage.type.puppet"
Expand Down Expand Up @@ -228,6 +228,14 @@ patterns: [
match: "=>"
name: "punctuation.separator.key-value.puppet"
}
{
match: "->"
name: "keyword.control.orderarrow.puppet"
}
{
match: "~>"
name: "keyword.control.notifyarrow.puppet"
}
]
repository:
constants:
Expand Down
10 changes: 9 additions & 1 deletion generated-syntaxes/puppet.tmLanguage.json
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
"name": "comment.block.puppet"
},
{
"begin": "^\\s*(node|class)\\s+((?:[-_A-Za-z0-9\"\\'.]+::)*[-_A-Za-z0-9\"\\'.]+)\\s*",
"begin": "\\b(node|class)\\s+((?:[-_A-Za-z0-9\"\\'.]+::)*[-_A-Za-z0-9\"\\'.]+)\\s*",
"captures": {
"1": {
"name": "storage.type.puppet"
Expand Down Expand Up @@ -271,6 +271,14 @@
{
"match": "=>",
"name": "punctuation.separator.key-value.puppet"
},
{
"match": "->",
"name": "keyword.control.orderarrow.puppet"
},
{
"match": "~>",
"name": "keyword.control.notifyarrow.puppet"
}
],
"repository": {
Expand Down
6 changes: 5 additions & 1 deletion generated-syntaxes/puppet.tmLanguage.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ patterns:
- begin: ^\s*/\*
end: \*/
name: comment.block.puppet
- begin: '^\s*(node|class)\s+((?:[-_A-Za-z0-9"\''.]+::)*[-_A-Za-z0-9"\''.]+)\s*'
- begin: '\b(node|class)\s+((?:[-_A-Za-z0-9"\''.]+::)*[-_A-Za-z0-9"\''.]+)\s*'
captures:
'1':
name: storage.type.puppet
Expand Down Expand Up @@ -150,6 +150,10 @@ patterns:
name: support.function.puppet
- match: =>
name: punctuation.separator.key-value.puppet
- match: '->'
name: keyword.control.orderarrow.puppet
- match: ~>
name: keyword.control.notifyarrow.puppet
repository:
constants:
patterns:
Expand Down
14 changes: 13 additions & 1 deletion syntaxes/puppet.tmLanguage
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@
</dict>
<dict>
<key>begin</key>
<string>^\s*(node|class)\s+((?:[-_A-Za-z0-9"\'.]+::)*[-_A-Za-z0-9"\'.]+)\s*</string>
<string>\b(node|class)\s+((?:[-_A-Za-z0-9"\'.]+::)*[-_A-Za-z0-9"\'.]+)\s*</string>
<key>captures</key>
<dict>
<key>1</key>
Expand Down Expand Up @@ -431,6 +431,18 @@
<key>name</key>
<string>punctuation.separator.key-value.puppet</string>
</dict>
<dict>
<key>match</key>
<string>-></string>
<key>name</key>
<string>keyword.control.orderarrow.puppet</string>
</dict>
<dict>
<key>match</key>
<string>~></string>
<key>name</key>
<string>keyword.control.notifyarrow.puppet</string>
</dict>
</array>
<key>repository</key>
<dict>
Expand Down
45 changes: 44 additions & 1 deletion tests/syntaxes/puppet.tmLanguage.js
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,6 @@ describe('puppet.tmLanguage', function() {
});
});


describe('blocks', function() {
it("tokenizes single quoted node", function() {
var tokens = getLineTokens(grammar, "node 'hostname' {")
Expand Down Expand Up @@ -304,4 +303,48 @@ describe('puppet.tmLanguage', function() {
expect(tokens[0]).to.eql({value: 'package', scopes: ['source.puppet', 'meta.definition.resource.puppet', 'storage.type.puppet']});
});
});

describe('chaining arrows', function() {
var contexts = {
'ordering arrow': { 'text': '->', 'scope': 'keyword.control.orderarrow.puppet' },
'notifying arrow': { 'text': '~>', 'scope': 'keyword.control.notifyarrow.puppet' },
}

for(var contextName in contexts) {
context(contextName, function() {
var arrowText = contexts[contextName]['text'];
var arrowScope = contexts[contextName]['scope'];

it("tokenizes single line chaining", function() {
var tokens = getLineTokens(grammar, "Package['ntp'] ##ARROW## File['/etc/ntp.conf']".replace('##ARROW##', arrowText));
expect(tokens[7]).to.eql({value: arrowText, scopes: ['source.puppet'].concat(arrowScope)});
// Ensure that the trailing and leading resources are still tokenized correctly
expect(tokens[0]).to.eql({value: 'Package', scopes: ['source.puppet', 'storage.type.puppet']});
expect(tokens[9]).to.eql({value: 'File', scopes: ['source.puppet', 'storage.type.puppet']});
});

it("tokenizes single line chaining without whitespace", function() {
var tokens = getLineTokens(grammar, "Package['ntp']##ARROW##File['/etc/ntp.conf']".replace('##ARROW##', arrowText));
expect(tokens[6]).to.eql({value: arrowText, scopes: ['source.puppet'].concat(arrowScope)});
// Ensure that the trailing and leading resources are still tokenized correctly
expect(tokens[0]).to.eql({value: 'Package', scopes: ['source.puppet', 'storage.type.puppet']});
expect(tokens[7]).to.eql({value: 'File', scopes: ['source.puppet', 'storage.type.puppet']});
});

it("tokenizes multiline class at end chaining", function() {
var tokens = getLineTokens(grammar, "class a {\n} ##ARROW##\nclass b { }".replace('##ARROW##', arrowText));
expect(tokens[5]).to.eql({value: arrowText, scopes: ['source.puppet'].concat(arrowScope)});
// Ensure that the trailing class is still tokenized correctly
expect(tokens[7]).to.eql({value: 'class', scopes: ['source.puppet', 'meta.definition.class.puppet', 'storage.type.puppet']});
});

it("tokenizes multiline class at beginning chaining", function() {
var tokens = getLineTokens(grammar, "class a {\n}\n ##ARROW## class b { }".replace('##ARROW##', arrowText));
expect(tokens[5]).to.eql({value: arrowText, scopes: ['source.puppet'].concat(arrowScope)});
// Ensure that the trailing class is still tokenized correctly
expect(tokens[7]).to.eql({value: 'class', scopes: ['source.puppet', 'meta.definition.class.puppet', 'storage.type.puppet']});
});
});
};
});
});

0 comments on commit 251d103

Please sign in to comment.