
Commit

Merge pull request #133 from atom/wl-proper-brackets
Use begin/end for brackets
50Wliu committed Jul 17, 2017
2 parents ceccd06 + 6e3a0b7 commit c413bc9
Showing 2 changed files with 93 additions and 57 deletions.
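
The substantive change: curly and round brackets switch from one-line 'match' rules to 'begin'/'end' rules that include '$self', so the opening and closing brackets get distinct begin/end scope names and everything between them is still tokenized by the full grammar (square brackets keep a 'match' rule and are only renamed). A minimal spec-style sketch of the effect, not part of this commit and with token positions assumed for a trivial input:

# Sketch only (not from this commit): with the new begin/end curly-bracket
# rule, the braces get begin/end scopes and the enclosed token is still
# tokenized by the full grammar via the '$self' include.
{tokens} = grammar.tokenizeLine '{nil}'
expect(tokens[0]).toEqual value: '{', scopes: ['source.go', 'punctuation.definition.begin.bracket.curly.go']
expect(tokens[1]).toEqual value: 'nil', scopes: ['source.go', 'constant.language.go']
expect(tokens[2]).toEqual value: '}', scopes: ['source.go', 'punctuation.definition.end.bracket.curly.go']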
40 changes: 31 additions & 9 deletions grammars/go.cson
@@ -280,16 +280,38 @@
'brackets':
'patterns': [
{
'match': '\\{|\\}'
'name': 'punctuation.other.bracket.curly.go'
'begin': '{'
'beginCaptures':
'0':
'name': 'punctuation.definition.begin.bracket.curly.go'
'end': '}'
'endCaptures':
'0':
'name': 'punctuation.definition.end.bracket.curly.go'
'patterns': [
{
'include': '$self'
}
]
}
{
'match': '\\(|\\)'
'name': 'punctuation.other.bracket.round.go'
'begin': '\\('
'beginCaptures':
'0':
'name': 'punctuation.definition.begin.bracket.round.go'
'end': '\\)'
'endCaptures':
'0':
'name': 'punctuation.definition.end.bracket.round.go'
'patterns': [
{
'include': '$self'
}
]
}
{
'match': '\\[|\\]'
'name': 'punctuation.other.bracket.square.go'
'name': 'punctuation.definition.bracket.square.go'
}
]
'comments':
@@ -348,11 +370,11 @@
'begin': '\\('
'beginCaptures':
'0':
'name': 'punctuation.other.bracket.round.go'
'name': 'punctuation.definition.imports.begin.bracket.round.go'
'end': '\\)'
'endCaptures':
'0':
'name': 'punctuation.other.bracket.round.go'
'name': 'punctuation.definition.imports.end.bracket.round.go'
'patterns': [
{
'include': '#comments'
@@ -584,11 +606,11 @@
'begin': '\\('
'beginCaptures':
'0':
'name': 'punctuation.other.bracket.round.go'
'name': 'punctuation.definition.variables.begin.bracket.round.go'
'end': '\\)'
'endCaptures':
'0':
'name': 'punctuation.other.bracket.round.go'
'name': 'punctuation.definition.variables.end.bracket.round.go'
'patterns': [
{
'include': '$self'
110 changes: 62 additions & 48 deletions spec/go-spec.coffee
@@ -39,7 +39,6 @@ describe 'Go grammar', ->
expect(lines[2][5]).toEqual value: '//', scopes: ['source.go', 'comment.line.double-slash.go', 'punctuation.definition.comment.go']
expect(lines[3][1]).toEqual value: '//', scopes: ['source.go', 'comment.line.double-slash.go', 'punctuation.definition.comment.go']


it 'tokenizes strings', ->
delims =
'string.quoted.double.go': '"'
@@ -222,7 +221,7 @@

next = tokens[t.tokenPos + 1]
expect(next.value).toEqual '('
expect(next.scopes).toEqual ['source.go', 'punctuation.other.bracket.round.go']
expect(next.scopes).toEqual ['source.go', 'punctuation.definition.begin.bracket.round.go']

it 'only tokenizes func when it is an exact match', ->
tests = ['myfunc', 'funcMap']
@@ -263,7 +262,7 @@

next = tokens[t.tokenPos + 1]
expect(next.value).toEqual '('
expect(next.scopes).toEqual ['source.go', 'punctuation.other.bracket.round.go']
expect(next.scopes).toEqual ['source.go', 'punctuation.definition.begin.bracket.round.go']

it 'tokenizes operators method declarations', ->
tests = [
@@ -353,16 +352,13 @@
expect(tokens[0].scopes).toEqual ['source.go', scope]

it 'tokenizes punctuation brackets', ->
brackets =
'punctuation.other.bracket.square.go': [ '[', ']' ]
'punctuation.other.bracket.round.go': [ '(', ')' ]
'punctuation.other.bracket.curly.go': [ '{', '}' ]

for scope, brkts of brackets
for brkt in brkts
{tokens} = grammar.tokenizeLine brkt
expect(tokens[0].value).toEqual brkt
expect(tokens[0].scopes).toEqual ['source.go', scope]
{tokens} = grammar.tokenizeLine '{([])}'
expect(tokens[0]).toEqual value: '{', scopes: ['source.go', 'punctuation.definition.begin.bracket.curly.go']
expect(tokens[1]).toEqual value: '(', scopes: ['source.go', 'punctuation.definition.begin.bracket.round.go']
expect(tokens[2]).toEqual value: '[', scopes: ['source.go', 'punctuation.definition.bracket.square.go']
expect(tokens[3]).toEqual value: ']', scopes: ['source.go', 'punctuation.definition.bracket.square.go']
expect(tokens[4]).toEqual value: ')', scopes: ['source.go', 'punctuation.definition.end.bracket.round.go']
expect(tokens[5]).toEqual value: '}', scopes: ['source.go', 'punctuation.definition.end.bracket.curly.go']

it 'tokenizes punctuation delimiters', ->
delims =
@@ -428,7 +424,7 @@

next = tokens[t.tokenPos + 1]
expect(next.value).toEqual '('
expect(next.scopes).toEqual ['source.go', 'punctuation.other.bracket.round.go']
expect(next.scopes).toEqual ['source.go', 'punctuation.definition.begin.bracket.round.go']
else
expect(relevantToken.scopes).not.toEqual want

@@ -493,9 +489,9 @@
expect(token.value).toBe op
expect(token.scopes).toEqual ['source.go', 'keyword.operator.assignment.go']

testOpBracket = (token, op) ->
testOpBracket = (token, op, type) ->
expect(token.value).toBe op
expect(token.scopes).toEqual ['source.go', 'punctuation.other.bracket.round.go']
expect(token.scopes).toEqual ['source.go', "punctuation.definition.variables.#{type}.bracket.round.go"]

testOpPunctuation = (token, op) ->
expect(token.value).toBe op
@@ -582,28 +578,28 @@
{tokens} = grammar.tokenizeLine 'var s [4]string'
testVar tokens[0]
testVarDeclaration tokens[2], 's'
expect(tokens[4]).toEqual value: '[', scopes: ['source.go', 'punctuation.other.bracket.square.go']
expect(tokens[4]).toEqual value: '[', scopes: ['source.go', 'punctuation.definition.bracket.square.go']
expect(tokens[5]).toEqual value: '4', scopes: ['source.go', 'constant.numeric.integer.go']
expect(tokens[6]).toEqual value: ']', scopes: ['source.go', 'punctuation.other.bracket.square.go']
expect(tokens[6]).toEqual value: ']', scopes: ['source.go', 'punctuation.definition.bracket.square.go']
testStringType tokens[7], 'string'

it 'tokenizes a single name and an array type with variadic length', ->
{tokens} = grammar.tokenizeLine 'var s [...]string'
testVar tokens[0]
testVarDeclaration tokens[2], 's'
expect(tokens[4]).toEqual value: '[', scopes: ['source.go', 'punctuation.other.bracket.square.go']
expect(tokens[4]).toEqual value: '[', scopes: ['source.go', 'punctuation.definition.bracket.square.go']
expect(tokens[5]).toEqual value: '...', scopes: ['source.go', 'keyword.operator.ellipsis.go']
expect(tokens[6]).toEqual value: ']', scopes: ['source.go', 'punctuation.other.bracket.square.go']
expect(tokens[6]).toEqual value: ']', scopes: ['source.go', 'punctuation.definition.bracket.square.go']
testStringType tokens[7], 'string'

it 'tokenizes a single name and multi-dimensional types with an address', ->
{tokens} = grammar.tokenizeLine 'var e [][]*string'
testVar tokens[0]
testVarDeclaration tokens[2], 'e'
expect(tokens[4]).toEqual value: '[', scopes: ['source.go', 'punctuation.other.bracket.square.go']
expect(tokens[5]).toEqual value: ']', scopes: ['source.go', 'punctuation.other.bracket.square.go']
expect(tokens[6]).toEqual value: '[', scopes: ['source.go', 'punctuation.other.bracket.square.go']
expect(tokens[7]).toEqual value: ']', scopes: ['source.go', 'punctuation.other.bracket.square.go']
expect(tokens[4]).toEqual value: '[', scopes: ['source.go', 'punctuation.definition.bracket.square.go']
expect(tokens[5]).toEqual value: ']', scopes: ['source.go', 'punctuation.definition.bracket.square.go']
expect(tokens[6]).toEqual value: '[', scopes: ['source.go', 'punctuation.definition.bracket.square.go']
expect(tokens[7]).toEqual value: ']', scopes: ['source.go', 'punctuation.definition.bracket.square.go']
testOpAddress tokens[8], '*'
testStringType tokens[9], 'string'

@@ -733,10 +729,10 @@
)
'''
testVar lines[0][0]
testOpBracket lines[0][2], '('
testOpBracket lines[0][2], '(', 'begin'
testVarDeclaration lines[1][1], 'foo'
testOpAddress lines[1][3], '*'
testOpBracket lines[2][0], ')'
testOpBracket lines[2][0], ')', 'end'

it 'tokenizes single names with an initializer', ->
lines = grammar.tokenizeLines '''
@@ -745,11 +741,11 @@
)
'''
testVar lines[0][0], 'var'
testOpBracket lines[0][2], '('
testOpBracket lines[0][2], '(', 'begin'
testVarAssignment lines[1][1], 'foo'
testOpAssignment lines[1][3], '='
testNum lines[1][5], '42'
testOpBracket lines[2][0], ')'
testOpBracket lines[2][0], ')', 'end'

it 'tokenizes multiple names', ->
lines = grammar.tokenizeLines '''
@@ -758,13 +754,13 @@
)
'''
testVar lines[0][0]
testOpBracket lines[0][2], '('
testOpBracket lines[0][2], '(', 'begin'
testVarAssignment lines[1][1], 'foo'
testOpPunctuation lines[1][2], ','
testVarAssignment lines[1][4], 'bar'
testOpAssignment lines[1][6], '='
testOpPunctuation lines[1][8], ','
testOpBracket lines[2][0], ')'
testOpBracket lines[2][0], ')', 'end'

it 'tokenizes non variable declarations', ->
lines = grammar.tokenizeLines '''
@@ -782,7 +778,7 @@
)
'''
testVar lines[0][0]
testOpBracket lines[0][2], '('
testOpBracket lines[0][2], '(', 'begin'
expect(lines[1][1]).toEqual value: '//', scopes: ['source.go', 'comment.line.double-slash.go', 'punctuation.definition.comment.go']
expect(lines[1][2]).toEqual value: ' I am a comment', scopes: ['source.go', 'comment.line.double-slash.go']
testVarDeclaration lines[2][1], 'foo'
@@ -791,7 +787,7 @@
expect(lines[4][3]).toEqual value: 'func', scopes: ['source.go', 'keyword.function.go']
expect(lines[5][1]).toEqual value: 'if', scopes: ['source.go', 'keyword.control.go']
expect(lines[8][3]).toEqual value: 'nil', scopes: ['source.go', 'constant.language.go']
testOpBracket lines[11][0], ')'
testOpBracket lines[11][0], ')', 'end'

it 'tokenizes all parts of variable initializations correctly', ->
lines = grammar.tokenizeLines '''
@@ -802,12 +798,12 @@
)
'''
testVar lines[0][0]
testOpBracket lines[0][2], '('
testOpBracket lines[0][2], '(', 'begin'
testVarAssignment lines[1][1], 'm'
testOpAssignment lines[1][3], '='
testString lines[2][2], 'key'
testNum lines[2][6], '10'
testOpBracket lines[4][0], ')'
testOpBracket lines[4][0], ')', 'end'

it 'tokenizes non-ASCII variable names', ->
{tokens} = grammar.tokenizeLine 'über = test'
@@ -855,9 +851,9 @@
expect(token.value).toBe name
expect(token.scopes).toEqual ['source.go', 'string.quoted.double.go', 'entity.name.import.go']

testOpBracket = (token, op) ->
testOpBracket = (token, op, type) ->
expect(token.value).toBe op
expect(token.scopes).toEqual ['source.go', 'punctuation.other.bracket.round.go']
expect(token.scopes).toEqual ['source.go', "punctuation.definition.imports.#{type}.bracket.round.go"]

testBeginQuoted = (token) ->
expect(token.value).toBe '"'
@@ -896,45 +892,63 @@

describe 'when it is a multi line declaration', ->
it 'tokenizes single declarations with a package name', ->
[kwd, decl, closing] = grammar.tokenizeLines 'import (\n\t"github.com/test/package"\n)'
[kwd, decl, closing] = grammar.tokenizeLines '''
import (
"github.com/test/package"
)
'''
testImport kwd[0]
testOpBracket kwd[2], '('
testOpBracket kwd[2], '(', 'begin'
testBeginQuoted decl[1]
testImportPackage decl[2], 'github.com/test/package'
testEndQuoted decl[3]
testOpBracket closing[0], ')'
testOpBracket closing[0], ')', 'end'

it 'tokenizes multiple declarations with a package name', ->
[kwd, decl, decl2, closing] = grammar.tokenizeLines 'import (\n\t"github.com/test/package"\n\t"fmt"\n)'
[kwd, decl, decl2, closing] = grammar.tokenizeLines '''
import (
"github.com/test/package"
"fmt"
)
'''
testImport kwd[0]
testOpBracket kwd[2], '('
testOpBracket kwd[2], '(', 'begin'
testBeginQuoted decl[1]
testImportPackage decl[2], 'github.com/test/package'
testEndQuoted decl[3]
testBeginQuoted decl2[1]
testImportPackage decl2[2], 'fmt'
testEndQuoted decl2[3]
testOpBracket closing[0], ')'
testOpBracket closing[0], ')', 'end'

it 'tokenizes single imports with an alias for a multi-line declaration', ->
[kwd, decl, closing] = grammar.tokenizeLines 'import (\n\t. "github.com/test/package"\n)'
[kwd, decl, closing] = grammar.tokenizeLines '''
import (
. "github.com/test/package"
)
'''
testImport kwd[0]
testOpBracket kwd[2], '('
testOpBracket kwd[2], '(', 'begin'
testImportAlias decl[1], '.'
testBeginQuoted decl[3]
testImportPackage decl[4], 'github.com/test/package'
testEndQuoted decl[5]
testOpBracket closing[0], ')'
testOpBracket closing[0], ')', 'end'

it 'tokenizes multiple imports with an alias for a multi-line declaration', ->
[kwd, decl, decl2, closing] = grammar.tokenizeLines 'import (\n\t. "github.com/test/package"\n\t"fmt"\n)'
[kwd, decl, decl2, closing] = grammar.tokenizeLines '''
import (
. "github.com/test/package"
"fmt"
)
'''
testImport kwd[0]
testOpBracket kwd[2], '('
testOpBracket kwd[2], '(', 'begin'
testImportAlias decl[1], '.'
testBeginQuoted decl[3]
testImportPackage decl[4], 'github.com/test/package'
testEndQuoted decl[5]
testBeginQuoted decl2[1]
testImportPackage decl2[2], 'fmt'
testEndQuoted decl2[3]
testOpBracket closing[0], ')'
testOpBracket closing[0], ')', 'end'
