add parser tool
lysu committed Nov 7, 2015
1 parent e7bda07 commit dd24697
Showing 4 changed files with 194 additions and 7 deletions.
27 changes: 27 additions & 0 deletions .gitignore
@@ -0,0 +1,27 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)

.idea/

*.o
*.a
*.so

# Folders
_obj
_test

# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out

*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*

_testmain.go

*.exe
*.test
*.prof
24 changes: 24 additions & 0 deletions all_test.go
@@ -0,0 +1,24 @@
package patcher_test

import (
	"testing"

	"github.com/lysu/go-struct-patcher"
	"github.com/stretchr/testify/assert"
)

func TestAll(t *testing.T) {
	assert := assert.New(t)

	testPath := "abc.r1.d"

	// "abc.r1.d" lexes into 5 tokens: "abc", ".", "r1", ".", "d".
	tok, err := patcher.Lex(testPath)
	assert.Nil(err)
	assert.Len(tok, 5)

	p := patcher.NewParser(tok)
	assert.NotNil(p)
}
15 changes: 8 additions & 7 deletions lexer.go
@@ -17,10 +17,10 @@ const (
 )
 
 var (
-	tokenSpaceChars                = " \n\r\t"
-	tokenIdentifierChars           = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_"
+	tokenSpaceChars      = " \n\r\t"
+	tokenIdentifierChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_"
 	tokenIdentifierCharsWithDigits = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789"
-	tokenDigits                    = "0123456789"
+	tokenDigits          = "0123456789"
 
 	TokenSymbols = []string{
 		".", ";",
@@ -57,7 +57,8 @@ type lexer struct {
 	col int
 }
 
-func lex(input string) ([]*Token, *Error) {
+// Lex performs lexical analysis
+func Lex(input string) ([]*Token, *Error) {
 	l := &lexer{
 		input:  input,
 		tokens: make([]*Token, 0, 100),
@@ -68,7 +69,7 @@ func lex(input string) ([]*Token, *Error) {
 	}
 	l.run()
 	if l.errored {
-		errtoken := l.tokens[len(l.tokens)-1]
+		errtoken := l.tokens[len(l.tokens) - 1]
 		return nil, &Error{
 			Line:   errtoken.Line,
 			Column: errtoken.Col,
@@ -163,7 +164,7 @@ func (l *lexer) errorf(format string, args ...interface{}) lexerStateFn {
 }
 
 func (l *lexer) eof() bool {
-	return l.start >= len(l.input)-1
+	return l.start >= len(l.input) - 1
 }
 
 func (l *lexer) run() {
@@ -181,7 +182,7 @@ func (l *lexer) run() {
 }
 
 func (l *lexer) stateCode() lexerStateFn {
-outer_loop:
+	outer_loop:
 	for {
 		switch {
 		case l.accept(tokenSpaceChars):
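
With lex exported as Lex, code outside the package can tokenize a path expression directly. A minimal sketch of a caller, assuming only the Lex signature and the Token/Error fields visible in this diff:

package main

import (
	"fmt"

	"github.com/lysu/go-struct-patcher"
)

func main() {
	toks, lexErr := patcher.Lex("abc.r1.d")
	if lexErr != nil {
		// Error exposes the position of the offending token.
		fmt.Printf("lex error at %d:%d: %s\n", lexErr.Line, lexErr.Column, lexErr.ErrorMsg)
		return
	}
	for _, t := range toks {
		// Each Token carries its value and source position.
		fmt.Printf("%q at %d:%d\n", t.Val, t.Line, t.Col)
	}
}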
135 changes: 135 additions & 0 deletions parser.go
@@ -0,0 +1,135 @@
package patcher

// Parser walks a token stream produced by Lex, tracking the current position.
type Parser struct {
	idx       int
	tokens    []*Token
	lastToken *Token
}

// NewParser wraps a token slice in a Parser positioned at the first token.
func NewParser(tokens []*Token) *Parser {
	p := &Parser{tokens: tokens}
	if len(tokens) > 0 {
		p.lastToken = tokens[len(tokens)-1]
	}
	return p
}

// Consume advances past the current token.
func (p *Parser) Consume() {
	p.ConsumeN(1)
}

// ConsumeN advances past the next count tokens.
func (p *Parser) ConsumeN(count int) {
	p.idx += count
}

// Current returns the token at the current position, or nil at end of input.
func (p *Parser) Current() *Token {
	return p.Get(p.idx)
}

// MatchType consumes and returns the current token if it has the given type.
func (p *Parser) MatchType(typ TokenType) *Token {
	if t := p.PeekType(typ); t != nil {
		p.Consume()
		return t
	}
	return nil
}

// Match consumes and returns the current token if it has the given type and value.
func (p *Parser) Match(typ TokenType, val string) *Token {
	if t := p.Peek(typ, val); t != nil {
		p.Consume()
		return t
	}
	return nil
}

// MatchOne consumes and returns the current token if it has the given type
// and any one of the given values.
func (p *Parser) MatchOne(typ TokenType, vals ...string) *Token {
	for _, val := range vals {
		if t := p.Peek(typ, val); t != nil {
			p.Consume()
			return t
		}
	}
	return nil
}

// PeekType returns the current token if it has the given type, without consuming it.
func (p *Parser) PeekType(typ TokenType) *Token {
	return p.PeekTypeN(0, typ)
}

// Peek returns the current token if it has the given type and value, without consuming it.
func (p *Parser) Peek(typ TokenType, val string) *Token {
	return p.PeekN(0, typ, val)
}

// PeekOne returns the current token if it has the given type and any one of
// the given values, without consuming it.
func (p *Parser) PeekOne(typ TokenType, vals ...string) *Token {
	for _, v := range vals {
		if t := p.PeekN(0, typ, v); t != nil {
			return t
		}
	}
	return nil
}

// PeekN returns the token shift positions ahead if it has the given type and value.
func (p *Parser) PeekN(shift int, typ TokenType, val string) *Token {
	if t := p.Get(p.idx + shift); t != nil && t.Typ == typ && t.Val == val {
		return t
	}
	return nil
}

// PeekTypeN returns the token shift positions ahead if it has the given type.
func (p *Parser) PeekTypeN(shift int, typ TokenType) *Token {
	if t := p.Get(p.idx + shift); t != nil && t.Typ == typ {
		return t
	}
	return nil
}

// Remaining reports how many tokens are left from the current position.
func (p *Parser) Remaining() int {
	return len(p.tokens) - p.idx
}

// Count reports the total number of tokens.
func (p *Parser) Count() int {
	return len(p.tokens)
}

// Get returns the token at index i, or nil if i is out of range.
func (p *Parser) Get(i int) *Token {
	if i >= 0 && i < len(p.tokens) {
		return p.tokens[i]
	}
	return nil
}

// GetR returns the token shift positions relative to the current one.
func (p *Parser) GetR(shift int) *Token {
	return p.Get(p.idx + shift)
}

// Error builds an *Error at the given token, falling back to the current
// token and then to the last token when token is nil.
func (p *Parser) Error(msg string, token *Token) *Error {
	if token == nil {
		// Fall back to the current token.
		token = p.Current()
		if token == nil && len(p.tokens) > 0 {
			// Fall back to the last token.
			token = p.tokens[len(p.tokens)-1]
		}
	}
	var line, col int
	if token != nil {
		line = token.Line
		col = token.Col
	}
	return &Error{
		Line:     line,
		Column:   col,
		Token:    token,
		ErrorMsg: msg,
	}
}
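
The Peek variants inspect without consuming, while the Match variants consume on success, which is all a recursive-descent caller needs. A sketch of walking a dotted path such as abc.r1.d with this API; the token-type constant names TokenIdentifier and TokenSymbol are assumptions, since the const block that defines the types is elided from this diff:

package main

import (
	"fmt"

	"github.com/lysu/go-struct-patcher"
)

// pathSegments collects the identifier segments of a path like "abc.r1.d".
func pathSegments(p *patcher.Parser) ([]string, *patcher.Error) {
	var segs []string
	for {
		ident := p.MatchType(patcher.TokenIdentifier) // assumed constant name
		if ident == nil {
			return nil, p.Error("expected identifier", p.Current())
		}
		segs = append(segs, ident.Val)
		// A "." continues the path; anything else ends it.
		if p.Match(patcher.TokenSymbol, ".") == nil { // assumed constant name
			return segs, nil
		}
	}
}

func main() {
	// The sketch ignores the lex error for brevity.
	toks, _ := patcher.Lex("abc.r1.d")
	p := patcher.NewParser(toks)
	segs, perr := pathSegments(p)
	if perr != nil {
		fmt.Println(perr.ErrorMsg)
		return
	}
	fmt.Println(segs) // [abc r1 d]
}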
