
Commit

fix linter warnings
xiam committed Feb 8, 2020
1 parent d2bef11 commit f2ec568
Showing 6 changed files with 183 additions and 51 deletions.
10 changes: 10 additions & 0 deletions lexer/errors.go
@@ -0,0 +1,10 @@
+package lexer
+
+import (
+    "errors"
+)
+
+// error messages
+var (
+    ErrForceStopped = errors.New("force stopped")
+)
91 changes: 60 additions & 31 deletions lexer/lexer.go
@@ -2,11 +2,15 @@ package lexer

 import (
     "bytes"
+    "fmt"
     "io"
-    "log"
     "text/scanner"
 )
 
+var (
+    ticket = struct{}{}
+)
+
 type lexState func(*Lexer) lexState
 
 var (
@@ -39,21 +43,27 @@ func New(r io.Reader) *Lexer {
     }
 
     return &Lexer{
-        in:     s.Init(r),
-        tokens: make(chan Token),
-        done:   make(chan struct{}),
-        buf:    []rune{},
+        in:       s.Init(r),
+        tickets:  make(chan struct{}),
+        scanning: make(chan struct{}),
+        tokens:   make(chan *Token),
+        buf:      []rune{},
     }
 }
 
 // Lexer represents a lexical analyzer
 type Lexer struct {
     in *scanner.Scanner
 
-    tokens chan Token
+    lastTok *Token
+    tokens  chan *Token
+
+    tickets chan struct{}
+
+    scanning chan struct{}
 
-    done    chan struct{}
     lastErr error
+    closed  bool
 
     buf []rune
 
@@ -62,31 +72,42 @@ type Lexer struct {
     lines int
 }
 
-// Tokens returns a channel that is going to receive tokens as soon as they are
-// detected.
-func (lx *Lexer) Tokens() chan Token {
-    return lx.tokens
-}
-
-func (lx *Lexer) stop() {
-    for {
-        select {
-        case <-lx.tokens:
-            // drain channel
-        default:
-            lx.done <- struct{}{}
-            close(lx.tokens)
-            return
-        }
-    }
-}
+// Next sends a signal to the Scan method for it to continue scanning
+func (lx *Lexer) Next() bool {
+    if lx.closed {
+        return false
+    }
+
+    lx.tickets <- struct{}{}
+
+    tok, _ := <-lx.tokens
+    lx.lastTok = tok
+
+    if tok.tt == TokenEOF {
+        lx.closed = true
+    }
+
+    return true
+}
+
+// Token returns the most recent scanned token
+func (lx *Lexer) Token() *Token {
+    return lx.lastTok
+}
+
+// Stop requests the Scan method to stop scanning
+func (lx *Lexer) Stop() {
+    lx.closed = true
+    close(lx.tickets)
+    close(lx.scanning)
+}
 
 // Scan starts scanning the reader for tokens.
 func (lx *Lexer) Scan() error {
     for state := lexDefaultState; state != nil; {
         select {
-        case <-lx.done:
-            return nil
+        case <-lx.scanning:
+            return ErrForceStopped
         default:
             state = state(lx)
         }
@@ -96,13 +117,16 @@ func (lx *Lexer) Scan() error {
         lx.emit(TokenEOF)
     }
 
+    close(lx.tokens)
+
     return lx.lastErr
 }
 
 func (lx *Lexer) emit(tt TokenType) {
-    lx.tokens <- Token{
+    _, ok := <-lx.tickets
+    if !ok {
+        return
+    }
+
+    tok := Token{
         tt:     tt,
         lexeme: string(lx.buf),
 
@@ -118,6 +142,8 @@ func (lx *Lexer) emit(tt TokenType) {
         lx.start = 0
         lx.offset = 0
     }
+
+    lx.tokens <- &tok
 }
 
 func (lx *Lexer) peek() rune {
@@ -231,8 +257,7 @@ func lexCollectStream(tt TokenType) lexState {

 func lexStateError(err error) lexState {
     return func(lx *Lexer) lexState {
-        log.Printf("lexer error: %v", err)
-        lx.lastErr = err
+        lx.lastErr = fmt.Errorf("read error: %v", err)
         return nil
     }
 }
@@ -246,8 +271,12 @@ func Tokenize(in []byte) ([]Token, error) {
     lx := New(bytes.NewReader(in))
 
     go func() {
-        for tok := range lx.tokens {
-            tokens = append(tokens, tok)
+        for lx.Next() {
+            tok := lx.Token()
+            if tok == nil {
+                break
+            }
+            tokens = append(tokens, *tok)
         }
         done <- struct{}{}
     }()
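Note: taken together, the lexer now exposes a pull-style API in place of the removed Tokens channel: the consumer drives scanning by calling Next, reads the result with Token, and can interrupt with Stop. A minimal usage sketch from outside the package (the input string is made up; the shape follows the Tokenize helper above):

package main

import (
    "bytes"
    "fmt"
    "log"

    "github.com/xiam/sexpr/lexer"
)

func main() {
    lx := lexer.New(bytes.NewReader([]byte(`(+ 1 2)`)))

    // Scan blocks until Next hands it a ticket, so it runs in its own
    // goroutine; the buffered channel lets it exit once we stop reading.
    errCh := make(chan error, 1)
    go func() {
        errCh <- lx.Scan()
    }()

    // Next returns false once the EOF token has been delivered.
    for lx.Next() {
        fmt.Printf("%q\n", lx.Token().Text())
    }

    if err := <-errCh; err != nil {
        log.Fatal(err)
    }
}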
28 changes: 28 additions & 0 deletions lexer/scanner_test.go
@@ -0,0 +1,28 @@
+package lexer
+
+import (
+    "bytes"
+    "testing"
+
+    "github.com/stretchr/testify/assert"
+)
+
+func TestScannerStop(t *testing.T) {
+    lx := New(bytes.NewReader([]byte(`1 2 3 4 5`)))
+
+    errCh := make(chan error)
+    go func() {
+        errCh <- lx.Scan()
+    }()
+
+    go func() {
+        for lx.Next() {
+            _ = lx.Token()
+
+            lx.Stop()
+        }
+    }()
+
+    err := <-errCh
+    assert.Equal(t, ErrForceStopped, err)
+}
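The test drives the ticket handshake end to end: Scan produces nothing until Next supplies a ticket, and Stop closes the ticket channel so Scan returns ErrForceStopped. The same rendezvous pattern in isolation, as a generic sketch (not code from this repository):

package main

import "fmt"

// produce emits one value per ticket; a closed ticket channel is the stop
// signal, mirroring what Lexer.Stop does by closing lx.tickets.
func produce(tickets <-chan struct{}, out chan<- int) {
    defer close(out)
    for i := 1; ; i++ {
        if _, ok := <-tickets; !ok {
            return // consumer stopped us
        }
        out <- i
    }
}

func main() {
    tickets := make(chan struct{})
    out := make(chan int)
    go produce(tickets, out)

    for i := 0; i < 3; i++ {
        tickets <- struct{}{} // request the next value
        fmt.Println(<-out)
    }
    close(tickets) // the equivalent of Stop: production halts
}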
10 changes: 10 additions & 0 deletions parser/errors.go
@@ -0,0 +1,10 @@
+package parser
+
+import (
+    "errors"
+)
+
+var (
+    errUnexpectedEOF   = errors.New("unexpected EOF")
+    errUnexpectedToken = errors.New("unexpected token")
+)
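parser.go below wraps these sentinels with %w when it attaches position information, which keeps them matchable by callers. A small self-contained illustration of that property:

package main

import (
    "errors"
    "fmt"
)

var errUnexpectedToken = errors.New("unexpected token")

func main() {
    // %w wraps the sentinel, so errors.Is still matches it after
    // position details have been attached to the message.
    err := fmt.Errorf("syntax error: %w (around (line 1) (column 3))", errUnexpectedToken)
    fmt.Println(errors.Is(err, errUnexpectedToken)) // true
}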
44 changes: 24 additions & 20 deletions parser/parser.go
@@ -2,22 +2,17 @@ package parser

 import (
     "bytes"
-    "errors"
+    "fmt"
     "io"
-    "log"
     "strconv"
 
     "github.com/xiam/sexpr/ast"
     "github.com/xiam/sexpr/lexer"
 )
 
 // EOF represents the end of the file the parser is reading
 var EOF = lexer.NewToken(lexer.TokenEOF, "", -1, -1)
 
-var (
-    errUnexpectedEOF   = errors.New("unexpected EOF")
-    errUnexpectedToken = errors.New("unexpected token")
-)
 
 type parserState func(p *Parser) parserState
 
 // Parser represents a parser
@@ -52,8 +47,8 @@ func (p *Parser) Parse() error {
     }
 
     err := <-errCh
-    if err != nil {
-        return err
+    if err != nil && err != lexer.ErrForceStopped {
+        return fmt.Errorf("lexer error: %v", err)
     }
 
     return p.lastErr
@@ -64,11 +59,11 @@ func (p *Parser) curr() *lexer.Token {
 }
 
 func (p *Parser) read() *lexer.Token {
-    tok, ok := <-p.lx.Tokens()
-    if ok {
-        return &tok
+    if ok := p.lx.Next(); !ok {
+        return EOF
     }
-    return EOF
+
+    return p.lx.Token()
 }
 
 func (p *Parser) peek() *lexer.Token {
@@ -111,8 +106,22 @@ func parserDefaultState(p *Parser) parserState {

 func parserErrorState(err error) parserState {
     return func(p *Parser) parserState {
-        //p.lx.stop()
-        p.lastErr = err
+        p.lx.Stop()
+
+        tok := p.curr()
+        if tok == nil {
+            p.lastErr = fmt.Errorf("syntax error: %w", err)
+            return nil
+        }
+
+        line, col := tok.Pos()
+        switch err {
+        case errUnexpectedToken:
+            p.lastErr = fmt.Errorf("syntax error: %w %q (around (line %v) (column %v))", err, tok.Text(), line, col)
+            return nil
+        }
+        // TODO: extract a code snippet around line and col
+        p.lastErr = fmt.Errorf("syntax error: %w (around (line: %v) (column %v))", err, line, col)
         return nil
     }
 }
@@ -460,8 +469,3 @@ func Parse(in []byte) (*ast.Node, error) {

     return p.root, nil
 }
-
-func parserError(err error, tok *lexer.Token) error {
-    log.Fatalf("%v: %v", err.Error(), tok)
-    return err
-}
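With parserError gone, malformed input now surfaces as an ordinary error value instead of terminating the process through log.Fatalf. A brief usage sketch (the input and the exact message shown are illustrative):

package main

import (
    "fmt"

    "github.com/xiam/sexpr/parser"
)

func main() {
    // A mismatched delimiter no longer kills the process; Parse
    // returns a positioned syntax error instead.
    _, err := parser.Parse([]byte(`(1 2}`))
    fmt.Println(err)
    // e.g.: syntax error: unexpected token "}" (around (line 1) (column 5))
}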
51 changes: 51 additions & 0 deletions parser/parser_test.go
@@ -153,3 +153,54 @@ func TestParserBuildTree(t *testing.T) {
         assert.Equal(t, testCases[i].Out, string(s))
     }
 }
+
+func TestParserErrors(t *testing.T) {
+    testCases := []struct {
+        In  string
+        Err string
+    }{
+        {
+            In: `(1`,
+        },
+        {
+            In: `(}`,
+        },
+        {
+            In: `[}`,
+        },
+        {
+            In: `[)`,
+        },
+        {
+            In: `1 )}`,
+        },
+        {
+            In: `1 ](}`,
+        },
+        {
+            In: `({}{`,
+        },
+        {
+            In: `({/{`,
+        },
+        {
+            In: `+}`,
+        },
+        {
+            In: `{)}`,
+        },
+        {
+            In: `(1 2 3 4
+                (5 6 7 8
+                    (4 6})
+            )`,
+        },
+    }
+
+    for i := range testCases {
+        root, err := Parse([]byte(testCases[i].In))
+        assert.Nil(t, root)
+        assert.Error(t, err)
+        t.Log(err)
+    }
+}
