This repository has been archived by the owner on Nov 22, 2019. It is now read-only.

Commit

Merge pull request #7 from aquilax/channels
Channels instead of injection
aquilax committed Nov 16, 2014
2 parents 977d93f + 9e771fb commit f7e2052
Showing 3 changed files with 115 additions and 34 deletions.
43 changes: 35 additions & 8 deletions hranoprovod.go
@@ -41,16 +41,43 @@ func (hr *Hranoprovod) Run(version string) error {
 
 	parserOptions := NewDefaultParserOptions()
 
-	db, errp1 := NewParser(parserOptions, nil).parseFile(options.databaseFileName)
-	if errp1 != nil {
-		return errp1
+	nodeList := NewNodeList()
+	parser := NewParser(parserOptions)
+	go parser.parseFile(options.databaseFileName)
+	err := func() error {
+		for {
+			select {
+			case node := <-parser.nodes:
+				nodeList.push(node)
+			case breakingError := <-parser.errors:
+				return breakingError
+			case <-parser.done:
+				return nil
+			}
+		}
+	}()
+
+	if err != nil {
+		return err
 	}
-	NewResolver(db, resolverMaxDepth).resolve()
+	NewResolver(nodeList, resolverMaxDepth).resolve()
 
-	_, errp2 := NewParser(parserOptions, NewProcessor(
+	processor := NewProcessor(
 		options,
-		db,
+		nodeList,
 		NewReporter(options, os.Stdout),
-	)).parseFile(options.logFileName)
-	return errp2
+	)
+
+	go parser.parseFile(options.logFileName)
+	for {
+		select {
+		case node := <-parser.nodes:
+			processor.process(node)
+		case breakingError := <-parser.errors:
+			return breakingError
+		case <-parser.done:
+			return nil
+		}
+	}
+	return nil
 }
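
The new Run flow above is a plain producer/consumer fan-in: the parser is started in its own goroutine and publishes results over three channels (nodes, errors, done), while the caller drains them in a for/select loop. As a rough, self-contained sketch of that shape — with hypothetical stand-in types, not this project's Parser or Node — it could be reduced to:

package main

import (
	"errors"
	"fmt"
)

// producer stands in for Parser: it emits values on out, a fatal problem on
// errs, and announces a clean finish on done.
type producer struct {
	out  chan string
	errs chan error
	done chan bool
}

func newProducer() *producer {
	return &producer{make(chan string), make(chan error), make(chan bool)}
}

// run sends every item in order, or reports the first bad item and stops.
func (p *producer) run(items []string) {
	for _, it := range items {
		if it == "" {
			p.errs <- errors.New("empty item")
			return // fatal error: no done signal
		}
		p.out <- it
	}
	p.done <- true
}

// drain mirrors the for/select loop in Run: collect values until either an
// error or the done signal arrives.
func drain(p *producer) ([]string, error) {
	var collected []string
	for {
		select {
		case v := <-p.out:
			collected = append(collected, v)
		case err := <-p.errs:
			return nil, err
		case <-p.done:
			return collected, nil
		}
	}
}

func main() {
	p := newProducer()
	go p.run([]string{"2011/07/17:", "el1: 1.22"})
	values, err := drain(p)
	fmt.Println(values, err)
}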
51 changes: 25 additions & 26 deletions parser.go
@@ -22,39 +22,48 @@ func NewDefaultParserOptions() *ParserOptions {
 // Parser is the parser data structure
 type Parser struct {
 	parserOptions *ParserOptions
-	processor     *Processor
+	nodes         chan *Node
+	errors        chan *BreakingError
+	done          chan bool
 }
 
 // NewParser returns new parser
-func NewParser(parserOptions *ParserOptions, processor *Processor) *Parser {
+func NewParser(parserOptions *ParserOptions) *Parser {
 	return &Parser{
 		parserOptions,
-		processor,
+		make(chan *Node),
+		make(chan *BreakingError),
+		make(chan bool),
 	}
 }
 
-func (p *Parser) parseFile(fileName string) (*NodeList, error) {
+func (p *Parser) parseFile(fileName string) {
 	f, err := os.Open(fileName)
 	if err != nil {
-		return nil, NewBreakingError(err.Error(), exitErrorOpeningFile)
+		p.errors <- NewBreakingError(err.Error(), exitErrorOpeningFile)
+		return
 	}
 	defer f.Close()
-	return p.parseStream(bufio.NewReader(f))
+	p.parseStream(f)
 }
 
-func (p *Parser) parseStream(input *bufio.Reader) (*NodeList, error) {
+func (p *Parser) parseStream(reader io.Reader) {
 	var node *Node
-	db := NewNodeList()
 	lineNumber := 0
 
+	input := bufio.NewReader(reader)
 	for {
 		bytes, _, err := input.ReadLine()
 		// handle errors
 		if err == io.EOF {
+			// push last node
+			if node != nil {
+				p.nodes <- node
+			}
 			break
 		}
 		if err != nil {
-			return nil, NewBreakingError(err.Error(), exitErrorIO)
+			p.errors <- NewBreakingError(err.Error(), exitErrorIO)
+			return
 		}
 
 		line := mytrim(string(bytes))
@@ -69,11 +78,7 @@ func (p *Parser) parseStream(input *bufio.Reader) (*NodeList, error) {
 		//new nodes start at the beginning of the line
 		if bytes[0] != 32 && bytes[0] != 8 {
 			if node != nil {
-				if p.processor != nil {
-					p.processor.process(node)
-				} else {
-					db.push(node)
-				}
+				p.nodes <- node
 			}
 			node = NewNode(line)
 			continue
@@ -84,21 +89,23 @@ func (p *Parser) parseStream(input *bufio.Reader) (*NodeList, error) {
 		separator := strings.LastIndexAny(line, "\t ")
 
 		if separator == -1 {
-			return nil, NewBreakingError(
+			p.errors <- NewBreakingError(
 				fmt.Sprintf("Bad syntax on line %d, \"%s\".", lineNumber, line),
 				exitErrorBadSyntax,
 			)
+			return
 		}
 
 		ename := mytrim(line[0:separator])
 		snum := mytrim(line[separator:])
 		enum, err := strconv.ParseFloat(snum, 32)
 
 		if err != nil {
-			return nil, NewBreakingError(
+			p.errors <- NewBreakingError(
 				fmt.Sprintf("Error converting \"%s\" to float on line %d \"%s\".", snum, lineNumber, line),
 				exitErrorConversion,
 			)
+			return
 		}
 		if ndx, exists := node.elements.index(ename); exists {
 			(*node.elements)[ndx].val += float32(enum)
@@ -107,13 +114,5 @@ func (p *Parser) parseStream(input *bufio.Reader) (*NodeList, error) {
 			}
 		}
 	}
-
-	if node != nil {
-		if p.processor != nil {
-			p.processor.process(node)
-		} else {
-			db.push(node)
-		}
-	}
-	return db, nil
+	p.done <- true
 }
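
The producer side of the change follows a simple contract: parseStream sends each finished node on p.nodes, sends a single BreakingError and returns on any failure (without ever signalling done), and sends on p.done only after a clean end of input. Because the channels are unbuffered, every send blocks until the consumer's select receives it, which is why parseFile is always started with go. A stripped-down sketch of that contract over an io.Reader — again with hypothetical types rather than the project's Parser — might look like:

package main

import (
	"bufio"
	"fmt"
	"io"
	"strings"
)

// lineScanner mimics the shape of the new parseStream: lines stream out on
// out, a failure is reported once on errs, and done marks a clean finish.
type lineScanner struct {
	out  chan string
	errs chan error
	done chan bool
}

func newLineScanner() *lineScanner {
	return &lineScanner{make(chan string), make(chan error), make(chan bool)}
}

// scan is meant to run in its own goroutine, exactly like parser.parseFile.
func (s *lineScanner) scan(r io.Reader) {
	in := bufio.NewReader(r)
	for {
		line, err := in.ReadString('\n')
		if line != "" {
			s.out <- strings.TrimRight(line, "\n") // blocks until received
		}
		if err == io.EOF {
			break
		}
		if err != nil {
			s.errs <- err // fatal: report and stop, no done signal
			return
		}
	}
	s.done <- true // clean end of input
}

func main() {
	s := newLineScanner()
	go s.scan(strings.NewReader("2011/07/17:\n  el1: 1.22\n"))
	for {
		select {
		case l := <-s.out:
			fmt.Println("line:", l)
		case err := <-s.errs:
			fmt.Println("error:", err)
			return
		case <-s.done:
			return
		}
	}
}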
55 changes: 55 additions & 0 deletions parser_test.go
@@ -0,0 +1,55 @@
+package main
+
+import (
+	. "github.com/smartystreets/goconvey/convey"
+	"strings"
+	"testing"
+)
+
+func readChannels(parser *Parser) (*NodeList, error) {
+	nodeList := NewNodeList()
+	for {
+		select {
+		case node := <-parser.nodes:
+			nodeList.push(node)
+		case breakingError := <-parser.errors:
+			return nil, breakingError
+		case <-parser.done:
+			return nodeList, nil
+		}
+	}
+}
+
+func TestParser(t *testing.T) {
+	Convey("Given new parser", t, func() {
+		parser := NewParser(NewDefaultParserOptions())
+		Convey("It completes successfully on empty string", func() {
+			go parser.parseStream(strings.NewReader(""))
+			nodeList, error := readChannels(parser)
+			So(len(*nodeList), ShouldEqual, 0)
+			So(error, ShouldBeNil)
+		})
+
+		Convey("It processes valid node", func() {
+			file := `2011/07/17:
+  el1: 1.22
+  ел 2: 4
+  el/3: 3`
+			go parser.parseStream(strings.NewReader(file))
+			nodeList, err := readChannels(parser)
+			So(len(*nodeList), ShouldEqual, 1)
+			So(err, ShouldBeNil)
+			node := (*nodeList)["2011/07/17"]
+			So(node.header, ShouldEqual, "2011/07/17")
+			elements := node.elements
+			So(elements, ShouldNotBeNil)
+			So(len(*elements), ShouldEqual, 3)
+			So((*elements)[0].name, ShouldEqual, "el1")
+			So((*elements)[0].val, ShouldEqual, 1.22)
+			So((*elements)[1].name, ShouldEqual, "ел 2")
+			So((*elements)[1].val, ShouldEqual, 4.0)
+			So((*elements)[2].name, ShouldEqual, "el/3")
+			So((*elements)[2].val, ShouldEqual, 3.0)
+		})
+	})
+}
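
The new test drives the channel contract through GoConvey, but the same check can be expressed with only the standard testing package. A hypothetical equivalent of the empty-input case — not part of this commit, and assumed to live in the same main package next to parser_test.go so it can reuse readChannels:

package main

import (
	"strings"
	"testing"
)

// TestParseStreamEmptyInput checks the empty-input behaviour without GoConvey,
// draining the parser's channels through the readChannels helper above.
func TestParseStreamEmptyInput(t *testing.T) {
	parser := NewParser(NewDefaultParserOptions())
	go parser.parseStream(strings.NewReader(""))

	nodeList, err := readChannels(parser)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if got := len(*nodeList); got != 0 {
		t.Fatalf("expected an empty node list, got %d nodes", got)
	}
}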
