Skip to content

Commit

Permalink
io.Reader to []byte
Browse files Browse the repository at this point in the history
  • Loading branch information
tdewolff committed Nov 6, 2017
1 parent f49ea8b commit 4a5fd61
Show file tree
Hide file tree
Showing 14 changed files with 74 additions and 66 deletions.
4 changes: 2 additions & 2 deletions css/lex.go
Original file line number Diff line number Diff line change
Expand Up @@ -144,9 +144,9 @@ type Lexer struct {
}

// NewLexer returns a new Lexer for a given byte slice.
func NewLexer(r io.Reader) *Lexer {
func NewLexer(b []byte) *Lexer {
return &Lexer{
buffer.NewMemLexer(r),
buffer.NewMemLexer(b),
}
}

Expand Down
9 changes: 4 additions & 5 deletions css/lex_test.go
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package css // import "github.com/tdewolff/parse/css"

import (
"bytes"
"fmt"
"io"
"strconv"
Expand All @@ -12,7 +11,7 @@ import (

func helperStringify(t *testing.T, input string) string {
s := ""
l := NewLexer(bytes.NewBufferString(input))
l := NewLexer([]byte(input))
for i := 0; i < 10; i++ {
tt, text := l.Next()
if tt == ErrorToken {
Expand Down Expand Up @@ -121,7 +120,7 @@ func TestTokens(t *testing.T) {
}
for _, tt := range tokenTests {
stringify := helperStringify(t, tt.css)
l := NewLexer(bytes.NewBufferString(tt.css))
l := NewLexer([]byte(tt.css))
i := 0
for {
token, _ := l.Next()
Expand All @@ -144,13 +143,13 @@ func TestTokens(t *testing.T) {
test.String(t, EmptyToken.String(), "Empty")
test.String(t, CustomPropertyValueToken.String(), "CustomPropertyValue")
test.String(t, TokenType(100).String(), "Invalid(100)")
test.That(t, NewLexer(bytes.NewBufferString("x")).consumeBracket() == ErrorToken, "consumeBracket on 'x' must return error")
test.That(t, NewLexer([]byte("x")).consumeBracket() == ErrorToken, "consumeBracket on 'x' must return error")
}

////////////////////////////////////////////////////////////////

func ExampleNewLexer() {
l := NewLexer(bytes.NewBufferString("color: red;"))
l := NewLexer([]byte("color: red;"))
out := ""
for {
tt, data := l.Next()
Expand Down
5 changes: 2 additions & 3 deletions css/parse.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ package css // import "github.com/tdewolff/parse/css"
import (
"bytes"
"errors"
"io"
"strconv"

"github.com/tdewolff/parse"
Expand Down Expand Up @@ -92,8 +91,8 @@ type Parser struct {
}

// NewParser returns a new CSS parser from a byte slice. isInline specifies whether this is an inline style attribute.
func NewParser(r io.Reader, isInline bool) *Parser {
l := NewLexer(r)
func NewParser(b []byte, isInline bool) *Parser {
l := NewLexer(b)
p := &Parser{
l: l,
}
Expand Down
27 changes: 13 additions & 14 deletions css/parse_test.go
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package css // import "github.com/tdewolff/parse/css"

import (
"bytes"
"fmt"
"io"
"testing"
Expand Down Expand Up @@ -103,7 +102,7 @@ func TestParse(t *testing.T) {
}
for _, tt := range parseTests {
output := ""
p := NewParser(bytes.NewBufferString(tt.css), tt.inline)
p := NewParser([]byte(tt.css), tt.inline)
for {
grammar, _, data := p.Next()
data = parse.Copy(data)
Expand Down Expand Up @@ -162,7 +161,7 @@ func TestParseError(t *testing.T) {
{true, "--custom-variable:0", io.EOF},
}
for _, tt := range parseErrorTests {
p := NewParser(bytes.NewBufferString(tt.css), tt.inline)
p := NewParser([]byte(tt.css), tt.inline)
for {
grammar, _, _ := p.Next()
if grammar == ErrorGrammar {
Expand All @@ -173,21 +172,21 @@ func TestParseError(t *testing.T) {
}
}

func TestReader(t *testing.T) {
input := "x:a;"
p := NewParser(test.NewPlainReader(bytes.NewBufferString(input)), true)
for {
grammar, _, _ := p.Next()
if grammar == ErrorGrammar {
break
}
}
}
// func TestReader(t *testing.T) {
// input := "x:a;"
// p := NewParser(test.NewPlainReader(bytes.NewBufferString(input)), true)
// for {
// grammar, _, _ := p.Next()
// if grammar == ErrorGrammar {
// break
// }
// }
// }

////////////////////////////////////////////////////////////////

func ExampleNewParser() {
p := NewParser(bytes.NewBufferString("color: red;"), true) // false because this is the content of an inline style attribute
p := NewParser([]byte("color: red;"), true) // true because this is the content of an inline style attribute
out := ""
for {
gt, _, data := p.Next()
Expand Down
8 changes: 2 additions & 6 deletions css/util.go
Original file line number Diff line number Diff line change
@@ -1,20 +1,16 @@
package css // import "github.com/tdewolff/parse/css"

import (
"bytes"
)

// IsIdent returns true if the bytes are a valid identifier.
func IsIdent(b []byte) bool {
l := NewLexer(bytes.NewBuffer(b))
l := NewLexer(b)
l.consumeIdentToken()
l.r.Restore()
return l.r.Pos() == len(b)
}

// IsURLUnquoted returns true if the bytes are a valid unquoted URL.
func IsURLUnquoted(b []byte) bool {
l := NewLexer(bytes.NewBuffer(b))
l := NewLexer(b)
l.consumeUnquotedURL()
l.r.Restore()
return l.r.Pos() == len(b)
Expand Down
5 changes: 2 additions & 3 deletions html/lex.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ package html // import "github.com/tdewolff/parse/html"

import (
"errors"
"io"
"strconv"

"github.com/tdewolff/buffer"
Expand Down Expand Up @@ -77,9 +76,9 @@ type Lexer struct {
}

// NewLexer returns a new Lexer for a given byte slice.
func NewLexer(r io.Reader) *Lexer {
func NewLexer(b []byte) *Lexer {
return &Lexer{
r: buffer.NewMemLexer(r),
r: buffer.NewMemLexer(b),
}
}

Expand Down
13 changes: 6 additions & 7 deletions html/lex_test.go
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package html // import "github.com/tdewolff/parse/html"

import (
"bytes"
"fmt"
"io"
"strconv"
Expand All @@ -12,7 +11,7 @@ import (

func helperStringify(t *testing.T, input string) string {
s := ""
l := NewLexer(bytes.NewBufferString(input))
l := NewLexer([]byte(input))
for i := 0; i < 10; i++ {
tt, data := l.Next()
if tt == ErrorToken {
Expand Down Expand Up @@ -91,7 +90,7 @@ func TestTokens(t *testing.T) {
}
for _, tt := range tokenTests {
stringify := helperStringify(t, tt.html)
l := NewLexer(bytes.NewBufferString(tt.html))
l := NewLexer([]byte(tt.html))
i := 0
for {
token, _ := l.Next()
Expand Down Expand Up @@ -126,7 +125,7 @@ func TestTags(t *testing.T) {
}
for _, tt := range tagTests {
stringify := helperStringify(t, tt.html)
l := NewLexer(bytes.NewBufferString(tt.html))
l := NewLexer([]byte(tt.html))
for {
token, _ := l.Next()
if token == ErrorToken {
Expand Down Expand Up @@ -159,7 +158,7 @@ func TestAttributes(t *testing.T) {
}
for _, tt := range attributeTests {
stringify := helperStringify(t, tt.attr)
l := NewLexer(bytes.NewBufferString(tt.attr))
l := NewLexer([]byte(tt.attr))
i := 0
for {
token, _ := l.Next()
Expand Down Expand Up @@ -188,7 +187,7 @@ func TestErrors(t *testing.T) {
}
for _, tt := range errorTests {
stringify := helperStringify(t, tt.html)
l := NewLexer(bytes.NewBufferString(tt.html))
l := NewLexer([]byte(tt.html))
for {
token, _ := l.Next()
if token == ErrorToken {
Expand Down Expand Up @@ -261,7 +260,7 @@ func BenchmarkWhitespace3(b *testing.B) {
////////////////////////////////////////////////////////////////

func ExampleNewLexer() {
l := NewLexer(bytes.NewBufferString("<span class='user'>John Doe</span>"))
l := NewLexer([]byte("<span class='user'>John Doe</span>"))
out := ""
for {
tt, data := l.Next()
Expand Down
5 changes: 2 additions & 3 deletions js/lex.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
package js // import "github.com/tdewolff/parse/js"

import (
"io"
"strconv"
"unicode"

Expand Down Expand Up @@ -97,9 +96,9 @@ type Lexer struct {
}

// NewLexer returns a new Lexer for a given byte slice.
func NewLexer(r io.Reader) *Lexer {
func NewLexer(b []byte) *Lexer {
return &Lexer{
r: buffer.NewMemLexer(r),
r: buffer.NewMemLexer(b),
stack: make([]ParsingContext, 0),
state: ExprState,
emptyLine: true,
Expand Down
7 changes: 3 additions & 4 deletions js/lex_test.go
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package js // import "github.com/tdewolff/parse/js"

import (
"bytes"
"fmt"
"io"
"strconv"
Expand All @@ -12,7 +11,7 @@ import (

func helperStringify(t *testing.T, input string, index int) string {
s := ""
l := NewLexer(bytes.NewBufferString(input))
l := NewLexer([]byte(input))
for i := 0; i <= index; i++ {
tt, data := l.Next()
if tt == ErrorToken {
Expand Down Expand Up @@ -129,7 +128,7 @@ func TestTokens(t *testing.T) {
passed := 0

for _, tt := range tokenTests {
l := NewLexer(bytes.NewBufferString(tt.js))
l := NewLexer([]byte(tt.js))
i := 0
j := 0
for {
Expand Down Expand Up @@ -171,7 +170,7 @@ func TestTokens(t *testing.T) {
////////////////////////////////////////////////////////////////

func ExampleNewLexer() {
l := NewLexer(bytes.NewBufferString("var x = 'lorem ipsum';"))
l := NewLexer([]byte("var x = 'lorem ipsum';"))
out := ""
for {
tt, data := l.Next()
Expand Down
5 changes: 2 additions & 3 deletions json/parse.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ package json // import "github.com/tdewolff/parse/json"

import (
"errors"
"io"
"strconv"

"github.com/tdewolff/buffer"
Expand Down Expand Up @@ -110,9 +109,9 @@ type Parser struct {
}

// NewParser returns a new Parser for a given byte slice.
func NewParser(r io.Reader) *Parser {
func NewParser(b []byte) *Parser {
return &Parser{
r: buffer.NewMemLexer(r),
r: buffer.NewMemLexer(b),
state: []State{ValueState},
}
}
Expand Down
11 changes: 5 additions & 6 deletions json/parse_test.go
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package json // import "github.com/tdewolff/parse/json"

import (
"bytes"
"fmt"
"io"
"strconv"
Expand All @@ -12,7 +11,7 @@ import (

func helperStringify(t *testing.T, input string) string {
s := ""
p := NewParser(bytes.NewBufferString(input))
p := NewParser([]byte(input))
for i := 0; i < 10; i++ {
gt, text := p.Next()
if gt == ErrorGrammar {
Expand Down Expand Up @@ -54,7 +53,7 @@ func TestGrammars(t *testing.T) {
}
for _, tt := range grammarTests {
stringify := helperStringify(t, tt.json)
p := NewParser(bytes.NewBufferString(tt.json))
p := NewParser([]byte(tt.json))
i := 0
for {
grammar, _ := p.Next()
Expand Down Expand Up @@ -100,7 +99,7 @@ func TestGrammarsError(t *testing.T) {
}
for _, tt := range grammarErrorTests {
stringify := helperStringify(t, tt.json)
p := NewParser(bytes.NewBufferString(tt.json))
p := NewParser([]byte(tt.json))
for {
grammar, _ := p.Next()
if grammar == ErrorGrammar {
Expand All @@ -122,7 +121,7 @@ func TestStates(t *testing.T) {
}
for _, tt := range stateTests {
stringify := helperStringify(t, tt.json)
p := NewParser(bytes.NewBufferString(tt.json))
p := NewParser([]byte(tt.json))
i := 0
for {
grammar, _ := p.Next()
Expand All @@ -146,7 +145,7 @@ func TestStates(t *testing.T) {
////////////////////////////////////////////////////////////////

func ExampleNewParser() {
p := NewParser(bytes.NewBufferString(`{"key": 5}`))
p := NewParser([]byte(`{"key": 5}`))
out := ""
for {
state := p.State()
Expand Down
23 changes: 23 additions & 0 deletions util_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -174,3 +174,26 @@ func BenchmarkWhitespaceIf5(b *testing.B) {
}
}
}

// ByteSliceTest is the package-level comparison target shared by the
// benchmarks below, so each benchmark isolates the cost of constructing
// (or not constructing) the other operand inside the timed loop.
var ByteSliceTest = []byte("test")

// BenchmarkByteSliceConst compares against the package-level ByteSliceTest,
// so no new slice is allocated inside the timed loop.
func BenchmarkByteSliceConst(b *testing.B) {
	needle := []byte("test")
	for n := 0; n < b.N; n++ {
		bytes.Compare(needle, ByteSliceTest)
	}
}

// BenchmarkByteSliceAlloc builds the second operand from a byte composite
// literal on every iteration, measuring the per-iteration allocation cost.
func BenchmarkByteSliceAlloc(b *testing.B) {
	needle := []byte("test")
	for n := 0; n < b.N; n++ {
		bytes.Compare(needle, []byte{'t', 'e', 's', 't'})
	}
}

// BenchmarkByteSliceAllocString converts a string literal to []byte on every
// iteration, measuring the cost of the string-to-slice conversion.
func BenchmarkByteSliceAllocString(b *testing.B) {
	needle := []byte("test")
	for n := 0; n < b.N; n++ {
		bytes.Compare(needle, []byte("test"))
	}
}

0 comments on commit 4a5fd61

Please sign in to comment.