Fix tests and naming.
notbdu committed Mar 8, 2019
1 parent 98ee8fa commit 4f44154
Showing 18 changed files with 120 additions and 86 deletions.
8 changes: 4 additions & 4 deletions calculation/calculation_op.go
@@ -93,7 +93,7 @@ func (f Op) MustNewResult(t field.ValueType) Result {

// String returns the string representation of the calculation operator.
func (f Op) String() string {
if s, exists := opBytess[f]; exists {
if s, exists := opBytes[f]; exists {
return s
}
// nolint: goconst
@@ -155,7 +155,7 @@ var (
Min: struct{}{},
Max: struct{}{},
}
opBytess = map[Op]string{
opBytes = map[Op]string{
Count: "COUNT",
Sum: "SUM",
Avg: "AVG",
@@ -230,8 +230,8 @@ var (
)

func init() {
stringToOps = make(map[string]Op, len(opBytess))
for k, v := range opBytess {
stringToOps = make(map[string]Op, len(opBytes))
for k, v := range opBytes {
stringToOps[v] = k
}
}
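
Note on calculation/calculation_op.go: the `opBytess` → `opBytes` rename touches the usual pattern of a single operator-to-string table that backs both `String()` and a reverse lookup built in `init()`. The sketch below is a minimal, self-contained illustration of that pattern; the `Op` constants and the fallback string are assumptions for illustration and differ from the full operator set in the package.

```go
package main

import "fmt"

// Op mirrors the calculation operator type; an integer enum and a tiny
// operator set are assumed here purely for illustration.
type Op int

const (
	UnknownOp Op = iota
	Count
	Sum
)

// opBytes maps each operator to its canonical string form (as in the diff).
var opBytes = map[Op]string{
	Count: "COUNT",
	Sum:   "SUM",
}

// stringToOps is the reverse lookup, built once from opBytes at init time.
var stringToOps map[string]Op

func init() {
	stringToOps = make(map[string]Op, len(opBytes))
	for k, v := range opBytes {
		stringToOps[v] = k
	}
}

// String returns the string representation of the calculation operator.
func (f Op) String() string {
	if s, exists := opBytes[f]; exists {
		return s
	}
	return "unknown"
}

func main() {
	fmt.Println(Sum.String(), stringToOps["COUNT"] == Count) // SUM true
}
```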
22 changes: 11 additions & 11 deletions document/field/value_hash_bench_test.go
@@ -15,9 +15,9 @@ const (
testBenchMapSize = 100000
)

func BenchmarkBuiltinBytesMap(b *testing.B) {
prefix := genBenchBytesKeyPrefix(testBenchKeyPrefixSize)
m := genBenchBuiltinBytesMap(testBenchKeyPrefixSize, testBenchMapSize)
func BenchmarkBuiltinStringMap(b *testing.B) {
prefix := genBenchStringKeyPrefix(testBenchKeyPrefixSize)
m := genBenchBuiltinStringMap(testBenchKeyPrefixSize, testBenchMapSize)
testBenchKey := prefix + "0"
b.ResetTimer()

@@ -30,9 +30,9 @@ func BenchmarkBuiltinBytesMap(b *testing.B) {
}
}

func BenchmarkCustomBytesMap(b *testing.B) {
prefix := genBenchBytesKeyPrefix(testBenchKeyPrefixSize)
m := genBenchCustomBytesMap(testBenchKeyPrefixSize, testBenchMapSize)
func BenchmarkCustomStringMap(b *testing.B) {
prefix := genBenchStringKeyPrefix(testBenchKeyPrefixSize)
m := genBenchCustomStringMap(testBenchKeyPrefixSize, testBenchMapSize)
testBenchKey := prefix + "0"
b.ResetTimer()

@@ -46,7 +46,7 @@ func BenchmarkCustomBytesMap(b *testing.B) {
}
}

func genBenchBytesKeyPrefix(
func genBenchStringKeyPrefix(
keyPrefixSize int,
) string {
var b bytes.Buffer
@@ -57,11 +57,11 @@ func genBenchBytesKeyPrefix(
}

// nolint: unparam
func genBenchBuiltinBytesMap(
func genBenchBuiltinStringMap(
keyPrefixSize int,
mapSize int,
) map[string]struct{} {
prefix := genBenchBytesKeyPrefix(keyPrefixSize)
prefix := genBenchStringKeyPrefix(keyPrefixSize)
m := make(map[string]struct{}, mapSize)
for i := 0; i < mapSize; i++ {
key := fmt.Sprintf("%s%d", prefix, i)
@@ -71,11 +71,11 @@ func genBenchBuiltinBytesMap(
}

// nolint: unparam
func genBenchCustomBytesMap(
func genBenchCustomStringMap(
keyPrefixSize int,
mapSize int,
) map[xhash.Hash]struct{} {
prefix := genBenchBytesKeyPrefix(keyPrefixSize)
prefix := genBenchStringKeyPrefix(keyPrefixSize)
m := make(map[xhash.Hash]struct{}, mapSize)
for i := 0; i < mapSize; i++ {
key := fmt.Sprintf("%s%d", prefix, i)
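
Note on document/field/value_hash_bench_test.go: the renamed benchmarks compare a plain `map[string]struct{}` against a map keyed by a pre-computed `xhash.Hash`. The `xhash` package's API is not part of this diff, so the sketch below substitutes a 64-bit FNV-1a digest purely to illustrate the pre-hashed-key variant being measured.

```go
package main

import (
	"fmt"
	"hash/fnv"
)

// hashKey stands in for xhash.Hash, whose definition is not shown in this
// diff; a 64-bit FNV-1a digest is assumed purely for illustration.
type hashKey uint64

func hashString(s string) hashKey {
	h := fnv.New64a()
	h.Write([]byte(s)) // fnv's Write never returns an error
	return hashKey(h.Sum64())
}

func main() {
	// Built-in string-keyed set, as in genBenchBuiltinStringMap.
	builtin := map[string]struct{}{"prefix0": {}}

	// Pre-hashed set, as in genBenchCustomStringMap: the key is hashed once
	// and the map is indexed by the fixed-size digest.
	custom := map[hashKey]struct{}{hashString("prefix0"): {}}

	_, ok1 := builtin["prefix0"]
	_, ok2 := custom[hashString("prefix0")]
	fmt.Println(ok1, ok2) // true true
}
```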
8 changes: 4 additions & 4 deletions filter/filter_combinator.go
@@ -26,7 +26,7 @@ func newCombinator(str string) (Combinator, error) {

// String returns the string representation of the filter combinator.
func (f Combinator) String() string {
if s, exists := filterCombinatorBytess[f]; exists {
if s, exists := filterCombinatorBytes[f]; exists {
return s
}
// nolint: goconst
@@ -68,16 +68,16 @@ func (f *Combinator) ToProto() (servicepb.OptionalFilterCombinator, error) {
}

var (
filterCombinatorBytess = map[Combinator]string{
filterCombinatorBytes = map[Combinator]string{
And: "AND",
Or: "OR",
}
stringToCombinators map[string]Combinator
)

func init() {
stringToCombinators = make(map[string]Combinator, len(filterCombinatorBytess))
for k, v := range filterCombinatorBytess {
stringToCombinators = make(map[string]Combinator, len(filterCombinatorBytes))
for k, v := range filterCombinatorBytes {
stringToCombinators[v] = k
}
}
14 changes: 7 additions & 7 deletions filter/filter_op.go
@@ -36,7 +36,7 @@ const (

// newOp creates a new filter operator.
func newOp(str string) (Op, error) {
if f, exists := bytesToOps[str]; exists {
if f, exists := stringToOps[str]; exists {
return f, nil
}
return UnknownOp, fmt.Errorf("unknown filter op bytes: %s", str)
@@ -252,7 +252,7 @@ func (f Op) MultiTypeCombinator() (Combinator, error) {

// String returns the string representation of the filter operator.
func (f Op) String() string {
if s, exists := opBytess[f]; exists {
if s, exists := opStrings[f]; exists {
return s
}
// nolint: goconst
@@ -566,7 +566,7 @@ var (
},
}

opBytess = map[Op]string{
opStrings = map[Op]string{
Equals: "=",
NotEquals: "!=",
LargerThan: ">",
@@ -584,7 +584,7 @@ var (
Exists: "exists",
DoesNotExist: "notExists",
}
bytesToOps map[string]Op
stringToOps map[string]Op
)

func addAllowedTypes(op Op, lhsType, rhsType field.ValueType) {
@@ -634,8 +634,8 @@ func init() {
addAllowedTypes(op, field.TimeType, field.TimeType)
}

bytesToOps = make(map[string]Op, len(opBytess))
for k, v := range opBytess {
bytesToOps[v] = k
stringToOps = make(map[string]Op, len(opStrings))
for k, v := range opStrings {
stringToOps[v] = k
}
}
2 changes: 1 addition & 1 deletion generated/generics/generate.go
@@ -123,7 +123,7 @@
//go:generate sh -c "cat $GOPATH/src/$PACKAGE/x/heap/generic.go | awk '/^package/{i++}i' | genny -out=$GOPATH/src/$PACKAGE/query/bool_result_group_heap.gen.go -pkg=query gen \"GenericValue=boolResultGroup ValueHeap=boolResultGroupHeap NewHeap=newBoolResultGroupHeap TopNValues=topNBools NewTopValues=newTopNBools ValueAddOptions=boolAddOptions\""
//go:generate sh -c "cat $GOPATH/src/$PACKAGE/x/heap/generic.go | awk '/^package/{i++}i' | genny -out=$GOPATH/src/$PACKAGE/query/int_result_group_heap.gen.go -pkg=query gen \"GenericValue=intResultGroup ValueHeap=intResultGroupHeap NewHeap=newIntResultGroupHeap TopNValues=topNInts NewTopValues=newTopNInts ValueAddOptions=intAddOptions\""
//go:generate sh -c "cat $GOPATH/src/$PACKAGE/x/heap/generic.go | awk '/^package/{i++}i' | genny -out=$GOPATH/src/$PACKAGE/query/double_result_group_heap.gen.go -pkg=query gen \"GenericValue=doubleResultGroup ValueHeap=doubleResultGroupHeap NewHeap=newDoubleResultGroupHeap TopNValues=topNDoubles NewTopValues=newTopNDoubles ValueAddOptions=doubleAddOptions\""
//go:generate sh -c "cat $GOPATH/src/$PACKAGE/x/heap/generic.go | awk '/^package/{i++}i' | genny -out=$GOPATH/src/$PACKAGE/query/bytes_result_group_heap.gen.go -pkg=query gen \"GenericValue=bytesResultGroup ValueHeap=bytesResultGroupHeap NewHeap=newBytesResultGroupHeap TopNValues=topNBytess NewTopValues=newTopNBytess ValueAddOptions=bytesAddOptions\""
//go:generate sh -c "cat $GOPATH/src/$PACKAGE/x/heap/generic.go | awk '/^package/{i++}i' | genny -out=$GOPATH/src/$PACKAGE/query/bytes_result_group_heap.gen.go -pkg=query gen \"GenericValue=bytesResultGroup ValueHeap=bytesResultGroupHeap NewHeap=newBytesResultGroupHeap TopNValues=topNBytes NewTopValues=newTopNBytes ValueAddOptions=bytesAddOptions\""
//go:generate sh -c "cat $GOPATH/src/$PACKAGE/x/heap/generic.go | awk '/^package/{i++}i' | genny -out=$GOPATH/src/$PACKAGE/query/time_result_group_heap.gen.go -pkg=query gen \"GenericValue=timeResultGroup ValueHeap=timeResultGroupHeap NewHeap=newTimeResultGroupHeap TopNValues=topNTimes NewTopValues=newTopNTimes ValueAddOptions=timeAddOptions\""
//go:generate sh -c "cat $GOPATH/src/$PACKAGE/x/heap/generic.go | awk '/^package/{i++}i' | genny -out=$GOPATH/src/$PACKAGE/storage/doc_id_values_heap.gen.go -pkg=storage gen \"GenericValue=docIDValues ValueHeap=docIDValuesHeap NewHeap=newDocIDValuesHeap TopNValues=topNDocIDValues NewTopValues=newTopNDocIDValues ValueAddOptions=docIDValuesAddOptions\""

24 changes: 18 additions & 6 deletions index/field/at_position_bytes_field_iterator_test.go
@@ -37,13 +37,19 @@ func TestNewAtPositionBytesFieldIteratorForwardOnly(t *testing.T) {
valsIt := iterator.NewMockForwardBytesIterator(ctrl)
gomock.InOrder(
valsIt.EXPECT().Next().Return(true),
valsIt.EXPECT().Current().Return([]byte("a")),
valsIt.EXPECT().Current().Return(iterator.Bytes{
Data: []byte("a"),
}),
valsIt.EXPECT().Next().Return(true),
valsIt.EXPECT().Next().Return(true),
valsIt.EXPECT().Current().Return([]byte("c")),
valsIt.EXPECT().Current().Return(iterator.Bytes{
Data: []byte("c"),
}),
valsIt.EXPECT().Next().Return(true),
valsIt.EXPECT().Next().Return(true),
valsIt.EXPECT().Current().Return([]byte("e")),
valsIt.EXPECT().Current().Return(iterator.Bytes{
Data: []byte("e"),
}),
valsIt.EXPECT().Close(),
)

@@ -96,11 +96,17 @@ func TestNewAtPositionBytesFieldIteratorSeekable(t *testing.T) {
gomock.InOrder(
valsIt.EXPECT().Next().Return(true),
valsIt.EXPECT().SeekForward(0).Return(nil),
valsIt.EXPECT().Current().Return([]byte("a")),
valsIt.EXPECT().Current().Return(iterator.Bytes{
Data: []byte("a"),
}),
valsIt.EXPECT().SeekForward(2).Return(nil),
valsIt.EXPECT().Current().Return([]byte("c")),
valsIt.EXPECT().Current().Return(iterator.Bytes{
Data: []byte("c"),
}),
valsIt.EXPECT().SeekForward(2).Return(nil),
valsIt.EXPECT().Current().Return([]byte("e")),
valsIt.EXPECT().Current().Return(iterator.Bytes{
Data: []byte("e"),
}),
valsIt.EXPECT().Close(),
)

20 changes: 15 additions & 5 deletions index/field/bytes_field_iterator_test.go
@@ -35,15 +35,25 @@ func TestBytesFieldIterator(t *testing.T) {
valsIt := iterator.NewMockForwardBytesIterator(ctrl)
gomock.InOrder(
valsIt.EXPECT().Next().Return(true),
valsIt.EXPECT().Current().Return([]byte("a")),
valsIt.EXPECT().Current().Return(iterator.Bytes{
Data: []byte("a"),
}),
valsIt.EXPECT().Next().Return(true),
valsIt.EXPECT().Current().Return([]byte("b")),
valsIt.EXPECT().Current().Return(iterator.Bytes{
Data: []byte("b"),
}),
valsIt.EXPECT().Next().Return(true),
valsIt.EXPECT().Current().Return([]byte("c")),
valsIt.EXPECT().Current().Return(iterator.Bytes{
Data: []byte("c"),
}),
valsIt.EXPECT().Next().Return(true),
valsIt.EXPECT().Current().Return([]byte("d")),
valsIt.EXPECT().Current().Return(iterator.Bytes{
Data: []byte("d"),
}),
valsIt.EXPECT().Next().Return(true),
valsIt.EXPECT().Current().Return([]byte("e")),
valsIt.EXPECT().Current().Return(iterator.Bytes{
Data: []byte("e"),
}),
valsIt.EXPECT().Next().Return(false),
valsIt.EXPECT().Err().Return(nil),
valsIt.EXPECT().Close(),
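
Note on the two iterator test files above: the mock expectations changed because `Current()` on the mocked forward bytes iterator now yields a value wrapped in `iterator.Bytes` rather than a bare `[]byte`. The interfaces below are a hypothetical reconstruction inferred only from the calls the tests make (`Next`, `Current`, `SeekForward`, `Err`, `Close`); the real definitions in the `iterator` package may carry additional fields and methods.

```go
package iterator

// Bytes wraps a raw byte slice; only the Data field is visible in this diff,
// the real type may carry more.
type Bytes struct {
	Data []byte
}

// ForwardBytesIterator is a hypothetical sketch of the mocked iterator as the
// updated tests drive it.
type ForwardBytesIterator interface {
	Next() bool
	Current() Bytes // previously returned []byte
	Err() error
	Close()
}

// SeekableBytesIterator adds the forward seek used by the "Seekable" test.
type SeekableBytesIterator interface {
	ForwardBytesIterator
	SeekForward(n int) error
}
```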
6 changes: 3 additions & 3 deletions parser/json/parser.go
@@ -374,7 +374,7 @@ func (p *parser) parseBytes() (*value.Value, error) {

func (p *parser) parseBytesAsRaw() ([]byte, error) {
data := p.str[p.pos:]
isValid, hasEscapes, length := findBytesLen(data)
isValid, hasEscapes, length := findStringLen(data)
if !isValid {
return nil, newParseError("string", p.pos, errors.New("unterminated string literal"))
}
@@ -516,10 +516,10 @@ func (p *parser) skipWS() {
func (p *parser) eos() bool { return p.pos >= len(p.str) }
func (p *parser) current() byte { return p.str[p.pos] }

// findBytesLen tries to scan into the string literal for ending quote char to
// findStringLen tries to scan into the string literal for ending quote char to
// determine required size. The size will be exact if no escapes are present and
// may be inexact if there are escaped chars.
func findBytesLen(data string) (isValid, hasEscapes bool, length int) {
func findStringLen(data string) (isValid, hasEscapes bool, length int) {
delta := 0

for i := 0; i < len(data); i++ {
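
Note on parser/json/parser.go: `findBytesLen` → `findStringLen` matches what the helper actually operates on, a Go `string` holding the remaining input. Its body is only partially visible above, so the version below is an illustrative reimplementation of the documented contract (find the terminating quote, report whether escapes were seen), not the repository's exact code.

```go
package main

import "fmt"

// findStringLen is an illustrative sketch of the documented contract: scan
// the remaining input for the terminating quote, reporting whether the
// literal is terminated, whether it contains backslash escapes, and the
// length up to the closing quote (an upper bound when escapes exist).
func findStringLen(data string) (isValid, hasEscapes bool, length int) {
	for i := 0; i < len(data); i++ {
		switch data[i] {
		case '\\':
			hasEscapes = true
			i++ // skip the escaped character; it cannot terminate the literal
		case '"':
			return true, hasEscapes, i
		}
	}
	return false, hasEscapes, 0
}

func main() {
	fmt.Println(findStringLen(`abc"rest`))     // true false 3
	fmt.Println(findStringLen(`a\"b"rest`))    // true true 4
	fmt.Println(findStringLen(`unterminated`)) // false false 0
}
```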
6 changes: 3 additions & 3 deletions parser/json/parser_benchmark_test.go
@@ -12,15 +12,15 @@ import (
"github.com/stretchr/testify/require"
)

func BenchmarkParseRawBytes(b *testing.B) {
func BenchmarkParseRawString(b *testing.B) {
for _, s := range []string{`""`, `"a"`, `"abcd"`, `"abcdefghijk"`, `"qwertyuiopasdfghjklzxcvb"`} {
b.Run(s, func(b *testing.B) {
benchmarkParseRawBytes(b, s)
benchmarkParseRawString(b, s)
})
}
}

func benchmarkParseRawBytes(b *testing.B, s string) {
func benchmarkParseRawString(b *testing.B, s string) {
b.ReportAllocs()
b.SetBytes(int64(len(s)))
b.RunParallel(func(pb *testing.PB) {
6 changes: 3 additions & 3 deletions persist/fs/reader_bench_test.go
@@ -18,7 +18,7 @@ import (
func BenchmarkReadField(b *testing.B) {
var (
totDocs = 1024 * 1024 * 80
totRand = len(randomBytess)
totRand = len(randomBytes)
namespace = []byte("namespace")
fieldPath = []string{"foo.bar"}
segMetadata = persist.SegmentMetadata{
@@ -38,7 +38,7 @@ func BenchmarkReadField(b *testing.B) {
for i := 0; i < totDocs; i++ {
if i%2 == 0 {
builder.Add(int32(i), docfield.NewBytesUnion(iterator.Bytes{
Data: []byte(randomBytess[i%totRand]),
Data: []byte(randomBytes[i%totRand]),
}))
numDocs++
}
@@ -69,7 +69,7 @@ func BenchmarkReadField(b *testing.B) {
}

var (
randomBytess = []string{
randomBytes = []string{
"F8MCaDITND",
"tymDDCKxzJ",
"mvgWvjecnH",
26 changes: 13 additions & 13 deletions query/bytes_result_group_heap.gen.go
@@ -110,22 +110,22 @@ func (h bytesResultGroupHeap) heapify(i, n int) {
}
}

// topNBytess keeps track of the top n values in a value sequence for the
// topNBytes keeps track of the top n values in a value sequence for the
// order defined by the `lessThanFn`. In particular if `lessThanFn` defines
// an increasing order (returning true if `v1` < `v2`), the collection stores
// the top N largest values, and vice versa.
type topNBytess struct {
type topNBytes struct {
n int
lessThanFn func(v1, v2 bytesResultGroup) bool
h *bytesResultGroupHeap
}

// newTopNBytess creates a new top n value collection.
func newTopNBytess(
// newTopNBytes creates a new top n value collection.
func newTopNBytes(
n int,
lessThanFn func(v1, v2 bytesResultGroup) bool,
) *topNBytess {
return &topNBytess{
) *topNBytes {
return &topNBytes{
n: n,
lessThanFn: lessThanFn,
h: newBytesResultGroupHeap(n, lessThanFn),
@@ -140,22 +140,22 @@ type bytesAddOptions struct {
}

// Len returns the number of items in the collection.
func (v topNBytess) Len() int { return v.h.Len() }
func (v topNBytes) Len() int { return v.h.Len() }

// Cap returns the collection capacity.
func (v topNBytess) Cap() int { return v.h.Cap() }
func (v topNBytes) Cap() int { return v.h.Cap() }

// RawData returns the underlying array backing the heap in no particular order.
func (v topNBytess) RawData() []bytesResultGroup { return v.h.RawData() }
func (v topNBytes) RawData() []bytesResultGroup { return v.h.RawData() }

// Top returns the "smallest" value according to the `lessThan` function.
func (v topNBytess) Top() bytesResultGroup { return v.h.Min() }
func (v topNBytes) Top() bytesResultGroup { return v.h.Min() }

// Reset resets the internal array backing the heap.
func (v *topNBytess) Reset() { v.h.Reset() }
func (v *topNBytes) Reset() { v.h.Reset() }

// Add adds a value to the collection.
func (v *topNBytess) Add(val bytesResultGroup, opts bytesAddOptions) {
func (v *topNBytes) Add(val bytesResultGroup, opts bytesAddOptions) {
if v.h.Len() < v.n {
if opts.CopyOnAdd {
val = opts.CopyFn(val)
@@ -178,7 +178,7 @@ func (v *topNBytess) Add(val bytesResultGroup, opts bytesAddOptions) {

// SortInPlace sorts the backing heap in place and returns the sorted data.
// NB: The value collection becomes invalid after this is called.
func (v *topNBytess) SortInPlace() []bytesResultGroup {
func (v *topNBytes) SortInPlace() []bytesResultGroup {
res := v.h.SortInPlace()
v.h = nil
v.lessThanFn = nil
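
Note on query/bytes_result_group_heap.gen.go: `topNBytess` → `topNBytes` is purely a naming fix to the genny-generated collection; the behavior is the standard bounded min-heap for keeping the top N values under `lessThanFn`. Since `bytesResultGroup`'s fields are not shown in this diff, the example below illustrates the same idea with plain strings and `container/heap` rather than the generated API.

```go
package main

import (
	"container/heap"
	"fmt"
)

// stringMinHeap is a plain min-heap of strings, used to keep the N largest
// values seen so far (the same bounded-heap idea behind topNBytes).
type stringMinHeap []string

func (h stringMinHeap) Len() int            { return len(h) }
func (h stringMinHeap) Less(i, j int) bool  { return h[i] < h[j] }
func (h stringMinHeap) Swap(i, j int)       { h[i], h[j] = h[j], h[i] }
func (h *stringMinHeap) Push(x interface{}) { *h = append(*h, x.(string)) }
func (h *stringMinHeap) Pop() interface{} {
	old := *h
	n := len(old)
	v := old[n-1]
	*h = old[:n-1]
	return v
}

// addTopN mirrors the generated Add fast path: below capacity, push; at
// capacity, replace the current minimum only if the new value is larger.
func addTopN(h *stringMinHeap, n int, v string) {
	if h.Len() < n {
		heap.Push(h, v)
		return
	}
	if (*h)[0] < v {
		(*h)[0] = v
		heap.Fix(h, 0)
	}
}

func main() {
	h := &stringMinHeap{}
	for _, s := range []string{"b", "e", "a", "d", "c"} {
		addTopN(h, 3, s)
	}
	fmt.Println(*h) // the three largest values, in heap order
}
```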
2 changes: 1 addition & 1 deletion query/raw_result.go
@@ -21,7 +21,7 @@ type RawResult struct {

// MarshalJSON marshals the raw results as a JSON object using the data field.
func (r RawResult) MarshalJSON() ([]byte, error) {
return json.Marshal(r.Data)
return json.Marshal(string(r.Data))
}

// RawResultLessThanFn compares two raw results.
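
Note on query/raw_result.go: the `MarshalJSON` change is behavioral, not just naming. `encoding/json` encodes a `[]byte` as a base64 string, while a `string` is emitted as the literal characters (escaped as needed). A minimal demonstration of the difference, using a hypothetical payload:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	data := []byte(`{"service":"foo"}`)

	asBytes, _ := json.Marshal(data)          // []byte values are base64-encoded
	asString, _ := json.Marshal(string(data)) // strings are emitted verbatim, escaped

	fmt.Println(string(asBytes))  // "eyJz..." (base64)
	fmt.Println(string(asString)) // "{\"service\":\"foo\"}"
}
```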