Update integration tests and remove usage of unsafe.
notbdu committed Feb 28, 2019
1 parent 5a78011 commit 9064d51
Showing 12 changed files with 29 additions and 139 deletions.
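Outside the integration tests, the change is a mechanical swap of the zero-copy string/byte conversions from x/unsafe for the copying helpers in x/safe. As a rough, hedged sketch only (the actual x/safe package in eventdb may differ), such helpers reduce to plain copying conversions:

    package safe

    // ToBytes returns a copy of s as a byte slice. Unlike an unsafe
    // zero-copy conversion, the result can be mutated without aliasing
    // the string's backing memory.
    func ToBytes(s string) []byte {
        return []byte(s)
    }

    // ToString returns a copy of b as a string.
    func ToString(b []byte) string {
        return string(b)
    }

The copy costs an allocation per conversion, but it removes the aliasing hazards of the unsafe variants; the benchmark, persistence, handler, encoding, and hash call sites below are updated to the new package without other behavior changes.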
117 changes: 0 additions & 117 deletions integration/config.go
@@ -174,121 +174,4 @@ database:
low: 0.7
high: 1.0
`

testConfig2 = `
http:
listenAddress: localhost:5678
readTimeout: 1m
writeTimeout: 1m
handler:
parserPool:
size: 50000
watermark:
low: 0.001
high: 0.002
parser:
maxDepth: 3
excludeKeySuffix: _noindex
database:
namespaces:
- id: testNamespace
retention: 87600h # 10 years in hours, DB has a default data retention of 24 hours that will break fixtures if we run them in the future (fixtures have hard coded timestamps).
numShards: 1
filePathPrefix: /var/lib/eventdb
fieldPathSeparator: .
namespaceFieldName: service
timestampFieldName: "@timestamp"
tickMinInterval: 1s
maxNumDocsPerSegment: 5
segmentUnloadAfterUnreadFor: 1s
persist:
writeBufferSize: 65536
readBufferSize: 65536
timestampPrecision: 1ms
mmapEnableHugePages: true
mmapHugePagesThreshold: 16384 # 2 ^ 14
contextPool:
size: 128
lowWatermark: 0.7
highWatermark: 1.0
maxFinalizerCapacity: 65536
boolArrayPool: # total < 1GB
buckets:
- count: 5000
capacity: 4096
- count: 5000
capacity: 8192
- count: 5000
capacity: 16384
- count: 5000
capacity: 32768
- count: 5000
capacity: 65536
watermark:
low: 0.7
high: 1.0
intArrayPool: # total < 1GB
buckets:
- count: 1000
capacity: 4096
- count: 1000
capacity: 8192
- count: 1000
capacity: 16384
- count: 1000
capacity: 32768
- count: 1000
capacity: 65536
watermark:
low: 0.7
high: 1.0
int64ArrayPool: # For timestamps, 8 shards * 2 segments = 16, ~250MB
buckets:
- count: 32
capacity: 65536
- count: 32
capacity: 131072
- count: 32
capacity: 262144
- count: 32
capacity: 524288
watermark:
low: 0.001
high: 0.002
doubleArrayPool: # total < 1GB
buckets:
- count: 1000
capacity: 4096
- count: 1000
capacity: 8192
- count: 1000
capacity: 16384
- count: 1000
capacity: 32768
- count: 1000
capacity: 65536
watermark:
low: 0.7
high: 1.0
stringArrayPool: # 1K string fields * 8 shards * 2 segments = 16K, ~15G
buckets:
- count: 1600
capacity: 8192
- count: 1600
capacity: 16384
- count: 1600
capacity: 32768
- count: 1600
capacity: 65536
- count: 1600
capacity: 131072
- count: 1600
capacity: 262144
- count: 1600
capacity: 524288
watermark:
low: 0.7
high: 1.0
`
)
10 changes: 8 additions & 2 deletions integration/raw_query_no_filter_orderby_from_disk_test.go
@@ -16,7 +16,13 @@ func TestRawQueryNoFilterOrderByFromDisk(t *testing.T) {
}

// Create server.
ts := newTestServerSetup(t, testConfig2)
cfg := loadConfig(t, testConfig1)
cfg.Database.NumShards = 1
ts := newTestServerSetup(t, cfg)
ts.dbOpts = ts.dbOpts.
SetSegmentUnloadAfterUnreadFor(time.Second).
SetTickMinInterval(time.Second).
SetMaxNumDocsPerSegment(5)
defer ts.close(t)

// Start the server.
@@ -118,7 +124,7 @@ func TestRawQueryNoFilterOrderByFromDisk(t *testing.T) {
}

// Write data.
client := ts.newClient()
client := ts.newHTTPClient()
require.NoError(t, client.write([]byte(strings.TrimSpace(testData))))

// Wait for db flush.
3 changes: 2 additions & 1 deletion integration/raw_query_no_filter_orderby_test.go
@@ -15,7 +15,8 @@ func TestRawQueryNoFilterOrderBy(t *testing.T) {
}

// Create server.
ts := newTestServerSetup(t, testConfig1)
cfg := loadConfig(t, testConfig1)
ts := newTestServerSetup(t, cfg)
defer ts.close(t)

// Start the server.
3 changes: 2 additions & 1 deletion integration/raw_query_with_filter_orderby_test.go
@@ -15,7 +15,8 @@ func TestRawQueryWithFilterOrderBy(t *testing.T) {
}

// Create server.
ts := newTestServerSetup(t, testConfig1)
cfg := loadConfig(t, testConfig1)
ts := newTestServerSetup(t, cfg)
defer ts.close(t)

// Start the server.
4 changes: 1 addition & 3 deletions integration/setup.go
@@ -48,9 +48,7 @@ type testServerSetup struct {
closedCh chan struct{}
}

func newTestServerSetup(t *testing.T, config string) *testServerSetup {
cfg := loadConfig(t, config)

func newTestServerSetup(t *testing.T, cfg configuration) *testServerSetup {
namespaces, err := cfg.Database.NewNamespacesMetadata()
require.NoError(t, err)

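With newTestServerSetup now taking a configuration value, each test loads and tweaks the config itself instead of selecting between whole config strings. A sketch of the resulting pattern, assembled only from identifiers visible in this commit (loadConfig, the Database.NumShards field, and the dbOpts setters are repo-internal test helpers):

    cfg := loadConfig(t, testConfig1)
    cfg.Database.NumShards = 1 // per-test override replaces the deleted testConfig2

    ts := newTestServerSetup(t, cfg)
    ts.dbOpts = ts.dbOpts.
        SetSegmentUnloadAfterUnreadFor(time.Second).
        SetTickMinInterval(time.Second).
        SetMaxNumDocsPerSegment(5)
    defer ts.close(t)

This keeps one canonical YAML fixture (testConfig1) and pushes test-specific tuning into Go code, which is what allowed the 117-line testConfig2 block above to be deleted.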
3 changes: 2 additions & 1 deletion integration/time_bucket_no_filter_test.go
@@ -15,7 +15,8 @@ func TestTimeBucketQueryNoFilter(t *testing.T) {
}

// Create server.
ts := newTestServerSetup(t, testConfig1)
cfg := loadConfig(t, testConfig1)
ts := newTestServerSetup(t, cfg)
defer ts.close(t)

// Start the server.
3 changes: 2 additions & 1 deletion integration/time_bucket_with_filter_test.go
@@ -15,7 +15,8 @@ func TestTimeBucketQueryWithFilter(t *testing.T) {
}

// Create server.
ts := newTestServerSetup(t, testConfig1)
cfg := loadConfig(t, testConfig1)
ts := newTestServerSetup(t, cfg)
defer ts.close(t)

// Start the server.
8 changes: 4 additions & 4 deletions parser/json/parser_benchmark_test.go
@@ -7,7 +7,7 @@ import (
"testing"

"github.com/xichen2020/eventdb/parser/json/value"
"github.com/xichen2020/eventdb/x/unsafe"
"github.com/xichen2020/eventdb/x/safe"

"github.com/stretchr/testify/require"
)
@@ -258,7 +258,7 @@ func benchmarkFastJSONParseGet(b *testing.B, s string) {
func benchmarkStdJSONParseMap(b *testing.B, s string) {
b.ReportAllocs()
b.SetBytes(int64(len(s)))
bb := unsafe.ToBytes(s)
bb := safe.ToBytes(s)
b.RunParallel(func(pb *testing.PB) {
var m map[string]interface{}
for pb.Next() {
@@ -272,7 +272,7 @@ func benchmarkStdJSONParseMap(b *testing.B, s string) {
func benchmarkStdJSONParseStruct(b *testing.B, s string) {
b.ReportAllocs()
b.SetBytes(int64(len(s)))
bb := unsafe.ToBytes(s)
bb := safe.ToBytes(s)
b.RunParallel(func(pb *testing.PB) {
var m struct {
Sid int
@@ -295,7 +295,7 @@ func benchmarkStdJSONParseStruct(b *testing.B, s string) {
func benchmarkStdJSONParseEmptyStruct(b *testing.B, s string) {
b.ReportAllocs()
b.SetBytes(int64(len(s)))
bb := unsafe.ToBytes(s)
bb := safe.ToBytes(s)
b.RunParallel(func(pb *testing.PB) {
var m struct{}
for pb.Next() {
4 changes: 2 additions & 2 deletions persist/fs/files.go
@@ -8,7 +8,7 @@ import (
"strconv"

"github.com/xichen2020/eventdb/persist"
"github.com/xichen2020/eventdb/x/unsafe"
"github.com/xichen2020/eventdb/x/safe"
)

// namespaceDataDirPath returns the path to the directory for a given namespace.
@@ -78,7 +78,7 @@ func fieldDataFilePath(
}
buf.WriteString(fieldDataFileSuffix)
b := buf.Bytes()
return segmentFilePath(segmentDirPath, unsafe.ToString(b))
return segmentFilePath(segmentDirPath, safe.ToString(b))
}

// openWritable opens a file for writing and truncating as necessary.
3 changes: 1 addition & 2 deletions server/http/handlers/options.go
@@ -6,7 +6,6 @@ import (
"github.com/xichen2020/eventdb/parser/json"
"github.com/xichen2020/eventdb/parser/json/value"
"github.com/xichen2020/eventdb/x/safe"
"github.com/xichen2020/eventdb/x/unsafe"

"github.com/m3db/m3x/clock"
"github.com/m3db/m3x/instrument"
@@ -153,7 +152,7 @@ func (o *Options) TimeNanosFn() TimeNanosFn {
// defaultIDFn simply generates a UUID as the document ID.
func defaultIDFn(*value.Value) ([]byte, error) {
id := uuid.NewUUID().String()
return unsafe.ToBytes(id), nil
return safe.ToBytes(id), nil
}

// defaultNamespaceFn parses the namespace value as a string.
4 changes: 2 additions & 2 deletions values/encoding/string_encode.go
@@ -10,7 +10,7 @@ import (
"github.com/xichen2020/eventdb/values"
"github.com/xichen2020/eventdb/values/iterator"
"github.com/xichen2020/eventdb/x/proto"
"github.com/xichen2020/eventdb/x/unsafe"
"github.com/xichen2020/eventdb/x/safe"

xerrors "github.com/m3db/m3x/errors"
"github.com/valyala/gozstd"
@@ -176,7 +176,7 @@ func (enc *stringEncoder) rawSizeEncode(
writer io.Writer,
) error {
for valuesIt.Next() {
b := unsafe.ToBytes(valuesIt.Current())
b := safe.ToBytes(valuesIt.Current())
n := binary.PutVarint(enc.buf, int64(len(b)))
if _, err := writer.Write(enc.buf[:n]); err != nil {
return err
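For context, the hunk above appears to frame each string as a varint length prefix followed by its raw bytes (the remainder of the loop is collapsed in this view). A self-contained sketch of that framing, independent of eventdb's iterator and writer types; encodeRawSize is a made-up name for illustration:

    package main

    import (
        "bytes"
        "encoding/binary"
        "fmt"
    )

    // encodeRawSize frames each string as a varint length followed by its bytes,
    // mirroring the length-prefix scheme suggested by rawSizeEncode above.
    func encodeRawSize(values []string) []byte {
        var out bytes.Buffer
        var buf [binary.MaxVarintLen64]byte
        for _, s := range values {
            n := binary.PutVarint(buf[:], int64(len(s)))
            out.Write(buf[:n])
            out.WriteString(s)
        }
        return out.Bytes()
    }

    func main() {
        fmt.Printf("%q\n", encodeRawSize([]string{"foo", "barbaz"}))
    }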
6 changes: 3 additions & 3 deletions x/hash/hash.go
@@ -1,7 +1,7 @@
package hash

import (
"github.com/xichen2020/eventdb/x/unsafe"
"github.com/xichen2020/eventdb/x/safe"

murmur3 "github.com/m3db/stackmurmur3"
)
@@ -22,7 +22,7 @@ func BytesHash(d []byte) Hash {

// StringHash returns the hash of a string.
func StringHash(s string) Hash {
return BytesHash(unsafe.ToBytes(s))
return BytesHash(safe.ToBytes(s))
}

// StringArrayHash returns the hash of a string array
@@ -35,7 +35,7 @@ func StringArrayHash(d []string, sep byte) Hash {
)
// NB: If needed, can fork murmur3 to do this more properly
for i := 0; i < len(d); i++ {
h = h.Write(unsafe.ToBytes(d[i]))
h = h.Write(safe.ToBytes(d[i]))
if i < len(d)-1 {
h = h.Write(b)
}
