diff --git a/autoencoding/autoencoding.go b/autoencoding/autoencoding.go new file mode 100644 index 0000000..a3e193e --- /dev/null +++ b/autoencoding/autoencoding.go @@ -0,0 +1,213 @@ +package autoencoding + +import ( + "encoding/json" + "errors" + "reflect" + "unicode" + + "github.com/plexsysio/gkvstore" + "github.com/vmihailenco/msgpack/v5" + "google.golang.org/protobuf/proto" +) + +type Encoding int + +const ( + Invalid Encoding = iota + JSON + MsgPack + Protobuf ) + +type ObjectGetter interface { + Get() interface{} +} + +type item struct { + val interface{} + namespace string + id string + encoding Encoding +} + +type itemWithTimeTracker struct { + *item + created string + updated string +} + +func getEncoding(f reflect.StructField) Encoding { + if _, ok := f.Tag.Lookup("protobuf"); ok { + return Protobuf + } + if _, ok := f.Tag.Lookup("json"); ok { + return JSON + } + if _, ok := f.Tag.Lookup("msgpack"); ok { + return MsgPack + } + // If none is provided, use JSON as the default + return JSON +} + +func newItem(val interface{}) (gkvstore.Item, error) { + foundId, foundCreated, foundUpdated := false, false, false + id, ns, created, updated := "", "", "", "" + + rv := reflect.ValueOf(val) + if rv.Kind() != reflect.Ptr && rv.Kind() != reflect.Interface { + return nil, errors.New("incorrect value type: use pointer") + } + t := rv.Elem().Type() + + if t.Name() == "" { + return nil, errors.New("incorrect name of type") + } + ns = t.Name() + + var encoding Encoding + // Use first exported field for encoding + for i := 0; i < t.NumField(); i++ { + if unicode.IsUpper(rune(t.Field(i).Name[0])) { + encoding = getEncoding(t.Field(i)) + break + } + } + if encoding == 0 { + return nil, errors.New("no exported field") + } + + for i := 0; i < t.NumField(); i++ { + field := t.Field(i) + tagval, ok := field.Tag.Lookup("aenc") + if (field.Name == "ID" || field.Name == "Id") || (ok && tagval == "id") { + if foundId { + return nil, errors.New("duplicate ID field configured") + } + if field.Type.Kind() != reflect.String { + return nil, errors.New("ID field should be string") + } + foundId = true + id = field.Name + } + if (field.Name == "Created" || field.Name == "CreatedAt") || (ok && tagval == "created") { + if foundCreated { + return nil, errors.New("duplicate Created field configured") + } + if field.Type.Kind() != reflect.Int64 { + return nil, errors.New("Created field should be int64") + } + foundCreated = true + created = field.Name + } + if (field.Name == "Updated" || field.Name == "UpdatedAt") || (ok && tagval == "updated") { + if foundUpdated { + return nil, errors.New("duplicate Updated field configured") + } + if field.Type.Kind() != reflect.Int64 { + return nil, errors.New("Updated field should be int64") + } + foundUpdated = true + updated = field.Name + } + } + + if !foundId { + return nil, errors.New("ID field not configured") + } + + if foundCreated && foundUpdated { + return &itemWithTimeTracker{ + item: &item{ + namespace: ns, + id: id, + encoding: encoding, + val: val, + }, + created: created, + updated: updated, + }, nil + } + + return &item{ + namespace: ns, + id: id, + encoding: encoding, + val: val, + }, nil + } + +func New(val interface{}) (gkvstore.Item, error) { + return newItem(val) +} + +func MustNew(val interface{}) gkvstore.Item { + it, err := newItem(val) + if err != nil { + panic(err.Error()) + } + return it +} + +func (i *item) GetNamespace() string { + return i.namespace +} + +func (i *item) GetID() string { + v := reflect.ValueOf(i.val).Elem() + return
v.FieldByName(i.id).String() +} + +func (i *item) SetID(id string) { + v := reflect.ValueOf(i.val).Elem() + v.FieldByName(i.id).SetString(id) +} + +func (i *item) Get() interface{} { + return i.val +} + +func (i *item) Marshal() ([]byte, error) { + switch i.encoding { + case JSON: + return json.Marshal(i.val) + case MsgPack: + return msgpack.Marshal(i.val) + case Protobuf: + return proto.Marshal(i.val.(proto.Message)) + } + return nil, errors.New("invalid encoding") +} + +func (i *item) Unmarshal(buf []byte) error { + switch i.encoding { + case JSON: + return json.Unmarshal(buf, i.val) + case MsgPack: + return msgpack.Unmarshal(buf, i.val) + case Protobuf: + return proto.Unmarshal(buf, i.val.(proto.Message)) + } + return errors.New("invalid encoding") +} + +func (i *itemWithTimeTracker) GetCreated() int64 { + v := reflect.ValueOf(i.val).Elem() + return v.FieldByName(i.created).Int() +} + +func (i *itemWithTimeTracker) GetUpdated() int64 { + v := reflect.ValueOf(i.val).Elem() + return v.FieldByName(i.updated).Int() +} + +func (i *itemWithTimeTracker) SetCreated(val int64) { + v := reflect.ValueOf(i.val).Elem() + v.FieldByName(i.created).SetInt(val) +} + +func (i *itemWithTimeTracker) SetUpdated(val int64) { + v := reflect.ValueOf(i.val).Elem() + v.FieldByName(i.updated).SetInt(val) +} diff --git a/autoencoding/autoencoding_test.go b/autoencoding/autoencoding_test.go new file mode 100644 index 0000000..0c3fdfb --- /dev/null +++ b/autoencoding/autoencoding_test.go @@ -0,0 +1,219 @@ +package autoencoding_test + +import ( + "testing" + "time" + + "github.com/plexsysio/gkvstore" + "github.com/plexsysio/gkvstore/autoencoding" + "github.com/plexsysio/gkvstore/autoencoding/internal/pbtest" +) + +func TestNew(t *testing.T) { + t.Run("no id field", func(st *testing.T) { + type struct1 struct { + Namespace string + Val string + } + _, err := autoencoding.New(&struct1{}) + if err == nil { + st.Fatal("expected error for no id") + } + }) + t.Run("success without tags", func(st *testing.T) { + type struct1 struct { + Id string + Val string + } + it, err := autoencoding.New(&struct1{}) + if err != nil { + st.Fatal("expected no error") + } + it.(gkvstore.IDSetter).SetID("testID") + if it.GetID() != "testID" { + st.Fatal("unable to set ID") + } + }) + t.Run("success with tags", func(st *testing.T) { + type struct1 struct { + IdField string `aenc:"id"` + Val string + } + it, err := autoencoding.New(&struct1{}) + if err != nil { + st.Fatal("expected no error") + } + it.(gkvstore.IDSetter).SetID("testID") + if it.GetID() != "testID" { + st.Fatal("unable to set ID") + } + }) + t.Run("success with tags and additional tags", func(st *testing.T) { + type struct1 struct { + IdField string `json:"id,omitempty" aenc:"id"` + Val string + } + _, err := autoencoding.New(&struct1{}) + if err != nil { + st.Fatal("expected no error") + } + }) + t.Run("duplicate field and tag", func(st *testing.T) { + type struct1 struct { + Id string + IdField string `aenc:"id"` + Val string + } + _, err := autoencoding.New(&struct1{}) + if err == nil { + st.Fatal("expected error for duplicate id") + } + }) + t.Run("incorrect type of id field", func(st *testing.T) { + type struct1 struct { + Id int + Val string + } + _, err := autoencoding.New(&struct1{}) + if err == nil { + st.Fatal("expected error for incorrect id type") + } + }) + t.Run("MustNew panics on incorrect", func(st *testing.T) { + type struct1 struct { + IdField string + Val string + } + panics := false + done := make(chan struct{}) + go func() { + defer func() { + if r := 
recover(); r != nil { + panics = true + } + close(done) + }() + _ = autoencoding.MustNew(&struct1{}) + }() + <-done + if !panics { + st.Fatal("expected MustNew to panic") + } + }) + t.Run("timeTracker", func(st *testing.T) { + type struct1 struct { + ID string + Created int64 + Updated int64 + } + it, err := autoencoding.New(&struct1{}) + if err != nil { + st.Fatal("expected no error got", err) + } + tt, ok := it.(gkvstore.TimeTracker) + if !ok { + st.Fatal("timetracker implementation expected") + } + ts := time.Now().UnixNano() + tt.SetCreated(ts) + tt.SetUpdated(ts) + if tt.GetCreated() != tt.GetUpdated() || tt.GetCreated() != ts { + st.Fatal("invalid timetracker implementation") + } + }) +} + +func TestEncoding(t *testing.T) { + t.Run("JSON", func(st *testing.T) { + type jsonItem struct { + ID string `json:"id"` + Val1 int `json:"val1"` + Val2 string `json:"val2"` + } + t1 := &jsonItem{ + ID: "1", + Val1: 100, + Val2: "test1 JSON", + } + ae1 := autoencoding.MustNew(t1) + + buf, err := ae1.Marshal() + if err != nil { + st.Fatalf("failed marshalling %v", err) + } + + st.Log("Size of packing", len(buf)) + + t2 := &jsonItem{} + ae2 := autoencoding.MustNew(t2) + + err = ae2.Unmarshal(buf) + if err != nil { + st.Fatalf("failed unmarshaling %v", err) + } + + if t1.ID != t2.ID || t1.Val1 != t2.Val1 || t1.Val2 != t2.Val2 { + st.Fatalf("read incorrect value expected %v found %v", t1, t2) + } + }) + t.Run("MsgPack", func(st *testing.T) { + type msgpackItem struct { + ID string `msgpack:"id"` + Val1 int `msgpack:"val1"` + Val2 string `msgpack:"val2"` + } + t1 := &msgpackItem{ + ID: "1", + Val1: 100, + Val2: "test1 JSON", + } + ae1 := autoencoding.MustNew(t1) + + buf, err := ae1.Marshal() + if err != nil { + st.Fatalf("failed marshalling %v", err) + } + + st.Log("Size of packing", len(buf)) + + t2 := &msgpackItem{} + ae2 := autoencoding.MustNew(t2) + + err = ae2.Unmarshal(buf) + if err != nil { + st.Fatalf("failed unmarshaling %v", err) + } + + if t1.ID != t2.ID || t1.Val1 != t2.Val1 || t1.Val2 != t2.Val2 { + st.Fatalf("read incorrect value expected %v found %v", t1, t2) + } + }) + t.Run("Protobuf", func(st *testing.T) { + t1 := &pbtest.TestItem{ + Id: "1", + Val1: 100, + Val2: "test1 JSON", + } + ae1 := autoencoding.MustNew(t1) + + buf, err := ae1.Marshal() + if err != nil { + st.Fatalf("failed marshalling %v", err) + } + + st.Log("Size of packing", len(buf)) + + t2 := &pbtest.TestItem{} + ae2 := autoencoding.MustNew(t2) + + err = ae2.Unmarshal(buf) + if err != nil { + st.Fatalf("failed unmarshaling %v", err) + } + + if t1.Id != t2.Id || t1.Val1 != t2.Val1 || t1.Val2 != t2.Val2 { + st.Fatalf("read incorrect value expected %v found %v", t1, t2) + } + + }) +} diff --git a/autoencoding/internal/pbtest/pbtest.go b/autoencoding/internal/pbtest/pbtest.go new file mode 100644 index 0000000..5a2f24f --- /dev/null +++ b/autoencoding/internal/pbtest/pbtest.go @@ -0,0 +1,3 @@ +package pbtest + +//go:generate protoc -I. --go_out=. --go_opt=paths=source_relative pbtest.proto diff --git a/autoencoding/internal/pbtest/pbtest.pb.go b/autoencoding/internal/pbtest/pbtest.pb.go new file mode 100644 index 0000000..4f9a234 --- /dev/null +++ b/autoencoding/internal/pbtest/pbtest.pb.go @@ -0,0 +1,162 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// versions: +// protoc-gen-go v1.27.1 +// protoc v3.17.3 +// source: pbtest.proto + +package pbtest + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type TestItem struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Val1 int64 `protobuf:"varint,2,opt,name=val1,proto3" json:"val1,omitempty"` + Val2 string `protobuf:"bytes,3,opt,name=val2,proto3" json:"val2,omitempty"` +} + +func (x *TestItem) Reset() { + *x = TestItem{} + if protoimpl.UnsafeEnabled { + mi := &file_pbtest_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TestItem) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TestItem) ProtoMessage() {} + +func (x *TestItem) ProtoReflect() protoreflect.Message { + mi := &file_pbtest_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TestItem.ProtoReflect.Descriptor instead. +func (*TestItem) Descriptor() ([]byte, []int) { + return file_pbtest_proto_rawDescGZIP(), []int{0} +} + +func (x *TestItem) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *TestItem) GetVal1() int64 { + if x != nil { + return x.Val1 + } + return 0 +} + +func (x *TestItem) GetVal2() string { + if x != nil { + return x.Val2 + } + return "" +} + +var File_pbtest_proto protoreflect.FileDescriptor + +var file_pbtest_proto_rawDesc = []byte{ + 0x0a, 0x0c, 0x70, 0x62, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06, + 0x70, 0x62, 0x74, 0x65, 0x73, 0x74, 0x22, 0x42, 0x0a, 0x08, 0x54, 0x65, 0x73, 0x74, 0x49, 0x74, + 0x65, 0x6d, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, + 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x76, 0x61, 0x6c, 0x31, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, + 0x52, 0x04, 0x76, 0x61, 0x6c, 0x31, 0x12, 0x12, 0x0a, 0x04, 0x76, 0x61, 0x6c, 0x32, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x76, 0x61, 0x6c, 0x32, 0x42, 0x3c, 0x5a, 0x3a, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x6c, 0x65, 0x78, 0x73, 0x79, 0x73, + 0x69, 0x6f, 0x2f, 0x67, 0x6b, 0x76, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x75, 0x74, 0x6f, + 0x65, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x2f, 0x70, 0x62, 0x74, 0x65, 0x73, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_pbtest_proto_rawDescOnce sync.Once + file_pbtest_proto_rawDescData = file_pbtest_proto_rawDesc +) + +func file_pbtest_proto_rawDescGZIP() []byte { + file_pbtest_proto_rawDescOnce.Do(func() { + file_pbtest_proto_rawDescData = protoimpl.X.CompressGZIP(file_pbtest_proto_rawDescData) + }) + return file_pbtest_proto_rawDescData +} + +var file_pbtest_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_pbtest_proto_goTypes = []interface{}{ + (*TestItem)(nil), // 0: pbtest.TestItem 
+} +var file_pbtest_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_pbtest_proto_init() } +func file_pbtest_proto_init() { + if File_pbtest_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_pbtest_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TestItem); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_pbtest_proto_rawDesc, + NumEnums: 0, + NumMessages: 1, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_pbtest_proto_goTypes, + DependencyIndexes: file_pbtest_proto_depIdxs, + MessageInfos: file_pbtest_proto_msgTypes, + }.Build() + File_pbtest_proto = out.File + file_pbtest_proto_rawDesc = nil + file_pbtest_proto_goTypes = nil + file_pbtest_proto_depIdxs = nil +} diff --git a/autoencoding/internal/pbtest/pbtest.proto b/autoencoding/internal/pbtest/pbtest.proto new file mode 100644 index 0000000..03f26d3 --- /dev/null +++ b/autoencoding/internal/pbtest/pbtest.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package pbtest; + +option go_package = "github.com/plexsysio/gkvstore/autoencoding/internal/pbtest"; + +message TestItem { + string id = 1; + int64 val1 = 2; + string val2 = 3; +} diff --git a/go.mod b/go.mod index 8a70348..77e9077 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,10 @@ module github.com/plexsysio/gkvstore go 1.16 require ( - github.com/google/uuid v1.3.0 // indirect - github.com/opentracing/opentracing-go v1.2.0 // indirect - go.uber.org/atomic v1.9.0 // indirect + github.com/google/uuid v1.3.0 + github.com/opentracing/opentracing-go v1.2.0 + github.com/vmihailenco/msgpack/v5 v5.3.5 + go.uber.org/atomic v1.9.0 + go.uber.org/multierr v1.7.0 + google.golang.org/protobuf v1.27.1 ) diff --git a/go.sum b/go.sum index db17ef4..d0935ed 100644 --- a/go.sum +++ b/go.sum @@ -1,11 +1,35 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify 
v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/vmihailenco/msgpack/v5 v5.3.5 h1:5gO0H1iULLWGhs2H5tbAHIZTV8/cYafcFOr9znI5mJU= +github.com/vmihailenco/msgpack/v5 v5.3.5/go.mod h1:7xyJ9e+0+9SaZT0Wt1RGleJXzli6Q/V5KbhBonMG9jc= +github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= +github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/multierr v1.7.0 h1:zaiO/rmgFjbmCXdSYJWQcdvOCsthmdaHfr3Gm2Kx4Ec= +go.uber.org/multierr v1.7.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/prefix/prefix.go b/prefix/prefix.go new file mode 100644 index 0000000..79045e2 --- /dev/null +++ b/prefix/prefix.go @@ -0,0 +1,88 @@ +package prefixstore + +import ( + "context" + "errors" + "strings" + + "github.com/plexsysio/gkvstore" + "go.uber.org/multierr" +) + +type prefixStore struct { + stores map[string]gkvstore.Store +} + +var ErrStoreNotConfigured error = errors.New("prefix store not configured") + +type Mount struct { + Prefix string + Store gkvstore.Store +} + +func New(mnts ...Mount) gkvstore.Store { + stores := make(map[string]gkvstore.Store) + for _, mnt := range mnts { + stores[mnt.Prefix] = mnt.Store + } + return &prefixStore{ + stores: stores, + } +} + +func (t *prefixStore) getStore(prefix string) (gkvstore.Store, bool) { + for k, v := range t.stores { + if strings.HasPrefix(prefix, k) { + return v, true + } + } + return nil, false +} + +func (t *prefixStore) Create(ctx context.Context, item gkvstore.Item) error { + st, found := t.getStore(item.GetNamespace()) + if !found { + return ErrStoreNotConfigured + } + return st.Create(ctx, item) +} + +func (t *prefixStore) Read(ctx context.Context, item gkvstore.Item) error { + st, found := t.getStore(item.GetNamespace()) + if !found { + return ErrStoreNotConfigured + } + return st.Read(ctx, item) +} + +func (t *prefixStore) Update(ctx context.Context, item gkvstore.Item) error { + st, found := t.getStore(item.GetNamespace()) + if !found { + return ErrStoreNotConfigured + } + return st.Update(ctx, item) +} + +func (t *prefixStore) Delete(ctx context.Context, item gkvstore.Item) error { + st, found := 
t.getStore(item.GetNamespace()) + if !found { + return ErrStoreNotConfigured + } + return st.Delete(ctx, item) +} + +func (t *prefixStore) List(ctx context.Context, factory gkvstore.Factory, opts gkvstore.ListOpt) (<-chan *gkvstore.Result, error) { + st, found := t.getStore(factory().GetNamespace()) + if !found { + return nil, ErrStoreNotConfigured + } + return st.List(ctx, factory, opts) +} + +func (t *prefixStore) Close() error { + var err error + for _, st := range t.stores { + multierr.AppendInto(&err, st.Close()) + } + return err +} diff --git a/prefix/prefix_test.go b/prefix/prefix_test.go new file mode 100644 index 0000000..ac01643 --- /dev/null +++ b/prefix/prefix_test.go @@ -0,0 +1,220 @@ +package prefixstore_test + +import ( + "context" + "testing" + + "github.com/plexsysio/gkvstore" + "github.com/plexsysio/gkvstore/autoencoding" + "github.com/plexsysio/gkvstore/inmem" + prefixstore "github.com/plexsysio/gkvstore/prefix" + syncstore "github.com/plexsysio/gkvstore/sync" +) + +type user struct { + Id string + Name string + Age int64 +} + +type product struct { + Id string + Name string + Price int64 +} + +func TestCRUDL(t *testing.T) { + st1 := inmem.New() + st2 := syncstore.New(inmem.New()) + pfxStore := prefixstore.New( + prefixstore.Mount{ + Prefix: "user", + Store: st1, + }, + prefixstore.Mount{ + Prefix: "product", + Store: st2, + }, + ) + + t.Run("Create", func(t *testing.T) { + err := pfxStore.Create(context.TODO(), autoencoding.MustNew(&user{ + Name: "user1", + Age: 20, + })) + if err != nil { + t.Fatal(err) + } + err = pfxStore.Create(context.TODO(), autoencoding.MustNew(&product{ + Name: "product1", + Price: 100, + })) + if err != nil { + t.Fatal(err) + } + }) + t.Run("Read", func(t *testing.T) { + u1 := &user{Id: "1"} + err := pfxStore.Read(context.TODO(), autoencoding.MustNew(u1)) + if err != nil { + t.Fatal(err) + } + if u1.Name != "user1" || u1.Age != 20 { + t.Fatal("incorrect value read") + } + p1 := &product{Id: "1"} + err = pfxStore.Read(context.TODO(), autoencoding.MustNew(p1)) + if err != nil { + t.Fatal(err) + } + if p1.Name != "product1" || p1.Price != 100 { + t.Fatal("incorrect value read") + } + }) + t.Run("Read individual stores", func(t *testing.T) { + u1 := &user{Id: "1"} + err := st1.Read(context.TODO(), autoencoding.MustNew(u1)) + if err != nil { + t.Fatal(err) + } + if u1.Name != "user1" || u1.Age != 20 { + t.Fatal("incorrect value read") + } + err = st2.Read(context.TODO(), autoencoding.MustNew(u1)) + if err == nil { + t.Fatal("expected error from second store for user") + } + p1 := &product{Id: "1"} + err = st1.Read(context.TODO(), autoencoding.MustNew(p1)) + if err == nil { + t.Fatal("expected error from first store for product") + } + err = st2.Read(context.TODO(), autoencoding.MustNew(p1)) + if err != nil { + t.Fatal(err) + } + if p1.Name != "product1" || p1.Price != 100 { + t.Fatal("read incorrect product details") + } + }) + t.Run("Update", func(t *testing.T) { + err := pfxStore.Update(context.TODO(), autoencoding.MustNew(&user{ + Id: "1", + Name: "user1", + Age: 30, + })) + if err != nil { + t.Fatal(err) + } + err = pfxStore.Update(context.TODO(), autoencoding.MustNew(&product{ + Id: "1", + Name: "product1", + Price: 200, + })) + if err != nil { + t.Fatal(err) + } + }) + t.Run("Read individual stores after update", func(t *testing.T) { + u1 := &user{Id: "1"} + err := st1.Read(context.TODO(), autoencoding.MustNew(u1)) + if err != nil { + t.Fatal(err) + } + if u1.Name != "user1" || u1.Age != 30 { + t.Fatal("incorrect value read") + } + err = 
st2.Read(context.TODO(), autoencoding.MustNew(u1)) + if err == nil { + t.Fatal("expected error from second store for user") + } + p1 := &product{Id: "1"} + err = st1.Read(context.TODO(), autoencoding.MustNew(p1)) + if err == nil { + t.Fatal("expected error from first store for product") + } + err = st2.Read(context.TODO(), autoencoding.MustNew(p1)) + if err != nil { + t.Fatal(err) + } + if p1.Name != "product1" || p1.Price != 200 { + t.Fatal("read incorrect product details") + } + }) + t.Run("List", func(t *testing.T) { + res, err := pfxStore.List(context.TODO(), func() gkvstore.Item { + return autoencoding.MustNew(&user{}) + }, gkvstore.ListOpt{Limit: 5}) + if err != nil { + t.Fatal(err) + } + countUsr := 0 + for v := range res { + countUsr++ + u, ok := v.Val.(autoencoding.ObjectGetter).Get().(*user) + if !ok { + t.Fatal("incorrect obj in list response") + } + if u.Name != "user1" || u.Age != 30 { + t.Fatal("incorrect value returned on List") + } + } + if countUsr != 1 { + t.Fatal("incorrect no of items returned in List") + } + res, err = pfxStore.List(context.TODO(), func() gkvstore.Item { + return autoencoding.MustNew(&product{}) + }, gkvstore.ListOpt{Limit: 5}) + if err != nil { + t.Fatal(err) + } + countProd := 0 + for v := range res { + countProd++ + p, ok := v.Val.(autoencoding.ObjectGetter).Get().(*product) + if !ok { + t.Fatal("incorrect obj in list response") + } + if p.Name != "product1" || p.Price != 200 { + t.Fatal("incorrect value returned on List") + } + } + if countProd != 1 { + t.Fatal("incorrect no of items returned in List") + } + }) + t.Run("Delete", func(t *testing.T) { + err := pfxStore.Delete(context.TODO(), autoencoding.MustNew(&user{ + Id: "1", + })) + if err != nil { + t.Fatal(err) + } + err = pfxStore.Delete(context.TODO(), autoencoding.MustNew(&product{ + Id: "1", + })) + if err != nil { + t.Fatal(err) + } + }) + t.Run("Read individual stores after delete", func(t *testing.T) { + u1 := &user{Id: "1"} + err := st1.Read(context.TODO(), autoencoding.MustNew(u1)) + if err == nil { + t.Fatal("expected error after delete") + } + err = st2.Read(context.TODO(), autoencoding.MustNew(u1)) + if err == nil { + t.Fatal("expected error from second store for user") + } + p1 := &product{Id: "1"} + err = st1.Read(context.TODO(), autoencoding.MustNew(p1)) + if err == nil { + t.Fatal("expected error from first store for product") + } + err = st2.Read(context.TODO(), autoencoding.MustNew(p1)) + if err == nil { + t.Fatal("expected error after delete") + } + }) +} diff --git a/sync/sync.go b/sync/sync.go index 64580eb..f17b0d7 100644 --- a/sync/sync.go +++ b/sync/sync.go @@ -23,28 +23,28 @@ func (t *syncStore) Create(ctx context.Context, item gkvstore.Item) error { t.mu.Lock() defer t.mu.Unlock() - return t.Store.Create(context.TODO(), item) + return t.Store.Create(ctx, item) } func (t *syncStore) Read(ctx context.Context, item gkvstore.Item) error { t.mu.RLock() defer t.mu.RUnlock() - return t.Store.Read(context.TODO(), item) + return t.Store.Read(ctx, item) } func (t *syncStore) Update(ctx context.Context, item gkvstore.Item) error { t.mu.Lock() defer t.mu.Unlock() - return t.Store.Update(context.TODO(), item) + return t.Store.Update(ctx, item) } func (t *syncStore) Delete(ctx context.Context, item gkvstore.Item) error { t.mu.Lock() defer t.mu.Unlock() - return t.Store.Delete(context.TODO(), item) + return t.Store.Delete(ctx, item) } func (t *syncStore) List(ctx context.Context, factory gkvstore.Factory, opts gkvstore.ListOpt) (<-chan *gkvstore.Result, error) { diff --git 
a/testsuite/benchmark.go b/testsuite/benchmark.go index f31fe93..dc49816 100644 --- a/testsuite/benchmark.go +++ b/testsuite/benchmark.go @@ -1,18 +1,17 @@ package testsuite import ( - "bytes" "context" - "encoding/gob" "fmt" "math/rand" "testing" "github.com/plexsysio/gkvstore" + "github.com/plexsysio/gkvstore/autoencoding" ) type testBStruct struct { - Key string + Key string `aenc:"id"` Val []byte Size int64 Created int64 @@ -20,127 +19,112 @@ type testBStruct struct { } func newStruct(size int) *testBStruct { - return &testBStruct{Size: int64(size)} + buf := make([]byte, size) + return &testBStruct{Val: buf, Size: int64(size)} } -func (t *testBStruct) GetNamespace() string { return "testStruct" } - -func (t *testBStruct) GetID() string { return t.Key } - -func (t *testBStruct) SetID(id string) { t.Key = id } +func (t *testBStruct) fillRandom() { + _, _ = rand.Read(t.Val) +} -func (t *testBStruct) GetCreated() int64 { return t.Created } +func (t *testBStruct) edit(buf []byte) { + idx := rand.Intn(len(t.Val) - len(buf) - 1) + copy(t.Val[idx:idx+len(buf)], buf) +} -func (t *testBStruct) SetCreated(created int64) { t.Created = created } +func (t *testBStruct) setKey(key string) { + t.Key = key +} -func (t *testBStruct) GetUpdated() int64 { return t.Updated } +func (t *testBStruct) getKey() string { + return t.Key +} -func (t *testBStruct) SetUpdated(updated int64) { t.Updated = updated } +type testCStruct struct { + Key string `json:"key" aenc:"id"` + Val []byte `json:"val"` + Size int64 `json:"size"` +} -func (t *testBStruct) Marshal() ([]byte, error) { - if t.Val == nil { - t.Val = make([]byte, t.Size-32) - _, err := rand.Read(t.Val) - if err != nil { - return nil, err - } - } - var buf bytes.Buffer - defer buf.Reset() - err := gob.NewEncoder(&buf).Encode(t) - if err != nil { - return nil, err - } - return buf.Bytes(), nil +func newStructC(size int) *testCStruct { + buf := make([]byte, size) + return &testCStruct{Val: buf, Size: int64(size)} } -func (t *testBStruct) Unmarshal(buf []byte) error { - bw := bytes.NewBuffer(buf) - defer bw.Reset() - return gob.NewDecoder(bw).Decode(t) +func (t *testCStruct) fillRandom() { + _, _ = rand.Read(t.Val) } -func (t *testBStruct) edit(buf []byte) { +func (t *testCStruct) edit(buf []byte) { idx := rand.Intn(len(t.Val) - len(buf) - 1) copy(t.Val[idx:idx+len(buf)], buf) } -func (t *testBStruct) setKey(key string) { +func (t *testCStruct) setKey(key string) { t.Key = key } -type testCStruct struct { - Key string - Val []byte - Size int64 +func (t *testCStruct) getKey() string { + return t.Key } -func newStructC(size int) *testCStruct { - return &testCStruct{Size: int64(size)} +type testDStruct struct { + Key string `msgpack:"key" aenc:"id"` + Val []byte `msgpack:"val"` + Size int64 `msgpack:"size"` } -func (t *testCStruct) GetNamespace() string { return "testStruct" } - -func (t *testCStruct) GetID() string { return t.Key } - -func (t *testCStruct) SetID(id string) { t.Key = id } +func newStructD(size int) *testDStruct { + buf := make([]byte, size) + _, _ = rand.Read(buf) + return &testDStruct{Val: buf, Size: int64(size)} +} -func (t *testCStruct) Marshal() ([]byte, error) { - if t.Val == nil { - t.Val = make([]byte, t.Size-32) - _, err := rand.Read(t.Val) - if err != nil { - return nil, err - } - } - var buf bytes.Buffer - defer buf.Reset() - err := gob.NewEncoder(&buf).Encode(t) - if err != nil { - return nil, err - } - return buf.Bytes(), nil +func (t *testDStruct) fillRandom() { + _, _ = rand.Read(t.Val) } -func (t *testCStruct) edit(buf []byte) { 
+func (t *testDStruct) edit(buf []byte) { idx := rand.Intn(len(t.Val) - len(buf) - 1) copy(t.Val[idx:idx+len(buf)], buf) } -func (t *testCStruct) setKey(key string) { +func (t *testDStruct) setKey(key string) { t.Key = key } +func (t *testDStruct) getKey() string { + return t.Key +} + type testStructHelper interface { + fillRandom() edit([]byte) setKey(string) + getKey() string } -func (t *testCStruct) Unmarshal(buf []byte) error { - bw := bytes.NewBuffer(buf) - defer bw.Reset() - return gob.NewDecoder(bw).Decode(t) -} - -func BenchmarkCreate(sb *testing.B, st gkvstore.Store, newStruct func() gkvstore.Item) { +func BenchmarkCreate(sb *testing.B, st gkvstore.Store, newStruct func() interface{}) { sb.ReportAllocs() sb.ResetTimer() for n := 0; n < sb.N; n++ { it := newStruct() - err := st.Create(context.TODO(), it) + it.(testStructHelper).fillRandom() + err := st.Create(context.TODO(), autoencoding.MustNew(it)) if err != nil { sb.Fatal(err) } } } -func BenchmarkUpdate(sb *testing.B, st gkvstore.Store, newStruct func() gkvstore.Item) { +func BenchmarkUpdate(sb *testing.B, st gkvstore.Store, newStruct func() interface{}) { - var items []gkvstore.Item + var items []interface{} for n := 0; n < sb.N; n++ { it := newStruct() - err := st.Create(context.TODO(), it) + it.(testStructHelper).fillRandom() + err := st.Create(context.TODO(), autoencoding.MustNew(it)) if err != nil { sb.Fatal(err) } @@ -158,21 +142,24 @@ func BenchmarkUpdate(sb *testing.B, st gkvstore.Store, newStruct func() gkvstore for _, v := range items { v.(testStructHelper).edit(editBuf) - err := st.Update(context.TODO(), v) + err := st.Update(context.TODO(), autoencoding.MustNew(v)) if err != nil { sb.Fatal(err) } } } -func BenchmarkRead(sb *testing.B, st gkvstore.Store, newStruct func() gkvstore.Item) { +func BenchmarkRead(sb *testing.B, st gkvstore.Store, newStruct func() interface{}) { + ids := []string{} for n := 0; n < sb.N; n++ { it := newStruct() - err := st.Create(context.TODO(), it) + it.(testStructHelper).fillRandom() + err := st.Create(context.TODO(), autoencoding.MustNew(it)) if err != nil { sb.Fatal(err) } + ids = append(ids, it.(testStructHelper).getKey()) } sb.ReportAllocs() @@ -180,58 +167,74 @@ func BenchmarkRead(sb *testing.B, st gkvstore.Store, newStruct func() gkvstore.I for n := 0; n < sb.N; n++ { it := newStruct() - it.(testStructHelper).setKey(fmt.Sprintf("%d", n+1)) + it.(testStructHelper).setKey(ids[0]) + ids = ids[1:] - err := st.Read(context.TODO(), it) + err := st.Read(context.TODO(), autoencoding.MustNew(it)) if err != nil { - sb.Fatal(err) + sb.Fatal(err, "key", fmt.Sprintf("%d", n+1)) } } } func BenchmarkSuite(b *testing.B, st gkvstore.Store) { - b.Run("With time tracker", func(sb1 *testing.B) { - sb1.Run("64B", func(sb2 *testing.B) { - sb2.Run("Create", func(sb3 *testing.B) { BenchmarkCreate(sb3, st, func() gkvstore.Item { return newStruct(64) }) }) - sb2.Run("Read", func(sb3 *testing.B) { BenchmarkRead(sb3, st, func() gkvstore.Item { return newStruct(64) }) }) - sb2.Run("Update", func(sb3 *testing.B) { BenchmarkUpdate(sb3, st, func() gkvstore.Item { return newStruct(64) }) }) - }) - sb1.Run("128B", func(sb2 *testing.B) { - sb2.Run("Create", func(sb3 *testing.B) { BenchmarkCreate(sb3, st, func() gkvstore.Item { return newStruct(128) }) }) - sb2.Run("Read", func(sb3 *testing.B) { BenchmarkRead(sb3, st, func() gkvstore.Item { return newStruct(128) }) }) - sb2.Run("Update", func(sb3 *testing.B) { BenchmarkUpdate(sb3, st, func() gkvstore.Item { return newStruct(128) }) }) - }) - sb1.Run("256B", func(sb2 
*testing.B) { - sb2.Run("Create", func(sb3 *testing.B) { BenchmarkCreate(sb3, st, func() gkvstore.Item { return newStruct(256) }) }) - sb2.Run("Read", func(sb3 *testing.B) { BenchmarkRead(sb3, st, func() gkvstore.Item { return newStruct(256) }) }) - sb2.Run("Update", func(sb3 *testing.B) { BenchmarkUpdate(sb3, st, func() gkvstore.Item { return newStruct(256) }) }) + for _, tc := range []struct { + Name string + Ctor func() interface{} + }{ + { + Name: "JSON 64B WithTimeTracker", + Ctor: func() interface{} { return newStruct(64) }, + }, + { + Name: "JSON 128B WithTimeTracker", + Ctor: func() interface{} { return newStruct(128) }, + }, + { + Name: "JSON 256B WithTimeTracker", + Ctor: func() interface{} { return newStruct(256) }, + }, + { + Name: "JSON 512B WithTimeTracker", + Ctor: func() interface{} { return newStruct(512) }, + }, + { + Name: "JSON 64B WithoutTimeTracker", + Ctor: func() interface{} { return newStructC(64) }, + }, + { + Name: "JSON 128B WithoutTimeTracker", + Ctor: func() interface{} { return newStructC(128) }, + }, + { + Name: "JSON 256B WithoutTimeTracker", + Ctor: func() interface{} { return newStructC(256) }, + }, + { + Name: "JSON 512B WithoutTimeTracker", + Ctor: func() interface{} { return newStructC(512) }, + }, + { + Name: "Msgpack 64B WithoutTimeTracker", + Ctor: func() interface{} { return newStructD(64) }, + }, + { + Name: "Msgpack 128B WithoutTimeTracker", + Ctor: func() interface{} { return newStructD(128) }, + }, + { + Name: "Msgpack 256B WithoutTimeTracker", + Ctor: func() interface{} { return newStructD(256) }, + }, + { + Name: "Msgpack 512B WithoutTimeTracker", + Ctor: func() interface{} { return newStructD(512) }, + }, + } { + b.Run(tc.Name, func(sb1 *testing.B) { + sb1.Run("Create", func(sb2 *testing.B) { BenchmarkCreate(sb2, st, tc.Ctor) }) + sb1.Run("Read", func(sb2 *testing.B) { BenchmarkRead(sb2, st, tc.Ctor) }) + sb1.Run("Update", func(sb2 *testing.B) { BenchmarkUpdate(sb2, st, tc.Ctor) }) }) - sb1.Run("512B", func(sb2 *testing.B) { - sb2.Run("Create", func(sb3 *testing.B) { BenchmarkCreate(sb3, st, func() gkvstore.Item { return newStruct(512) }) }) - sb2.Run("Read", func(sb3 *testing.B) { BenchmarkRead(sb3, st, func() gkvstore.Item { return newStruct(512) }) }) - sb2.Run("Update", func(sb3 *testing.B) { BenchmarkUpdate(sb3, st, func() gkvstore.Item { return newStruct(512) }) }) - }) - }) - b.Run("Without time tracker", func(sb1 *testing.B) { - sb1.Run("64B", func(sb2 *testing.B) { - sb2.Run("Create", func(sb3 *testing.B) { BenchmarkCreate(sb3, st, func() gkvstore.Item { return newStructC(64) }) }) - sb2.Run("Read", func(sb3 *testing.B) { BenchmarkRead(sb3, st, func() gkvstore.Item { return newStructC(64) }) }) - sb2.Run("Update", func(sb3 *testing.B) { BenchmarkUpdate(sb3, st, func() gkvstore.Item { return newStructC(64) }) }) - }) - sb1.Run("128B", func(sb2 *testing.B) { - sb2.Run("Create", func(sb3 *testing.B) { BenchmarkCreate(sb3, st, func() gkvstore.Item { return newStructC(128) }) }) - sb2.Run("Read", func(sb3 *testing.B) { BenchmarkRead(sb3, st, func() gkvstore.Item { return newStructC(128) }) }) - sb2.Run("Update", func(sb3 *testing.B) { BenchmarkUpdate(sb3, st, func() gkvstore.Item { return newStructC(128) }) }) - }) - sb1.Run("256B", func(sb2 *testing.B) { - sb2.Run("Create", func(sb3 *testing.B) { BenchmarkCreate(sb3, st, func() gkvstore.Item { return newStructC(256) }) }) - sb2.Run("Read", func(sb3 *testing.B) { BenchmarkRead(sb3, st, func() gkvstore.Item { return newStructC(256) }) }) - sb2.Run("Update", func(sb3 *testing.B) { 
BenchmarkUpdate(sb3, st, func() gkvstore.Item { return newStructC(256) }) }) - }) - sb1.Run("512B", func(sb2 *testing.B) { - sb2.Run("Create", func(sb3 *testing.B) { BenchmarkCreate(sb3, st, func() gkvstore.Item { return newStructC(512) }) }) - sb2.Run("Read", func(sb3 *testing.B) { BenchmarkRead(sb3, st, func() gkvstore.Item { return newStructC(512) }) }) - sb2.Run("Update", func(sb3 *testing.B) { BenchmarkUpdate(sb3, st, func() gkvstore.Item { return newStructC(512) }) }) - }) - }) + } }
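
Below are two usage sketches for the new packages, mirroring what autoencoding_test.go and prefix_test.go exercise. They are illustrative only: the `Article`, `user`, and `product` types and the `main` wrappers are hypothetical, not part of the patch, and the final read in the second sketch assumes the in-memory store hands out sequential IDs starting at "1", as the package tests do.

The first sketch shows how `autoencoding` wraps a plain struct: the type name becomes the namespace, the encoder is inferred from the tags on the first exported field (JSON here), `aenc:"id"` marks the ID field, and int64 `Created`/`Updated` fields make the wrapper a `gkvstore.TimeTracker`.

```go
package main

import (
	"fmt"

	"github.com/plexsysio/gkvstore"
	"github.com/plexsysio/gkvstore/autoencoding"
)

// Article is a hypothetical record: the struct name becomes the namespace,
// the json tags select the JSON encoder, `aenc:"id"` marks the ID field,
// and the int64 Created/Updated fields opt the item into time tracking.
type Article struct {
	Slug    string `json:"slug" aenc:"id"`
	Title   string `json:"title"`
	Created int64  `json:"created"`
	Updated int64  `json:"updated"`
}

func main() {
	it, err := autoencoding.New(&Article{Title: "hello"})
	if err != nil {
		panic(err)
	}

	// ID and namespace are resolved through reflection on the wrapped struct.
	it.(gkvstore.IDSetter).SetID("article-1")
	fmt.Println(it.GetNamespace(), it.GetID()) // Article article-1

	// Created/Updated make the wrapper a gkvstore.TimeTracker as well.
	_, ok := it.(gkvstore.TimeTracker)
	fmt.Println(ok) // true

	// Marshal/Unmarshal round-trip using the inferred JSON encoding.
	buf, err := it.Marshal()
	if err != nil {
		panic(err)
	}
	out := &Article{}
	if err := autoencoding.MustNew(out).Unmarshal(buf); err != nil {
		panic(err)
	}
	fmt.Println(out.Title) // hello
}
```

The second sketch composes the new `prefix` store with the existing `inmem` and `sync` stores: each mount serves the items whose namespace starts with its prefix, so `user` and `product` records land in different backing stores while callers keep using a single `gkvstore.Store`.

```go
package main

import (
	"context"
	"fmt"

	"github.com/plexsysio/gkvstore/autoencoding"
	"github.com/plexsysio/gkvstore/inmem"
	prefixstore "github.com/plexsysio/gkvstore/prefix"
	syncstore "github.com/plexsysio/gkvstore/sync"
)

// Hypothetical records: the type name doubles as the namespace, which is what
// the prefix mounts below match against.
type user struct {
	Id   string
	Name string
}

type product struct {
	Id    string
	Price int64
}

func main() {
	store := prefixstore.New(
		prefixstore.Mount{Prefix: "user", Store: inmem.New()},
		prefixstore.Mount{Prefix: "product", Store: syncstore.New(inmem.New())},
	)
	defer store.Close()

	ctx := context.TODO()

	// Each item is routed to the mount whose prefix matches its namespace.
	if err := store.Create(ctx, autoencoding.MustNew(&user{Name: "user1"})); err != nil {
		panic(err)
	}
	if err := store.Create(ctx, autoencoding.MustNew(&product{Price: 100})); err != nil {
		panic(err)
	}

	// Reads are routed the same way; the backing store resolves the ID.
	u := &user{Id: "1"}
	if err := store.Read(ctx, autoencoding.MustNew(u)); err != nil {
		panic(err)
	}
	fmt.Println(u.Name) // "user1", assuming the in-memory store assigns sequential IDs
}
```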