diff --git a/proto/pb/sync/sync.pb.go b/proto/pb/sync/sync.pb.go index 0657bb7c39d7..c0cbdefae460 100644 --- a/proto/pb/sync/sync.pb.go +++ b/proto/pb/sync/sync.pb.go @@ -314,12 +314,12 @@ type SyncGetChangeProofRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - StartRootHash []byte `protobuf:"bytes,1,opt,name=start_root_hash,json=startRootHash,proto3" json:"start_root_hash,omitempty"` - EndRootHash []byte `protobuf:"bytes,2,opt,name=end_root_hash,json=endRootHash,proto3" json:"end_root_hash,omitempty"` - StartKey []byte `protobuf:"bytes,3,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"` - EndKey []byte `protobuf:"bytes,4,opt,name=end_key,json=endKey,proto3" json:"end_key,omitempty"` - KeyLimit uint32 `protobuf:"varint,5,opt,name=key_limit,json=keyLimit,proto3" json:"key_limit,omitempty"` - BytesLimit uint32 `protobuf:"varint,6,opt,name=bytes_limit,json=bytesLimit,proto3" json:"bytes_limit,omitempty"` + StartRootHash []byte `protobuf:"bytes,1,opt,name=start_root_hash,json=startRootHash,proto3" json:"start_root_hash,omitempty"` + EndRootHash []byte `protobuf:"bytes,2,opt,name=end_root_hash,json=endRootHash,proto3" json:"end_root_hash,omitempty"` + StartKey *MaybeBytes `protobuf:"bytes,3,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"` + EndKey *MaybeBytes `protobuf:"bytes,4,opt,name=end_key,json=endKey,proto3" json:"end_key,omitempty"` + KeyLimit uint32 `protobuf:"varint,5,opt,name=key_limit,json=keyLimit,proto3" json:"key_limit,omitempty"` + BytesLimit uint32 `protobuf:"varint,6,opt,name=bytes_limit,json=bytesLimit,proto3" json:"bytes_limit,omitempty"` } func (x *SyncGetChangeProofRequest) Reset() { @@ -368,14 +368,14 @@ func (x *SyncGetChangeProofRequest) GetEndRootHash() []byte { return nil } -func (x *SyncGetChangeProofRequest) GetStartKey() []byte { +func (x *SyncGetChangeProofRequest) GetStartKey() *MaybeBytes { if x != nil { return x.StartKey } return nil } -func (x *SyncGetChangeProofRequest) GetEndKey() []byte { +func (x *SyncGetChangeProofRequest) GetEndKey() *MaybeBytes { if x != nil { return x.EndKey } @@ -482,11 +482,11 @@ type GetChangeProofRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - StartRootHash []byte `protobuf:"bytes,1,opt,name=start_root_hash,json=startRootHash,proto3" json:"start_root_hash,omitempty"` - EndRootHash []byte `protobuf:"bytes,2,opt,name=end_root_hash,json=endRootHash,proto3" json:"end_root_hash,omitempty"` - StartKey []byte `protobuf:"bytes,3,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"` - EndKey []byte `protobuf:"bytes,4,opt,name=end_key,json=endKey,proto3" json:"end_key,omitempty"` - KeyLimit uint32 `protobuf:"varint,5,opt,name=key_limit,json=keyLimit,proto3" json:"key_limit,omitempty"` + StartRootHash []byte `protobuf:"bytes,1,opt,name=start_root_hash,json=startRootHash,proto3" json:"start_root_hash,omitempty"` + EndRootHash []byte `protobuf:"bytes,2,opt,name=end_root_hash,json=endRootHash,proto3" json:"end_root_hash,omitempty"` + StartKey *MaybeBytes `protobuf:"bytes,3,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"` + EndKey *MaybeBytes `protobuf:"bytes,4,opt,name=end_key,json=endKey,proto3" json:"end_key,omitempty"` + KeyLimit uint32 `protobuf:"varint,5,opt,name=key_limit,json=keyLimit,proto3" json:"key_limit,omitempty"` } func (x *GetChangeProofRequest) Reset() { @@ -535,14 +535,14 @@ func (x *GetChangeProofRequest) GetEndRootHash() []byte { return nil } -func (x *GetChangeProofRequest) 
GetStartKey() []byte { +func (x *GetChangeProofRequest) GetStartKey() *MaybeBytes { if x != nil { return x.StartKey } return nil } -func (x *GetChangeProofRequest) GetEndKey() []byte { +func (x *GetChangeProofRequest) GetEndKey() *MaybeBytes { if x != nil { return x.EndKey } @@ -562,8 +562,8 @@ type VerifyChangeProofRequest struct { unknownFields protoimpl.UnknownFields Proof *ChangeProof `protobuf:"bytes,1,opt,name=proof,proto3" json:"proof,omitempty"` - StartKey []byte `protobuf:"bytes,2,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"` - EndKey []byte `protobuf:"bytes,3,opt,name=end_key,json=endKey,proto3" json:"end_key,omitempty"` + StartKey *MaybeBytes `protobuf:"bytes,2,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"` + EndKey *MaybeBytes `protobuf:"bytes,3,opt,name=end_key,json=endKey,proto3" json:"end_key,omitempty"` ExpectedRootHash []byte `protobuf:"bytes,4,opt,name=expected_root_hash,json=expectedRootHash,proto3" json:"expected_root_hash,omitempty"` } @@ -606,14 +606,14 @@ func (x *VerifyChangeProofRequest) GetProof() *ChangeProof { return nil } -func (x *VerifyChangeProofRequest) GetStartKey() []byte { +func (x *VerifyChangeProofRequest) GetStartKey() *MaybeBytes { if x != nil { return x.StartKey } return nil } -func (x *VerifyChangeProofRequest) GetEndKey() []byte { +func (x *VerifyChangeProofRequest) GetEndKey() *MaybeBytes { if x != nil { return x.EndKey } @@ -729,11 +729,11 @@ type SyncGetRangeProofRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - RootHash []byte `protobuf:"bytes,1,opt,name=root_hash,json=rootHash,proto3" json:"root_hash,omitempty"` - StartKey []byte `protobuf:"bytes,2,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"` - EndKey []byte `protobuf:"bytes,3,opt,name=end_key,json=endKey,proto3" json:"end_key,omitempty"` - KeyLimit uint32 `protobuf:"varint,4,opt,name=key_limit,json=keyLimit,proto3" json:"key_limit,omitempty"` - BytesLimit uint32 `protobuf:"varint,5,opt,name=bytes_limit,json=bytesLimit,proto3" json:"bytes_limit,omitempty"` + RootHash []byte `protobuf:"bytes,1,opt,name=root_hash,json=rootHash,proto3" json:"root_hash,omitempty"` + StartKey *MaybeBytes `protobuf:"bytes,2,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"` + EndKey *MaybeBytes `protobuf:"bytes,3,opt,name=end_key,json=endKey,proto3" json:"end_key,omitempty"` + KeyLimit uint32 `protobuf:"varint,4,opt,name=key_limit,json=keyLimit,proto3" json:"key_limit,omitempty"` + BytesLimit uint32 `protobuf:"varint,5,opt,name=bytes_limit,json=bytesLimit,proto3" json:"bytes_limit,omitempty"` } func (x *SyncGetRangeProofRequest) Reset() { @@ -775,14 +775,14 @@ func (x *SyncGetRangeProofRequest) GetRootHash() []byte { return nil } -func (x *SyncGetRangeProofRequest) GetStartKey() []byte { +func (x *SyncGetRangeProofRequest) GetStartKey() *MaybeBytes { if x != nil { return x.StartKey } return nil } -func (x *SyncGetRangeProofRequest) GetEndKey() []byte { +func (x *SyncGetRangeProofRequest) GetEndKey() *MaybeBytes { if x != nil { return x.EndKey } @@ -808,10 +808,10 @@ type GetRangeProofRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - RootHash []byte `protobuf:"bytes,1,opt,name=root_hash,json=rootHash,proto3" json:"root_hash,omitempty"` - StartKey []byte `protobuf:"bytes,2,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"` - EndKey []byte `protobuf:"bytes,3,opt,name=end_key,json=endKey,proto3" json:"end_key,omitempty"` - KeyLimit uint32 
`protobuf:"varint,4,opt,name=key_limit,json=keyLimit,proto3" json:"key_limit,omitempty"` + RootHash []byte `protobuf:"bytes,1,opt,name=root_hash,json=rootHash,proto3" json:"root_hash,omitempty"` + StartKey *MaybeBytes `protobuf:"bytes,2,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"` + EndKey *MaybeBytes `protobuf:"bytes,3,opt,name=end_key,json=endKey,proto3" json:"end_key,omitempty"` + KeyLimit uint32 `protobuf:"varint,4,opt,name=key_limit,json=keyLimit,proto3" json:"key_limit,omitempty"` } func (x *GetRangeProofRequest) Reset() { @@ -853,14 +853,14 @@ func (x *GetRangeProofRequest) GetRootHash() []byte { return nil } -func (x *GetRangeProofRequest) GetStartKey() []byte { +func (x *GetRangeProofRequest) GetStartKey() *MaybeBytes { if x != nil { return x.StartKey } return nil } -func (x *GetRangeProofRequest) GetEndKey() []byte { +func (x *GetRangeProofRequest) GetEndKey() *MaybeBytes { if x != nil { return x.EndKey } @@ -926,7 +926,7 @@ type CommitRangeProofRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - StartKey []byte `protobuf:"bytes,1,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"` + StartKey *MaybeBytes `protobuf:"bytes,1,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"` RangeProof *RangeProof `protobuf:"bytes,2,opt,name=range_proof,json=rangeProof,proto3" json:"range_proof,omitempty"` } @@ -962,7 +962,7 @@ func (*CommitRangeProofRequest) Descriptor() ([]byte, []int) { return file_sync_sync_proto_rawDescGZIP(), []int{14} } -func (x *CommitRangeProofRequest) GetStartKey() []byte { +func (x *CommitRangeProofRequest) GetStartKey() *MaybeBytes { if x != nil { return x.StartKey } @@ -1428,179 +1428,191 @@ var file_sync_sync_proto_rawDesc = []byte{ 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x25, 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, - 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0xdb, + 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0xff, 0x01, 0x0a, 0x19, 0x53, 0x79, 0x6e, 0x63, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0d, 0x73, 0x74, 0x61, 0x72, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x22, 0x0a, 0x0d, 0x65, 0x6e, 0x64, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x65, 0x6e, 0x64, - 0x52, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x1b, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, - 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x73, 0x74, 0x61, - 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x17, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, 0x65, 0x79, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x1b, - 0x0a, 0x09, 0x6b, 0x65, 0x79, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x0d, 0x52, 0x08, 0x6b, 0x65, 0x79, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x62, - 0x79, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0d, - 0x52, 0x0a, 0x62, 0x79, 0x74, 
0x65, 0x73, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0x95, 0x01, 0x0a, - 0x1a, 0x53, 0x79, 0x6e, 0x63, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, - 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x36, 0x0a, 0x0c, 0x63, - 0x68, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x11, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, - 0x72, 0x6f, 0x6f, 0x66, 0x48, 0x00, 0x52, 0x0b, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, - 0x6f, 0x6f, 0x66, 0x12, 0x33, 0x0a, 0x0b, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x70, 0x72, 0x6f, - 0x6f, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, - 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x48, 0x00, 0x52, 0x0a, 0x72, 0x61, - 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xb6, 0x01, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, - 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x26, - 0x0a, 0x0f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, - 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0d, 0x73, 0x74, 0x61, 0x72, 0x74, 0x52, 0x6f, - 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x22, 0x0a, 0x0d, 0x65, 0x6e, 0x64, 0x5f, 0x72, 0x6f, - 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x65, - 0x6e, 0x64, 0x52, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x1b, 0x0a, 0x09, 0x73, 0x74, - 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x73, - 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x17, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, - 0x65, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, - 0x12, 0x1b, 0x0a, 0x09, 0x6b, 0x65, 0x79, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x05, 0x20, - 0x01, 0x28, 0x0d, 0x52, 0x08, 0x6b, 0x65, 0x79, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0xa7, 0x01, - 0x0a, 0x18, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, - 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x05, 0x70, 0x72, - 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x73, 0x79, 0x6e, 0x63, - 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x05, 0x70, 0x72, - 0x6f, 0x6f, 0x66, 0x12, 0x1b, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, - 0x12, 0x17, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x12, 0x65, 0x78, 0x70, - 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x10, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, - 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x22, 0x31, 0x0a, 0x19, 0x56, 0x65, 0x72, 0x69, 0x66, - 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x43, 0x0a, 0x18, 0x43, 0x6f, - 0x6d, 0x6d, 0x69, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 
0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, + 0x52, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x2d, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, + 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, + 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x08, 0x73, + 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, + 0x65, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, + 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, + 0x65, 0x79, 0x12, 0x1b, 0x0a, 0x09, 0x6b, 0x65, 0x79, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x6b, 0x65, 0x79, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x12, + 0x1f, 0x0a, 0x0b, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x62, 0x79, 0x74, 0x65, 0x73, 0x4c, 0x69, 0x6d, 0x69, 0x74, + 0x22, 0x95, 0x01, 0x0a, 0x1a, 0x53, 0x79, 0x6e, 0x63, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, + 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x36, 0x0a, 0x0c, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, 0x68, 0x61, - 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x22, - 0xab, 0x01, 0x0a, 0x18, 0x53, 0x79, 0x6e, 0x63, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, - 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, - 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, - 0x08, 0x72, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x1b, 0x0a, 0x09, 0x73, 0x74, 0x61, - 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x73, 0x74, - 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x17, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, 0x65, - 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, + 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x48, 0x00, 0x52, 0x0b, 0x63, 0x68, 0x61, 0x6e, + 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x33, 0x0a, 0x0b, 0x72, 0x61, 0x6e, 0x67, 0x65, + 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, + 0x79, 0x6e, 0x63, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x48, 0x00, + 0x52, 0x0a, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x42, 0x0a, 0x0a, 0x08, + 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xda, 0x01, 0x0a, 0x15, 0x47, 0x65, 0x74, + 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, + 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0d, 0x73, 0x74, 0x61, + 0x72, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x22, 0x0a, 0x0d, 0x65, 0x6e, + 0x64, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x0b, 0x65, 0x6e, 0x64, 0x52, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x2d, + 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 
0x20, 0x01, 0x28, + 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, + 0x74, 0x65, 0x73, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x29, 0x0a, + 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, + 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, + 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x1b, 0x0a, 0x09, 0x6b, 0x65, 0x79, 0x5f, + 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x6b, 0x65, 0x79, + 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0xcb, 0x01, 0x0a, 0x18, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, + 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x27, 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x11, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, + 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x2d, 0x0a, 0x09, 0x73, + 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, + 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, + 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x07, 0x65, 0x6e, + 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, + 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x65, + 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x12, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, + 0x64, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x10, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x6f, 0x6f, 0x74, 0x48, + 0x61, 0x73, 0x68, 0x22, 0x31, 0x0a, 0x19, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x43, 0x68, 0x61, + 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x43, 0x0a, 0x18, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x27, 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x11, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, + 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0xcf, 0x01, 0x0a, 0x18, + 0x53, 0x79, 0x6e, 0x63, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, + 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x72, 0x6f, 0x6f, 0x74, + 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x72, 0x6f, 0x6f, + 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x2d, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, + 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, + 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, + 0x74, 0x4b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, + 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x1b, 
0x0a, 0x09, 0x6b, 0x65, 0x79, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x6b, 0x65, 0x79, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x0d, 0x52, 0x0a, 0x62, 0x79, 0x74, 0x65, 0x73, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0x86, 0x01, + 0x0d, 0x52, 0x0a, 0x62, 0x79, 0x74, 0x65, 0x73, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0xaa, 0x01, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x72, 0x6f, 0x6f, 0x74, 0x48, - 0x61, 0x73, 0x68, 0x12, 0x1b, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, - 0x12, 0x17, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x1b, 0x0a, 0x09, 0x6b, 0x65, 0x79, - 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x6b, 0x65, - 0x79, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0x3f, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, - 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x26, 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, - 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, - 0x52, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0x69, 0x0a, 0x17, 0x43, 0x6f, 0x6d, 0x6d, 0x69, - 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, - 0x31, 0x0a, 0x0b, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x52, 0x61, 0x6e, 0x67, - 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x0a, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, - 0x6f, 0x66, 0x22, 0xd0, 0x01, 0x0a, 0x0b, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, - 0x6f, 0x66, 0x12, 0x2f, 0x0a, 0x14, 0x68, 0x61, 0x64, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x5f, - 0x69, 0x6e, 0x5f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, - 0x52, 0x11, 0x68, 0x61, 0x64, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x49, 0x6e, 0x48, 0x69, 0x73, 0x74, - 0x6f, 0x72, 0x79, 0x12, 0x30, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x70, 0x72, 0x6f, - 0x6f, 0x66, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, - 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, - 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x2c, 0x0a, 0x09, 0x65, 0x6e, 0x64, 0x5f, 0x70, 0x72, 0x6f, - 0x6f, 0x66, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, - 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x08, 0x65, 0x6e, 0x64, 0x50, 0x72, - 0x6f, 0x6f, 0x66, 0x12, 0x30, 0x0a, 0x0b, 0x6b, 0x65, 0x79, 0x5f, 0x63, 0x68, 0x61, 0x6e, 0x67, - 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, - 0x4b, 0x65, 0x79, 0x43, 0x68, 0x61, 0x6e, 
0x67, 0x65, 0x52, 0x0a, 0x6b, 0x65, 0x79, 0x43, 0x68, - 0x61, 0x6e, 0x67, 0x65, 0x73, 0x22, 0x85, 0x01, 0x0a, 0x0a, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, - 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x25, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x6f, 0x66, - 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x21, 0x0a, 0x03, 0x65, - 0x6e, 0x64, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, - 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x2d, - 0x0a, 0x0a, 0x6b, 0x65, 0x79, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4b, 0x65, 0x79, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x52, 0x09, 0x6b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0xe1, 0x01, - 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x26, 0x0a, 0x03, 0x6b, - 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, - 0x53, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x50, 0x61, 0x74, 0x68, 0x52, 0x03, - 0x6b, 0x65, 0x79, 0x12, 0x34, 0x0a, 0x0d, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x6f, 0x72, 0x5f, - 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, - 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x0b, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x4f, 0x72, 0x48, 0x61, 0x73, 0x68, 0x12, 0x39, 0x0a, 0x08, 0x63, 0x68, 0x69, - 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x79, - 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x2e, 0x43, 0x68, 0x69, - 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x63, 0x68, 0x69, 0x6c, - 0x64, 0x72, 0x65, 0x6e, 0x1a, 0x3b, 0x0a, 0x0d, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0d, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, - 0x01, 0x22, 0x45, 0x0a, 0x09, 0x4b, 0x65, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x10, - 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x26, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, - 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x4b, 0x0a, 0x0e, 0x53, 0x65, 0x72, 0x69, - 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x50, 0x61, 0x74, 0x68, 0x12, 0x23, 0x0a, 0x0d, 0x6e, 0x69, - 0x62, 0x62, 0x6c, 0x65, 0x5f, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x04, 0x52, 0x0c, 0x6e, 0x69, 0x62, 0x62, 0x6c, 0x65, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, - 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x41, 0x0a, 0x0a, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, - 0x74, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x69, 0x73, 0x5f, - 0x6e, 0x6f, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, 0x01, 
0x28, 0x08, 0x52, 0x09, 0x69, - 0x73, 0x4e, 0x6f, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x22, 0x32, 0x0a, 0x08, 0x4b, 0x65, 0x79, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x32, 0xff, 0x03, 0x0a, - 0x02, 0x44, 0x42, 0x12, 0x44, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, - 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1b, 0x2e, 0x73, - 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, - 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x39, 0x0a, 0x08, 0x47, 0x65, 0x74, - 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x15, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, - 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x73, - 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x40, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, - 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1b, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, - 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x11, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, - 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x54, 0x0a, 0x11, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, - 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1e, 0x2e, 0x73, 0x79, - 0x6e, 0x63, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, - 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x73, 0x79, - 0x6e, 0x63, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, - 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4b, 0x0a, 0x11, - 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, - 0x66, 0x12, 0x1e, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x43, - 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x48, 0x0a, 0x0d, 0x47, 0x65, 0x74, - 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1a, 0x2e, 0x73, 0x79, 0x6e, - 0x63, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, + 0x61, 0x73, 0x68, 0x12, 0x2d, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, + 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4b, + 0x65, 0x79, 0x12, 0x29, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, + 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x1b, 
0x0a, + 0x09, 0x6b, 0x65, 0x79, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, + 0x52, 0x08, 0x6b, 0x65, 0x79, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0x3f, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x49, 0x0a, 0x10, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x52, 0x61, 0x6e, - 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1d, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, + 0x6e, 0x73, 0x65, 0x12, 0x26, 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, + 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0x7b, 0x0a, 0x17, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x42, 0x2f, - 0x5a, 0x2d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x76, 0x61, - 0x2d, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x61, 0x76, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x67, - 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x62, 0x2f, 0x73, 0x79, 0x6e, 0x63, 0x62, - 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, + 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, + 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x08, 0x73, 0x74, 0x61, + 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x31, 0x0a, 0x0b, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x70, + 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, + 0x63, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x0a, 0x72, 0x61, + 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0xd0, 0x01, 0x0a, 0x0b, 0x43, 0x68, 0x61, + 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x2f, 0x0a, 0x14, 0x68, 0x61, 0x64, 0x5f, + 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x5f, 0x69, 0x6e, 0x5f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x68, 0x61, 0x64, 0x52, 0x6f, 0x6f, 0x74, 0x73, + 0x49, 0x6e, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x30, 0x0a, 0x0b, 0x73, 0x74, 0x61, + 0x72, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, + 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, + 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x2c, 0x0a, 0x09, 0x65, + 0x6e, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, + 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, + 0x08, 0x65, 0x6e, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x30, 0x0a, 0x0b, 0x6b, 0x65, 0x79, + 0x5f, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, + 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4b, 0x65, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x52, + 0x0a, 0x6b, 0x65, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x22, 0x85, 0x01, 0x0a, 0x0a, + 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x25, 0x0a, 0x05, 0x73, 0x74, + 0x61, 0x72, 0x74, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 
0x73, 0x79, 0x6e, 0x63, + 0x2e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, + 0x74, 0x12, 0x21, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, + 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, + 0x03, 0x65, 0x6e, 0x64, 0x12, 0x2d, 0x0a, 0x0a, 0x6b, 0x65, 0x79, 0x5f, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, + 0x4b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x09, 0x6b, 0x65, 0x79, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x73, 0x22, 0xe1, 0x01, 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, + 0x65, 0x12, 0x26, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, + 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x53, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, + 0x50, 0x61, 0x74, 0x68, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x34, 0x0a, 0x0d, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x5f, 0x6f, 0x72, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, + 0x65, 0x73, 0x52, 0x0b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x4f, 0x72, 0x48, 0x61, 0x73, 0x68, 0x12, + 0x39, 0x0a, 0x08, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, + 0x64, 0x65, 0x2e, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x45, 0x6e, 0x74, 0x72, 0x79, + 0x52, 0x08, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x1a, 0x3b, 0x0a, 0x0d, 0x43, 0x68, + 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, + 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x45, 0x0a, 0x09, 0x4b, 0x65, 0x79, 0x43, 0x68, + 0x61, 0x6e, 0x67, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x26, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, + 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x4b, + 0x0a, 0x0e, 0x53, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x50, 0x61, 0x74, 0x68, + 0x12, 0x23, 0x0a, 0x0d, 0x6e, 0x69, 0x62, 0x62, 0x6c, 0x65, 0x5f, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0c, 0x6e, 0x69, 0x62, 0x62, 0x6c, 0x65, 0x4c, + 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x41, 0x0a, 0x0a, 0x4d, + 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, + 0x1d, 0x0a, 0x0a, 0x69, 0x73, 0x5f, 0x6e, 0x6f, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x08, 0x52, 0x09, 0x69, 0x73, 0x4e, 0x6f, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x22, 0x32, + 0x0a, 0x08, 0x4b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, + 
0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x32, 0xff, 0x03, 0x0a, 0x02, 0x44, 0x42, 0x12, 0x44, 0x0a, 0x0d, 0x47, 0x65, 0x74, + 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, + 0x74, 0x79, 0x1a, 0x1b, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x72, + 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x39, 0x0a, 0x08, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x15, 0x2e, 0x73, 0x79, + 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, + 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x40, 0x0a, 0x0e, 0x47, 0x65, + 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1b, 0x2e, 0x73, + 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, + 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x11, 0x2e, 0x73, 0x79, 0x6e, 0x63, + 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x54, 0x0a, 0x11, + 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, + 0x66, 0x12, 0x1e, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x43, + 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x1f, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x43, + 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x4b, 0x0a, 0x11, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x43, 0x68, 0x61, 0x6e, + 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1e, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, + 0x48, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, + 0x12, 0x1a, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, + 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x73, + 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, + 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x49, 0x0a, 0x10, 0x43, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1d, 0x2e, + 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, + 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, + 0x6d, 0x70, 0x74, 0x79, 0x42, 0x2f, 0x5a, 0x2d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, + 0x6f, 0x6d, 0x2f, 0x61, 0x76, 0x61, 0x2d, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x61, 0x76, 0x61, 0x6c, + 0x61, 0x6e, 0x63, 0x68, 0x65, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x62, + 0x2f, 0x73, 0x79, 0x6e, 
0x63, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1648,41 +1660,52 @@ var file_sync_sync_proto_depIdxs = []int32{ 4, // 2: sync.GetProofResponse.proof:type_name -> sync.Proof 20, // 3: sync.Proof.value:type_name -> sync.MaybeBytes 17, // 4: sync.Proof.proof:type_name -> sync.ProofNode - 15, // 5: sync.SyncGetChangeProofResponse.change_proof:type_name -> sync.ChangeProof - 16, // 6: sync.SyncGetChangeProofResponse.range_proof:type_name -> sync.RangeProof - 15, // 7: sync.VerifyChangeProofRequest.proof:type_name -> sync.ChangeProof - 15, // 8: sync.CommitChangeProofRequest.proof:type_name -> sync.ChangeProof - 16, // 9: sync.GetRangeProofResponse.proof:type_name -> sync.RangeProof - 16, // 10: sync.CommitRangeProofRequest.range_proof:type_name -> sync.RangeProof - 17, // 11: sync.ChangeProof.start_proof:type_name -> sync.ProofNode - 17, // 12: sync.ChangeProof.end_proof:type_name -> sync.ProofNode - 18, // 13: sync.ChangeProof.key_changes:type_name -> sync.KeyChange - 17, // 14: sync.RangeProof.start:type_name -> sync.ProofNode - 17, // 15: sync.RangeProof.end:type_name -> sync.ProofNode - 21, // 16: sync.RangeProof.key_values:type_name -> sync.KeyValue - 19, // 17: sync.ProofNode.key:type_name -> sync.SerializedPath - 20, // 18: sync.ProofNode.value_or_hash:type_name -> sync.MaybeBytes - 22, // 19: sync.ProofNode.children:type_name -> sync.ProofNode.ChildrenEntry - 20, // 20: sync.KeyChange.value:type_name -> sync.MaybeBytes - 23, // 21: sync.DB.GetMerkleRoot:input_type -> google.protobuf.Empty - 2, // 22: sync.DB.GetProof:input_type -> sync.GetProofRequest - 7, // 23: sync.DB.GetChangeProof:input_type -> sync.GetChangeProofRequest - 8, // 24: sync.DB.VerifyChangeProof:input_type -> sync.VerifyChangeProofRequest - 10, // 25: sync.DB.CommitChangeProof:input_type -> sync.CommitChangeProofRequest - 12, // 26: sync.DB.GetRangeProof:input_type -> sync.GetRangeProofRequest - 14, // 27: sync.DB.CommitRangeProof:input_type -> sync.CommitRangeProofRequest - 1, // 28: sync.DB.GetMerkleRoot:output_type -> sync.GetMerkleRootResponse - 3, // 29: sync.DB.GetProof:output_type -> sync.GetProofResponse - 15, // 30: sync.DB.GetChangeProof:output_type -> sync.ChangeProof - 9, // 31: sync.DB.VerifyChangeProof:output_type -> sync.VerifyChangeProofResponse - 23, // 32: sync.DB.CommitChangeProof:output_type -> google.protobuf.Empty - 13, // 33: sync.DB.GetRangeProof:output_type -> sync.GetRangeProofResponse - 23, // 34: sync.DB.CommitRangeProof:output_type -> google.protobuf.Empty - 28, // [28:35] is the sub-list for method output_type - 21, // [21:28] is the sub-list for method input_type - 21, // [21:21] is the sub-list for extension type_name - 21, // [21:21] is the sub-list for extension extendee - 0, // [0:21] is the sub-list for field type_name + 20, // 5: sync.SyncGetChangeProofRequest.start_key:type_name -> sync.MaybeBytes + 20, // 6: sync.SyncGetChangeProofRequest.end_key:type_name -> sync.MaybeBytes + 15, // 7: sync.SyncGetChangeProofResponse.change_proof:type_name -> sync.ChangeProof + 16, // 8: sync.SyncGetChangeProofResponse.range_proof:type_name -> sync.RangeProof + 20, // 9: sync.GetChangeProofRequest.start_key:type_name -> sync.MaybeBytes + 20, // 10: sync.GetChangeProofRequest.end_key:type_name -> sync.MaybeBytes + 15, // 11: sync.VerifyChangeProofRequest.proof:type_name -> sync.ChangeProof + 20, // 12: sync.VerifyChangeProofRequest.start_key:type_name -> sync.MaybeBytes + 20, // 13: sync.VerifyChangeProofRequest.end_key:type_name -> sync.MaybeBytes + 15, // 14: 
sync.CommitChangeProofRequest.proof:type_name -> sync.ChangeProof + 20, // 15: sync.SyncGetRangeProofRequest.start_key:type_name -> sync.MaybeBytes + 20, // 16: sync.SyncGetRangeProofRequest.end_key:type_name -> sync.MaybeBytes + 20, // 17: sync.GetRangeProofRequest.start_key:type_name -> sync.MaybeBytes + 20, // 18: sync.GetRangeProofRequest.end_key:type_name -> sync.MaybeBytes + 16, // 19: sync.GetRangeProofResponse.proof:type_name -> sync.RangeProof + 20, // 20: sync.CommitRangeProofRequest.start_key:type_name -> sync.MaybeBytes + 16, // 21: sync.CommitRangeProofRequest.range_proof:type_name -> sync.RangeProof + 17, // 22: sync.ChangeProof.start_proof:type_name -> sync.ProofNode + 17, // 23: sync.ChangeProof.end_proof:type_name -> sync.ProofNode + 18, // 24: sync.ChangeProof.key_changes:type_name -> sync.KeyChange + 17, // 25: sync.RangeProof.start:type_name -> sync.ProofNode + 17, // 26: sync.RangeProof.end:type_name -> sync.ProofNode + 21, // 27: sync.RangeProof.key_values:type_name -> sync.KeyValue + 19, // 28: sync.ProofNode.key:type_name -> sync.SerializedPath + 20, // 29: sync.ProofNode.value_or_hash:type_name -> sync.MaybeBytes + 22, // 30: sync.ProofNode.children:type_name -> sync.ProofNode.ChildrenEntry + 20, // 31: sync.KeyChange.value:type_name -> sync.MaybeBytes + 23, // 32: sync.DB.GetMerkleRoot:input_type -> google.protobuf.Empty + 2, // 33: sync.DB.GetProof:input_type -> sync.GetProofRequest + 7, // 34: sync.DB.GetChangeProof:input_type -> sync.GetChangeProofRequest + 8, // 35: sync.DB.VerifyChangeProof:input_type -> sync.VerifyChangeProofRequest + 10, // 36: sync.DB.CommitChangeProof:input_type -> sync.CommitChangeProofRequest + 12, // 37: sync.DB.GetRangeProof:input_type -> sync.GetRangeProofRequest + 14, // 38: sync.DB.CommitRangeProof:input_type -> sync.CommitRangeProofRequest + 1, // 39: sync.DB.GetMerkleRoot:output_type -> sync.GetMerkleRootResponse + 3, // 40: sync.DB.GetProof:output_type -> sync.GetProofResponse + 15, // 41: sync.DB.GetChangeProof:output_type -> sync.ChangeProof + 9, // 42: sync.DB.VerifyChangeProof:output_type -> sync.VerifyChangeProofResponse + 23, // 43: sync.DB.CommitChangeProof:output_type -> google.protobuf.Empty + 13, // 44: sync.DB.GetRangeProof:output_type -> sync.GetRangeProofResponse + 23, // 45: sync.DB.CommitRangeProof:output_type -> google.protobuf.Empty + 39, // [39:46] is the sub-list for method output_type + 32, // [32:39] is the sub-list for method input_type + 32, // [32:32] is the sub-list for extension type_name + 32, // [32:32] is the sub-list for extension extendee + 0, // [0:32] is the sub-list for field type_name } func init() { file_sync_sync_proto_init() } diff --git a/proto/sync/sync.proto b/proto/sync/sync.proto index 49be63b49a12..079f625e0cee 100644 --- a/proto/sync/sync.proto +++ b/proto/sync/sync.proto @@ -54,8 +54,8 @@ message Proof { message SyncGetChangeProofRequest { bytes start_root_hash = 1; bytes end_root_hash = 2; - bytes start_key = 3; - bytes end_key = 4; + MaybeBytes start_key = 3; + MaybeBytes end_key = 4; uint32 key_limit = 5; uint32 bytes_limit = 6; } @@ -70,15 +70,15 @@ message SyncGetChangeProofResponse { message GetChangeProofRequest { bytes start_root_hash = 1; bytes end_root_hash = 2; - bytes start_key = 3; - bytes end_key = 4; + MaybeBytes start_key = 3; + MaybeBytes end_key = 4; uint32 key_limit = 5; } message VerifyChangeProofRequest { ChangeProof proof = 1; - bytes start_key = 2; - bytes end_key = 3; + MaybeBytes start_key = 2; + MaybeBytes end_key = 3; bytes expected_root_hash = 4; } @@ 
-95,16 +95,16 @@ message CommitChangeProofRequest { // the response. GetRangeProof in the DB service doesn't. message SyncGetRangeProofRequest { bytes root_hash = 1; - bytes start_key = 2; - bytes end_key = 3; + MaybeBytes start_key = 2; + MaybeBytes end_key = 3; uint32 key_limit = 4; uint32 bytes_limit = 5; } message GetRangeProofRequest { bytes root_hash = 1; - bytes start_key = 2; - bytes end_key = 3; + MaybeBytes start_key = 2; + MaybeBytes end_key = 3; uint32 key_limit = 4; } @@ -113,7 +113,7 @@ message GetRangeProofResponse { } message CommitRangeProofRequest { - bytes start_key = 1; + MaybeBytes start_key = 1; RangeProof range_proof = 2; } diff --git a/x/merkledb/db.go b/x/merkledb/db.go index 1a1ddef6981e..5ff935989386 100644 --- a/x/merkledb/db.go +++ b/x/merkledb/db.go @@ -60,8 +60,8 @@ type ChangeProofer interface { ctx context.Context, startRootID ids.ID, endRootID ids.ID, - start []byte, - end []byte, + start Maybe[[]byte], + end Maybe[[]byte], maxLength int, ) (*ChangeProof, error) @@ -82,8 +82,8 @@ type ChangeProofer interface { VerifyChangeProof( ctx context.Context, proof *ChangeProof, - start []byte, - end []byte, + start Maybe[[]byte], + end Maybe[[]byte], expectedEndRootID ids.ID, ) error @@ -97,14 +97,14 @@ type RangeProofer interface { GetRangeProofAtRoot( ctx context.Context, rootID ids.ID, - start, - end []byte, + start Maybe[[]byte], + end Maybe[[]byte], maxLength int, ) (*RangeProof, error) // CommitRangeProof commits the key/value pairs within the [proof] to the db. // [start] is the smallest key in the range this [proof] covers. - CommitRangeProof(ctx context.Context, start []byte, proof *RangeProof) error + CommitRangeProof(ctx context.Context, start Maybe[[]byte], proof *RangeProof) error } type MerkleDB interface { @@ -308,7 +308,7 @@ func (db *merkleDB) CommitChangeProof(ctx context.Context, proof *ChangeProof) e return view.commitToDB(ctx) } -func (db *merkleDB) CommitRangeProof(ctx context.Context, start []byte, proof *RangeProof) error { +func (db *merkleDB) CommitRangeProof(ctx context.Context, start Maybe[[]byte], proof *RangeProof) error { db.commitLock.Lock() defer db.commitLock.Unlock() @@ -476,12 +476,13 @@ func (db *merkleDB) getProof(ctx context.Context, key []byte) (*Proof, error) { return view.getProof(ctx, key) } -// GetRangeProof returns a proof for the key/value pairs in this trie within the range -// [start, end]. +// GetRangeProof returns a proof for the key/value pairs in this trie within the range [start, end]. +// If [start] is Nothing, there's no lower bound on the range. +// If [end] is Nothing, there's no upper bound on the range. func (db *merkleDB) GetRangeProof( ctx context.Context, - start, - end []byte, + start Maybe[[]byte], + end Maybe[[]byte], maxLength int, ) (*RangeProof, error) { db.commitLock.RLock() @@ -492,11 +493,13 @@ func (db *merkleDB) GetRangeProof( // GetRangeProofAtRoot returns a proof for the key/value pairs in this trie within the range // [start, end] when the root of the trie was [rootID]. +// If [start] is Nothing, there's no lower bound on the range. +// If [end] is Nothing, there's no upper bound on the range. 
func (db *merkleDB) GetRangeProofAtRoot( ctx context.Context, rootID ids.ID, - start, - end []byte, + start Maybe[[]byte], + end Maybe[[]byte], maxLength int, ) (*RangeProof, error) { db.commitLock.RLock() @@ -509,8 +512,8 @@ func (db *merkleDB) GetRangeProofAtRoot( func (db *merkleDB) getRangeProofAtRoot( ctx context.Context, rootID ids.ID, - start, - end []byte, + start Maybe[[]byte], + end Maybe[[]byte], maxLength int, ) (*RangeProof, error) { if db.closed { @@ -531,11 +534,11 @@ func (db *merkleDB) GetChangeProof( ctx context.Context, startRootID ids.ID, endRootID ids.ID, - start []byte, - end []byte, + start Maybe[[]byte], + end Maybe[[]byte], maxLength int, ) (*ChangeProof, error) { - if len(end) > 0 && bytes.Compare(start, end) == 1 { + if start.hasValue && end.hasValue && bytes.Compare(start.value, end.value) > 0 { return nil, ErrStartAfterEnd } if startRootID == endRootID { @@ -584,7 +587,7 @@ func (db *merkleDB) GetChangeProof( largestKey := end if len(result.KeyChanges) > 0 { - largestKey = result.KeyChanges[len(result.KeyChanges)-1].Key + largestKey = Some(result.KeyChanges[len(result.KeyChanges)-1].Key) } // Since we hold [db.commitlock] we must still have sufficient @@ -594,16 +597,16 @@ func (db *merkleDB) GetChangeProof( return nil, err } - if len(largestKey) > 0 { - endProof, err := historicalView.getProof(ctx, largestKey) + if largestKey.hasValue { + endProof, err := historicalView.getProof(ctx, largestKey.value) if err != nil { return nil, err } result.EndProof = endProof.Path } - if len(start) > 0 { - startProof, err := historicalView.getProof(ctx, start) + if start.hasValue { + startProof, err := historicalView.getProof(ctx, start.value) if err != nil { return nil, err } @@ -957,11 +960,11 @@ func (*merkleDB) CommitToDB(context.Context) error { func (db *merkleDB) VerifyChangeProof( ctx context.Context, proof *ChangeProof, - start []byte, - end []byte, + start Maybe[[]byte], + end Maybe[[]byte], expectedEndRootID ids.ID, ) error { - if len(end) > 0 && bytes.Compare(start, end) > 0 { + if start.hasValue && end.hasValue && bytes.Compare(start.value, end.value) > 0 { return ErrStartAfterEnd } @@ -978,10 +981,10 @@ func (db *merkleDB) VerifyChangeProof( switch { case proof.Empty(): return ErrNoMerkleProof - case len(end) > 0 && len(proof.EndProof) == 0: + case end.hasValue && len(proof.EndProof) == 0: // We requested an end proof but didn't get one. return ErrNoEndProof - case len(start) > 0 && len(proof.StartProof) == 0 && len(proof.EndProof) == 0: + case start.hasValue && len(proof.StartProof) == 0 && len(proof.EndProof) == 0: // We requested a start proof but didn't get one. // Note that we also have to check that [proof.EndProof] is empty // to handle the case that the start proof is empty because all @@ -994,7 +997,7 @@ func (db *merkleDB) VerifyChangeProof( return err } - smallestPath := newPath(start) + smallestPath := newPath(start.value) // Make sure the start proof, if given, is well-formed. if err := verifyProofPath(proof.StartProof, smallestPath); err != nil { @@ -1003,15 +1006,16 @@ func (db *merkleDB) VerifyChangeProof( // Find the greatest key in [proof.KeyChanges] // Note that [proof.EndProof] is a proof for this key. - // [largestPath] is also used when we add children of proof nodes to [trie] below. largestKey := end if len(proof.KeyChanges) > 0 { // If [proof] has key-value pairs, we should insert children // greater than [end] to ancestors of the node containing [end] // so that we get the expected root ID. 
- largestKey = proof.KeyChanges[len(proof.KeyChanges)-1].Key + largestKey = Some(proof.KeyChanges[len(proof.KeyChanges)-1].Key) } - largestPath := newPath(largestKey) + + // Used when we add children of proof nodes to [trie] below. + largestPath := newPath(largestKey.value) // Make sure the end proof, if given, is well-formed. if err := verifyProofPath(proof.EndProof, largestPath); err != nil { @@ -1166,11 +1170,13 @@ func (db *merkleDB) initializeRootIfNeeded() (ids.ID, error) { } // Returns a view of the trie as it was when it had root [rootID] for keys within range [start, end]. +// If [start] is Nothing, there's no lower bound on the range. +// If [end] is Nothing, there's no upper bound on the range. // Assumes [db.commitLock] is read locked. func (db *merkleDB) getHistoricalViewForRange( rootID ids.ID, - start []byte, - end []byte, + start Maybe[[]byte], + end Maybe[[]byte], ) (*trieView, error) { currentRootID := db.getMerkleRoot() @@ -1189,17 +1195,17 @@ func (db *merkleDB) getHistoricalViewForRange( // Returns all keys in range [start, end] that aren't in [keySet]. // If [start] is nil, then the range has no lower bound. // If [end] is nil, then the range has no upper bound. -func (db *merkleDB) getKeysNotInSet(start, end []byte, keySet set.Set[string]) ([][]byte, error) { +func (db *merkleDB) getKeysNotInSet(start Maybe[[]byte], end Maybe[[]byte], keySet set.Set[string]) ([][]byte, error) { db.lock.RLock() defer db.lock.RUnlock() - it := db.NewIteratorWithStart(start) + it := db.NewIteratorWithStart(start.value) defer it.Release() keysNotInSet := make([][]byte, 0, keySet.Len()) for it.Next() { key := it.Key() - if len(end) != 0 && bytes.Compare(key, end) > 0 { + if !end.IsNothing() && bytes.Compare(key, end.value) > 0 { break } if !keySet.Contains(string(key)) { @@ -1314,7 +1320,7 @@ func (db *merkleDB) prepareChangeProofView(proof *ChangeProof) (*trieView, error // Returns a new view atop [db] with the key/value pairs in [proof.KeyValues] added and // any existing key-value pairs in the proof's range but not in the proof removed. // Assumes [db.commitLock] is locked. -func (db *merkleDB) prepareRangeProofView(start []byte, proof *RangeProof) (*trieView, error) { +func (db *merkleDB) prepareRangeProofView(start Maybe[[]byte], proof *RangeProof) (*trieView, error) { // Don't need to lock [view] because nobody else has a reference to it. 
view, err := db.newUntrackedView(len(proof.KeyValues)) if err != nil { @@ -1328,9 +1334,9 @@ func (db *merkleDB) prepareRangeProofView(start []byte, proof *RangeProof) (*tri } } - var largestKey []byte + largestKey := Nothing[[]byte]() if len(proof.KeyValues) > 0 { - largestKey = proof.KeyValues[len(proof.KeyValues)-1].Key + largestKey = Some(proof.KeyValues[len(proof.KeyValues)-1].Key) } keysToDelete, err := db.getKeysNotInSet(start, largestKey, keys) if err != nil { diff --git a/x/merkledb/db_test.go b/x/merkledb/db_test.go index e93521a9aa80..98ad9dcc05f4 100644 --- a/x/merkledb/db_test.go +++ b/x/merkledb/db_test.go @@ -259,13 +259,13 @@ func Test_MerkleDB_Commit_Proof_To_Empty_Trie(t *testing.T) { require.NoError(batch.Put([]byte("key3"), []byte("3"))) require.NoError(batch.Write()) - proof, err := db.GetRangeProof(context.Background(), []byte("key1"), []byte("key3"), 10) + proof, err := db.GetRangeProof(context.Background(), Some([]byte("key1")), Some([]byte("key3")), 10) require.NoError(err) freshDB, err := getBasicDB() require.NoError(err) - require.NoError(freshDB.CommitRangeProof(context.Background(), []byte("key1"), proof)) + require.NoError(freshDB.CommitRangeProof(context.Background(), Some([]byte("key1")), proof)) value, err := freshDB.Get([]byte("key2")) require.NoError(err) @@ -289,7 +289,7 @@ func Test_MerkleDB_Commit_Proof_To_Filled_Trie(t *testing.T) { require.NoError(batch.Put([]byte("key3"), []byte("3"))) require.NoError(batch.Write()) - proof, err := db.GetRangeProof(context.Background(), []byte("key1"), []byte("key3"), 10) + proof, err := db.GetRangeProof(context.Background(), Some([]byte("key1")), Some([]byte("key3")), 10) require.NoError(err) freshDB, err := getBasicDB() @@ -301,7 +301,7 @@ func Test_MerkleDB_Commit_Proof_To_Filled_Trie(t *testing.T) { require.NoError(batch.Put([]byte("key25"), []byte("5"))) require.NoError(batch.Write()) - require.NoError(freshDB.CommitRangeProof(context.Background(), []byte("key1"), proof)) + require.NoError(freshDB.CommitRangeProof(context.Background(), Some([]byte("key1")), proof)) value, err := freshDB.Get([]byte("key2")) require.NoError(err) @@ -768,12 +768,21 @@ func runRandDBTest(require *require.Assertions, r *rand.Rand, rt randTest) { if len(pastRoots) > 0 { root = pastRoots[r.Intn(len(pastRoots))] } - rangeProof, err := db.GetRangeProofAtRoot(context.Background(), root, step.key, step.value, 100) + startKey := Nothing[[]byte]() + if len(step.key) > 0 { + startKey = Some(step.key) + } + endKey := Nothing[[]byte]() + if len(step.value) > 0 { + endKey = Some(step.value) + } + rangeProof, err := db.GetRangeProofAtRoot(context.Background(), root, startKey, endKey, 100) require.NoError(err) + require.NoError(rangeProof.Verify( context.Background(), - step.key, - step.value, + startKey, + endKey, root, )) require.LessOrEqual(len(rangeProof.KeyValues), 100) @@ -783,7 +792,17 @@ func runRandDBTest(require *require.Assertions, r *rand.Rand, rt randTest) { if len(pastRoots) > 1 { root = pastRoots[r.Intn(len(pastRoots))] } - changeProof, err := db.GetChangeProof(context.Background(), startRoot, root, step.key, step.value, 100) + + startKey := Nothing[[]byte]() + if len(step.key) > 0 { + startKey = Some(step.key) + } + endKey := Nothing[[]byte]() + if len(step.value) > 0 { + endKey = Some(step.value) + } + + changeProof, err := db.GetChangeProof(context.Background(), startRoot, root, startKey, endKey, 100) if startRoot == root { require.ErrorIs(err, errSameRoot) continue @@ -794,8 +813,8 @@ func runRandDBTest(require 
*require.Assertions, r *rand.Rand, rt randTest) { require.NoError(changeProofDB.VerifyChangeProof( context.Background(), changeProof, - step.key, - step.value, + startKey, + endKey, root, )) require.LessOrEqual(len(changeProof.KeyChanges), 100) diff --git a/x/merkledb/history.go b/x/merkledb/history.go index 870c61848964..115931c04302 100644 --- a/x/merkledb/history.go +++ b/x/merkledb/history.go @@ -78,7 +78,15 @@ func newTrieHistory(maxHistoryLookback int) *trieHistory { // Returns up to [maxLength] key-value pair changes with keys in [start, end] that // occurred between [startRoot] and [endRoot]. -func (th *trieHistory) getValueChanges(startRoot, endRoot ids.ID, start, end []byte, maxLength int) (*changeSummary, error) { +// If [start] is Nothing, there's no lower bound on the range. +// If [end] is Nothing, there's no upper bound on the range. +func (th *trieHistory) getValueChanges( + startRoot ids.ID, + endRoot ids.ID, + start Maybe[[]byte], + end Maybe[[]byte], + maxLength int, +) (*changeSummary, error) { if maxLength <= 0 { return nil, fmt.Errorf("%w but was %d", ErrInvalidMaxLength, maxLength) } @@ -131,8 +139,10 @@ func (th *trieHistory) getValueChanges(startRoot, endRoot ids.ID, start, end []b }, ) - startPath := newPath(start) - endPath := newPath(end) + // Note [startPath] and [endPath] are only used + // if [start.hasValue] and [end.hasValue] respectively. + startPath := newPath(start.value) + endPath := newPath(end.value) // For each element in the history in the range between [startRoot]'s // last appearance (exclusive) and [endRoot]'s last appearance (inclusive), @@ -157,8 +167,8 @@ func (th *trieHistory) getValueChanges(startRoot, endRoot ids.ID, start, end []b // Add the changes from this commit to [combinedChanges]. for key, valueChange := range item.values { // The key is outside the range [start, end]. - if (len(startPath) > 0 && key.Compare(startPath) < 0) || - (len(endPath) > 0 && key.Compare(endPath) > 0) { + if (start.hasValue && key.Compare(startPath) < 0) || + (end.hasValue && key.Compare(endPath) > 0) { continue } @@ -196,9 +206,9 @@ func (th *trieHistory) getValueChanges(startRoot, endRoot ids.ID, start, end []b // Returns the changes to go from the current trie state back to the requested [rootID] // for the keys in [start, end]. -// If [start] is nil, all keys are considered > [start]. -// If [end] is nil, all keys are considered < [end]. -func (th *trieHistory) getChangesToGetToRoot(rootID ids.ID, start, end []byte) (*changeSummary, error) { +// If [start] is Nothing, all keys are considered > [start]. +// If [end] is Nothing, all keys are considered < [end]. +func (th *trieHistory) getChangesToGetToRoot(rootID ids.ID, start Maybe[[]byte], end Maybe[[]byte]) (*changeSummary, error) { // [lastRootChange] is the last change in the history resulting in [rootID]. lastRootChange, ok := th.lastChanges[rootID] if !ok { @@ -206,8 +216,10 @@ func (th *trieHistory) getChangesToGetToRoot(rootID ids.ID, start, end []byte) ( } var ( - startPath = newPath(start) - endPath = newPath(end) + // Note that [startPath] and [endPath] are only used + // if [start.hasValue] and [end.hasValue] respectively. 
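The history filtering above only consults startPath/endPath when the corresponding bound is present. A standalone sketch of that predicate, written against the exported Maybe API rather than the package-internal hasValue field; inRange is an illustrative helper, not part of the package:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/ava-labs/avalanchego/x/merkledb"
)

// inRange mirrors the bounds check getValueChanges now performs: a key is
// excluded only when a bound is present and the key falls outside it.
func inRange(key []byte, start, end merkledb.Maybe[[]byte]) bool {
	if !start.IsNothing() && bytes.Compare(key, start.Value()) < 0 {
		return false
	}
	if !end.IsNothing() && bytes.Compare(key, end.Value()) > 0 {
		return false
	}
	return true
}

func main() {
	start := merkledb.Some([]byte("key1"))
	end := merkledb.Nothing[[]byte]()

	fmt.Println(inRange([]byte("key2"), start, end)) // true
	fmt.Println(inRange([]byte("key0"), start, end)) // false
}
```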
+ startPath = newPath(start.value) + endPath = newPath(end.value) combinedChanges = newChangeSummary(defaultPreallocationSize) ) @@ -226,8 +238,8 @@ func (th *trieHistory) getChangesToGetToRoot(rootID ids.ID, start, end []byte) ( } for key, valueChange := range item.values { - if (len(startPath) == 0 || key.Compare(startPath) >= 0) && - (len(endPath) == 0 || key.Compare(endPath) <= 0) { + if (!start.hasValue || key.Compare(startPath) >= 0) && + (!end.hasValue || key.Compare(endPath) <= 0) { if existing, ok := combinedChanges.values[key]; ok { existing.after = valueChange.before } else { diff --git a/x/merkledb/history_test.go b/x/merkledb/history_test.go index 2905d1c51eea..79448d55007d 100644 --- a/x/merkledb/history_test.go +++ b/x/merkledb/history_test.go @@ -32,36 +32,36 @@ func Test_History_Simple(t *testing.T) { require.NoError(err) require.Equal([]byte("value"), val) - origProof, err := db.GetRangeProof(context.Background(), []byte("k"), []byte("key3"), 10) + origProof, err := db.GetRangeProof(context.Background(), Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(origProof) origRootID := db.root.id - require.NoError(origProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(origProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) batch = db.NewBatch() require.NoError(batch.Put([]byte("key"), []byte("value0"))) require.NoError(batch.Write()) - newProof, err := db.GetRangeProofAtRoot(context.Background(), origRootID, []byte("k"), []byte("key3"), 10) + newProof, err := db.GetRangeProofAtRoot(context.Background(), origRootID, Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(newProof) - require.NoError(newProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(newProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) batch = db.NewBatch() require.NoError(batch.Put([]byte("key1"), []byte("value1"))) require.NoError(batch.Put([]byte("key8"), []byte("value8"))) require.NoError(batch.Write()) - newProof, err = db.GetRangeProofAtRoot(context.Background(), origRootID, []byte("k"), []byte("key3"), 10) + newProof, err = db.GetRangeProofAtRoot(context.Background(), origRootID, Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(newProof) - require.NoError(newProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(newProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) batch = db.NewBatch() require.NoError(batch.Put([]byte("k"), []byte("v"))) require.NoError(batch.Write()) - newProof, err = db.GetRangeProofAtRoot(context.Background(), origRootID, []byte("k"), []byte("key3"), 10) + newProof, err = db.GetRangeProofAtRoot(context.Background(), origRootID, Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(newProof) - require.NoError(newProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(newProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) batch = db.NewBatch() require.NoError(batch.Delete([]byte("k"))) @@ -74,10 +74,10 @@ func Test_History_Simple(t *testing.T) { require.NoError(batch.Delete([]byte("key5"))) require.NoError(batch.Delete([]byte("key8"))) require.NoError(batch.Write()) - newProof, err = db.GetRangeProofAtRoot(context.Background(), origRootID, []byte("k"), 
[]byte("key3"), 10) + newProof, err = db.GetRangeProofAtRoot(context.Background(), origRootID, Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(newProof) - require.NoError(newProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(newProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) } func Test_History_Large(t *testing.T) { @@ -133,11 +133,11 @@ func Test_History_Large(t *testing.T) { require.NoError(err) roots = append(roots, root) } - proof, err := db.GetRangeProofAtRoot(context.Background(), roots[0], nil, nil, 10) + proof, err := db.GetRangeProofAtRoot(context.Background(), roots[0], Nothing[[]byte](), Nothing[[]byte](), 10) require.NoError(err) require.NotNil(proof) - require.NoError(proof.Verify(context.Background(), nil, nil, roots[0])) + require.NoError(proof.Verify(context.Background(), Nothing[[]byte](), Nothing[[]byte](), roots[0])) } } @@ -182,15 +182,15 @@ func Test_History_Bad_GetValueChanges_Input(t *testing.T) { endRoot := db.getMerkleRoot() // ensure these start as valid calls - _, err = db.history.getValueChanges(toBeDeletedRoot, endRoot, nil, nil, 1) + _, err = db.history.getValueChanges(toBeDeletedRoot, endRoot, Nothing[[]byte](), Nothing[[]byte](), 1) require.NoError(err) - _, err = db.history.getValueChanges(startRoot, endRoot, nil, nil, 1) + _, err = db.history.getValueChanges(startRoot, endRoot, Nothing[[]byte](), Nothing[[]byte](), 1) require.NoError(err) - _, err = db.history.getValueChanges(startRoot, endRoot, nil, nil, -1) + _, err = db.history.getValueChanges(startRoot, endRoot, Nothing[[]byte](), Nothing[[]byte](), -1) require.ErrorIs(err, ErrInvalidMaxLength) - _, err = db.history.getValueChanges(endRoot, startRoot, nil, nil, 1) + _, err = db.history.getValueChanges(endRoot, startRoot, Nothing[[]byte](), Nothing[[]byte](), 1) require.ErrorIs(err, ErrStartRootNotFound) // trigger the first root to be deleted by exiting the lookback window @@ -199,11 +199,11 @@ func Test_History_Bad_GetValueChanges_Input(t *testing.T) { require.NoError(batch.Write()) // now this root should no longer be present - _, err = db.history.getValueChanges(toBeDeletedRoot, endRoot, nil, nil, 1) + _, err = db.history.getValueChanges(toBeDeletedRoot, endRoot, Nothing[[]byte](), Nothing[[]byte](), 1) require.ErrorIs(err, ErrStartRootNotFound) // same start/end roots should yield an empty changelist - changes, err := db.history.getValueChanges(endRoot, endRoot, nil, nil, 10) + changes, err := db.history.getValueChanges(endRoot, endRoot, Nothing[[]byte](), Nothing[[]byte](), 10) require.NoError(err) require.Empty(changes.values) } @@ -227,13 +227,13 @@ func Test_History_Trigger_History_Queue_Looping(t *testing.T) { require.NoError(batch.Write()) origRootID := db.getMerkleRoot() - origProof, err := db.GetRangeProof(context.Background(), []byte("k"), []byte("key3"), 10) + origProof, err := db.GetRangeProof(context.Background(), Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(origProof) require.NoError(origProof.Verify( context.Background(), - []byte("k"), - []byte("key3"), + Some([]byte("k")), + Some([]byte("key3")), origRootID, )) @@ -243,13 +243,13 @@ func Test_History_Trigger_History_Queue_Looping(t *testing.T) { require.NoError(batch.Write()) // ensure that previous root is still present and generates a valid proof - newProof, err := db.GetRangeProofAtRoot(context.Background(), origRootID, []byte("k"), []byte("key3"), 10) + newProof, err := 
db.GetRangeProofAtRoot(context.Background(), origRootID, Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(newProof) require.NoError(newProof.Verify( context.Background(), - []byte("k"), - []byte("key3"), + Some([]byte("k")), + Some([]byte("key3")), origRootID, )) @@ -259,7 +259,7 @@ func Test_History_Trigger_History_Queue_Looping(t *testing.T) { require.NoError(batch.Write()) // proof from first root shouldn't be generatable since it should have been removed from the history - _, err = db.GetRangeProofAtRoot(context.Background(), origRootID, []byte("k"), []byte("key3"), 10) + _, err = db.GetRangeProofAtRoot(context.Background(), origRootID, Some([]byte("k")), Some([]byte("key3")), 10) require.ErrorIs(err, ErrRootIDNotPresent) } @@ -305,7 +305,7 @@ func Test_History_Values_Lookup_Over_Queue_Break(t *testing.T) { endRoot := db.getMerkleRoot() // changes should still be collectable even though the history has had to loop due to hitting max size - changes, err := db.history.getValueChanges(startRoot, endRoot, nil, nil, 10) + changes, err := db.history.getValueChanges(startRoot, endRoot, Nothing[[]byte](), Nothing[[]byte](), 10) require.NoError(err) require.Contains(changes.values, newPath([]byte("key1"))) require.Equal([]byte("value1"), changes.values[newPath([]byte("key1"))].after.value) @@ -328,21 +328,21 @@ func Test_History_RepeatedRoot(t *testing.T) { require.NoError(batch.Put([]byte("key3"), []byte("value3"))) require.NoError(batch.Write()) - origProof, err := db.GetRangeProof(context.Background(), []byte("k"), []byte("key3"), 10) + origProof, err := db.GetRangeProof(context.Background(), Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(origProof) origRootID := db.root.id - require.NoError(origProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(origProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) batch = db.NewBatch() require.NoError(batch.Put([]byte("key1"), []byte("other"))) require.NoError(batch.Put([]byte("key2"), []byte("other"))) require.NoError(batch.Put([]byte("key3"), []byte("other"))) require.NoError(batch.Write()) - newProof, err := db.GetRangeProofAtRoot(context.Background(), origRootID, []byte("k"), []byte("key3"), 10) + newProof, err := db.GetRangeProofAtRoot(context.Background(), origRootID, Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(newProof) - require.NoError(newProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(newProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) // revert state to be the same as in orig proof batch = db.NewBatch() @@ -351,10 +351,10 @@ func Test_History_RepeatedRoot(t *testing.T) { require.NoError(batch.Put([]byte("key3"), []byte("value3"))) require.NoError(batch.Write()) - newProof, err = db.GetRangeProofAtRoot(context.Background(), origRootID, []byte("k"), []byte("key3"), 10) + newProof, err = db.GetRangeProofAtRoot(context.Background(), origRootID, Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(newProof) - require.NoError(newProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(newProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) } func Test_History_ExcessDeletes(t *testing.T) { @@ -370,11 +370,11 @@ func Test_History_ExcessDeletes(t *testing.T) { 
require.NoError(batch.Put([]byte("key"), []byte("value"))) require.NoError(batch.Write()) - origProof, err := db.GetRangeProof(context.Background(), []byte("k"), []byte("key3"), 10) + origProof, err := db.GetRangeProof(context.Background(), Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(origProof) origRootID := db.root.id - require.NoError(origProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(origProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) batch = db.NewBatch() require.NoError(batch.Delete([]byte("key1"))) @@ -383,10 +383,10 @@ func Test_History_ExcessDeletes(t *testing.T) { require.NoError(batch.Delete([]byte("key4"))) require.NoError(batch.Delete([]byte("key5"))) require.NoError(batch.Write()) - newProof, err := db.GetRangeProofAtRoot(context.Background(), origRootID, []byte("k"), []byte("key3"), 10) + newProof, err := db.GetRangeProofAtRoot(context.Background(), origRootID, Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(newProof) - require.NoError(newProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(newProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) } func Test_History_DontIncludeAllNodes(t *testing.T) { @@ -402,19 +402,19 @@ func Test_History_DontIncludeAllNodes(t *testing.T) { require.NoError(batch.Put([]byte("key"), []byte("value"))) require.NoError(batch.Write()) - origProof, err := db.GetRangeProof(context.Background(), []byte("k"), []byte("key3"), 10) + origProof, err := db.GetRangeProof(context.Background(), Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(origProof) origRootID := db.root.id - require.NoError(origProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(origProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) batch = db.NewBatch() require.NoError(batch.Put([]byte("z"), []byte("z"))) require.NoError(batch.Write()) - newProof, err := db.GetRangeProofAtRoot(context.Background(), origRootID, []byte("k"), []byte("key3"), 10) + newProof, err := db.GetRangeProofAtRoot(context.Background(), origRootID, Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(newProof) - require.NoError(newProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(newProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) } func Test_History_Branching2Nodes(t *testing.T) { @@ -430,19 +430,19 @@ func Test_History_Branching2Nodes(t *testing.T) { require.NoError(batch.Put([]byte("key"), []byte("value"))) require.NoError(batch.Write()) - origProof, err := db.GetRangeProof(context.Background(), []byte("k"), []byte("key3"), 10) + origProof, err := db.GetRangeProof(context.Background(), Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(origProof) origRootID := db.root.id - require.NoError(origProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(origProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) batch = db.NewBatch() require.NoError(batch.Put([]byte("k"), []byte("v"))) require.NoError(batch.Write()) - newProof, err := db.GetRangeProofAtRoot(context.Background(), origRootID, []byte("k"), []byte("key3"), 10) + newProof, err := 
db.GetRangeProofAtRoot(context.Background(), origRootID, Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(newProof) - require.NoError(newProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(newProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) } func Test_History_Branching3Nodes(t *testing.T) { @@ -458,19 +458,19 @@ func Test_History_Branching3Nodes(t *testing.T) { require.NoError(batch.Put([]byte("key123"), []byte("value123"))) require.NoError(batch.Write()) - origProof, err := db.GetRangeProof(context.Background(), []byte("k"), []byte("key3"), 10) + origProof, err := db.GetRangeProof(context.Background(), Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(origProof) origRootID := db.root.id - require.NoError(origProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(origProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) batch = db.NewBatch() require.NoError(batch.Put([]byte("key321"), []byte("value321"))) require.NoError(batch.Write()) - newProof, err := db.GetRangeProofAtRoot(context.Background(), origRootID, []byte("k"), []byte("key3"), 10) + newProof, err := db.GetRangeProofAtRoot(context.Background(), origRootID, Some([]byte("k")), Some([]byte("key3")), 10) require.NoError(err) require.NotNil(newProof) - require.NoError(newProof.Verify(context.Background(), []byte("k"), []byte("key3"), origRootID)) + require.NoError(newProof.Verify(context.Background(), Some([]byte("k")), Some([]byte("key3")), origRootID)) } func Test_History_MaxLength(t *testing.T) { @@ -543,7 +543,7 @@ func Test_Change_List(t *testing.T) { endRoot, err := db.GetMerkleRoot(context.Background()) require.NoError(err) - changes, err := db.history.getValueChanges(startRoot, endRoot, nil, nil, 8) + changes, err := db.history.getValueChanges(startRoot, endRoot, Nothing[[]byte](), Nothing[[]byte](), 8) require.NoError(err) require.Len(changes.values, 8) } @@ -642,8 +642,8 @@ func TestHistoryGetChangesToRoot(t *testing.T) { type test struct { name string rootID ids.ID - start []byte - end []byte + start Maybe[[]byte] + end Maybe[[]byte] validateFunc func(*require.Assertions, *changeSummary) expectedErr error } @@ -696,7 +696,7 @@ func TestHistoryGetChangesToRoot(t *testing.T) { { name: "third most recent change with start filter", rootID: changes[maxHistoryLen-3].rootID, - start: []byte{byte(maxHistoryLen - 1)}, // Omit values from second most recent change + start: Some([]byte{byte(maxHistoryLen - 1)}), // Omit values from second most recent change validateFunc: func(require *require.Assertions, got *changeSummary) { require.Len(got.nodes, 2) require.Len(got.values, 1) @@ -713,7 +713,7 @@ func TestHistoryGetChangesToRoot(t *testing.T) { { name: "third most recent change with end filter", rootID: changes[maxHistoryLen-3].rootID, - end: []byte{byte(maxHistoryLen - 2)}, // Omit values from most recent change + end: Some([]byte{byte(maxHistoryLen - 2)}), // Omit values from most recent change validateFunc: func(require *require.Assertions, got *changeSummary) { require.Len(got.nodes, 2) require.Len(got.values, 1) diff --git a/x/merkledb/maybe.go b/x/merkledb/maybe.go index acebb47fdf18..47184aa3ea4f 100644 --- a/x/merkledb/maybe.go +++ b/x/merkledb/maybe.go @@ -3,17 +3,24 @@ package merkledb -import "golang.org/x/exp/slices" +import ( + "bytes" + "fmt" + + "golang.org/x/exp/slices" +) // Maybe T = Some T | 
Nothing. // A data wrapper that allows values to be something [Some T] or nothing [Nothing]. +// Invariant: If [hasValue] is false, then [value] is the zero value of type T. // Maybe is used to wrap types: // * That can't be represented by nil. // * That use nil as a valid value instead of an indicator of a missing value. // For more info see https://en.wikipedia.org/wiki/Option_type type Maybe[T any] struct { hasValue bool - value T + // If [hasValue] is false, [value] is the zero value of type T. + value T } // Returns a new Maybe[T] with the value val. @@ -35,13 +42,37 @@ func (m Maybe[T]) IsNothing() bool { } // Returns the value of [m]. +// If [m.IsNothing()], returns the zero value of type T. func (m Maybe[T]) Value() T { return m.value } +func (m Maybe[T]) String() string { + if !m.hasValue { + return fmt.Sprintf("Nothing[%T]", m.value) + } + return fmt.Sprintf("Some[%T]{%v}", m.value, m.value) +} + func Clone(m Maybe[[]byte]) Maybe[[]byte] { if !m.hasValue { return Nothing[[]byte]() } return Some(slices.Clone(m.value)) } + +// Return true iff [a] and [b] are equal. +func MaybeBytesEquals(a, b Maybe[[]byte]) bool { + aNothing := a.IsNothing() + bNothing := b.IsNothing() + + if aNothing { + return bNothing + } + + if bNothing { + return false + } + + return bytes.Equal(a.Value(), b.Value()) +} diff --git a/x/merkledb/maybe_test.go b/x/merkledb/maybe_test.go index 3c0bc7c95a2f..c3c2d6c65dbd 100644 --- a/x/merkledb/maybe_test.go +++ b/x/merkledb/maybe_test.go @@ -31,3 +31,70 @@ func TestMaybeClone(t *testing.T) { require.True(mClone.IsNothing()) } } + +func TestMaybeString(t *testing.T) { + require := require.New(t) + + // Case: Value is maybe + { + val := []int{1, 2, 3} + m := Some(val) + require.Equal("Some[[]int]{[1 2 3]}", m.String()) + } + + // Case: Value is nothing + { + m := Nothing[int]() + require.Equal("Nothing[int]", m.String()) + } +} + +func TestMaybeBytesEquals(t *testing.T) { + type test struct { + name string + a Maybe[[]byte] + b Maybe[[]byte] + expected bool + } + + tests := []test{ + { + name: "a and b are both nothing", + a: Nothing[[]byte](), + b: Nothing[[]byte](), + expected: true, + }, + { + name: "a is nothing and b is something", + a: Nothing[[]byte](), + b: Some([]byte{1, 2, 3}), + expected: false, + }, + { + name: "a is something and b is nothing", + a: Some([]byte{1, 2, 3}), + b: Nothing[[]byte](), + expected: false, + }, + { + name: "a and b are the same something", + a: Some([]byte{1, 2, 3}), + b: Some([]byte{1, 2, 3}), + expected: true, + }, + { + name: "a and b are different somethings", + a: Some([]byte{1, 2, 3}), + b: Some([]byte{1, 2, 4}), + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + require := require.New(t) + + require.Equal(tt.expected, MaybeBytesEquals(tt.a, tt.b)) + }) + } +} diff --git a/x/merkledb/mock_db.go b/x/merkledb/mock_db.go index a37830388698..078ffac64d49 100644 --- a/x/merkledb/mock_db.go +++ b/x/merkledb/mock_db.go @@ -68,7 +68,7 @@ func (mr *MockMerkleDBMockRecorder) CommitChangeProof(arg0, arg1 interface{}) *g } // CommitRangeProof mocks base method. 
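The mock signatures below switch to Maybe[[]uint8]; since byte is an alias for uint8, that is the identical type to the Maybe[[]byte] used elsewhere in the diff. A small sketch, assuming the exported Maybe helpers and MaybeBytesEquals added above:

```go
package main

import (
	"fmt"

	"github.com/ava-labs/avalanchego/x/merkledb"
)

func main() {
	// byte is an alias for uint8, so Maybe[[]uint8] and Maybe[[]byte] are the
	// same instantiated type; no conversion is needed when calling the mocks.
	var a merkledb.Maybe[[]byte] = merkledb.Some([]byte{1, 2, 3})
	var b merkledb.Maybe[[]uint8] = a

	fmt.Println(merkledb.MaybeBytesEquals(a, b)) // true
}
```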
-func (m *MockMerkleDB) CommitRangeProof(arg0 context.Context, arg1 []byte, arg2 *RangeProof) error { +func (m *MockMerkleDB) CommitRangeProof(arg0 context.Context, arg1 Maybe[[]uint8], arg2 *RangeProof) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "CommitRangeProof", arg0, arg1, arg2) ret0, _ := ret[0].(error) @@ -125,7 +125,7 @@ func (mr *MockMerkleDBMockRecorder) Get(arg0 interface{}) *gomock.Call { } // GetChangeProof mocks base method. -func (m *MockMerkleDB) GetChangeProof(arg0 context.Context, arg1, arg2 ids.ID, arg3, arg4 []byte, arg5 int) (*ChangeProof, error) { +func (m *MockMerkleDB) GetChangeProof(arg0 context.Context, arg1, arg2 ids.ID, arg3, arg4 Maybe[[]uint8], arg5 int) (*ChangeProof, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetChangeProof", arg0, arg1, arg2, arg3, arg4, arg5) ret0, _ := ret[0].(*ChangeProof) @@ -170,7 +170,7 @@ func (mr *MockMerkleDBMockRecorder) GetProof(arg0, arg1 interface{}) *gomock.Cal } // GetRangeProof mocks base method. -func (m *MockMerkleDB) GetRangeProof(arg0 context.Context, arg1, arg2 []byte, arg3 int) (*RangeProof, error) { +func (m *MockMerkleDB) GetRangeProof(arg0 context.Context, arg1, arg2 Maybe[[]uint8], arg3 int) (*RangeProof, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetRangeProof", arg0, arg1, arg2, arg3) ret0, _ := ret[0].(*RangeProof) @@ -185,7 +185,7 @@ func (mr *MockMerkleDBMockRecorder) GetRangeProof(arg0, arg1, arg2, arg3 interfa } // GetRangeProofAtRoot mocks base method. -func (m *MockMerkleDB) GetRangeProofAtRoot(arg0 context.Context, arg1 ids.ID, arg2, arg3 []byte, arg4 int) (*RangeProof, error) { +func (m *MockMerkleDB) GetRangeProofAtRoot(arg0 context.Context, arg1 ids.ID, arg2, arg3 Maybe[[]uint8], arg4 int) (*RangeProof, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetRangeProofAtRoot", arg0, arg1, arg2, arg3, arg4) ret0, _ := ret[0].(*RangeProof) @@ -402,7 +402,7 @@ func (mr *MockMerkleDBMockRecorder) Remove(arg0, arg1 interface{}) *gomock.Call } // VerifyChangeProof mocks base method. -func (m *MockMerkleDB) VerifyChangeProof(arg0 context.Context, arg1 *ChangeProof, arg2, arg3 []byte, arg4 ids.ID) error { +func (m *MockMerkleDB) VerifyChangeProof(arg0 context.Context, arg1 *ChangeProof, arg2, arg3 Maybe[[]uint8], arg4 ids.ID) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "VerifyChangeProof", arg0, arg1, arg2, arg3, arg4) ret0, _ := ret[0].(error) diff --git a/x/merkledb/proof.go b/x/merkledb/proof.go index 4c20e390c0a2..82b94b7144c6 100644 --- a/x/merkledb/proof.go +++ b/x/merkledb/proof.go @@ -285,18 +285,18 @@ type RangeProof struct { // pairs and start/end proofs. 
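Call sites that previously passed possibly-empty []byte bounds now wrap them explicitly, as the test and fuzz changes in this diff do repeatedly. A sketch of that migration pattern; maybeFromBytes is a hypothetical helper, not part of the package:

```go
package main

import (
	"fmt"

	"github.com/ava-labs/avalanchego/x/merkledb"
)

// maybeFromBytes maps an empty or nil bound to Nothing and anything else to Some.
func maybeFromBytes(b []byte) merkledb.Maybe[[]byte] {
	if len(b) == 0 {
		return merkledb.Nothing[[]byte]()
	}
	return merkledb.Some(b)
}

func main() {
	fmt.Println(maybeFromBytes(nil).IsNothing())            // true
	fmt.Println(maybeFromBytes([]byte("key1")).IsNothing()) // false
}
```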
func (proof *RangeProof) Verify( ctx context.Context, - start []byte, - end []byte, + start Maybe[[]byte], + end Maybe[[]byte], expectedRootID ids.ID, ) error { switch { - case len(end) > 0 && bytes.Compare(start, end) > 0: + case start.hasValue && end.hasValue && bytes.Compare(start.value, end.value) > 0: return ErrStartAfterEnd case len(proof.KeyValues) == 0 && len(proof.StartProof) == 0 && len(proof.EndProof) == 0: return ErrNoMerkleProof - case len(start) == 0 && len(end) == 0 && len(proof.KeyValues) == 0 && len(proof.EndProof) != 1: + case !start.hasValue && !end.hasValue && len(proof.KeyValues) == 0 && len(proof.EndProof) != 1: return ErrShouldJustBeRoot - case len(proof.EndProof) == 0 && len(end) > 0: + case end.hasValue && len(proof.EndProof) == 0: return ErrNoEndProof } @@ -310,7 +310,7 @@ func (proof *RangeProof) Verify( // If [proof] has key-value pairs, we should insert children // greater than [largestKey] to ancestors of the node containing // [largestKey] so that we get the expected root ID. - largestKey = proof.KeyValues[len(proof.KeyValues)-1].Key + largestKey = Some(proof.KeyValues[len(proof.KeyValues)-1].Key) } // The key-value pairs (allegedly) proven by [proof]. @@ -319,8 +319,8 @@ func (proof *RangeProof) Verify( keyValues[newPath(keyValue.Key)] = keyValue.Value } - smallestPath := newPath(start) - largestPath := newPath(largestKey) + smallestPath := newPath(start.value) + largestPath := newPath(largestKey.value) // Ensure that the start proof is valid and contains values that // match the key/values that were sent. @@ -494,10 +494,10 @@ type ChangeProof struct { // Note that this may not be an entire proof -- nodes are omitted if // they are also in [EndProof]. StartProof []ProofNode - // A proof that the largest key in [KeyValues] and [DeletedKeys] - // does/doesn't exist in the trie with the requested start root. + // A proof that the largest key in [KeyValues] does/doesn't exist + // in the trie with the requested start root. // Empty iff no upper bound on the requested range was given - // and [KeyValues] and [DeletedKeys] are empty. + // and [KeyValues] is empty. EndProof []ProofNode // A subset of key-values that were added, removed, or had their values modified // between the requested start root (exclusive) and the requested @@ -641,10 +641,10 @@ func (proof *ChangeProof) Empty() bool { // Returns nil iff both hold: // 1. [kvs] is sorted by key in increasing order. // 2. All keys in [kvs] are in the range [start, end]. -// If [start] is nil, there is no lower bound on acceptable keys. -// If [end] is nil, there is no upper bound on acceptable keys. +// If [start] is Nothing, there is no lower bound on acceptable keys. +// If [end] is Nothing, there is no upper bound on acceptable keys. // If [kvs] is empty, returns nil.
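verifyKeyChanges (and the reworked verifyKeyValues below) check that keys are strictly increasing and that only the smallest and largest keys are compared against the optional bounds. A condensed standalone sketch of those checks; verifySortedInRange and its error values are illustrative, not library code:

```go
package main

import (
	"bytes"
	"errors"
	"fmt"

	"github.com/ava-labs/avalanchego/x/merkledb"
)

var (
	errNonIncreasing = errors.New("keys are not strictly increasing")
	errOutOfRange    = errors.New("key is outside the requested range")
)

// verifySortedInRange requires strictly increasing keys; because the keys are
// sorted, only the first and last keys need to be checked against the bounds.
func verifySortedInRange(keys [][]byte, start, end merkledb.Maybe[[]byte]) error {
	if len(keys) == 0 {
		return nil
	}
	for i := 0; i < len(keys)-1; i++ {
		if bytes.Compare(keys[i], keys[i+1]) >= 0 {
			return errNonIncreasing
		}
	}
	if (!start.IsNothing() && bytes.Compare(keys[0], start.Value()) < 0) ||
		(!end.IsNothing() && bytes.Compare(keys[len(keys)-1], end.Value()) > 0) {
		return errOutOfRange
	}
	return nil
}

func main() {
	keys := [][]byte{{1}, {2}, {3}}
	fmt.Println(verifySortedInRange(keys, merkledb.Some([]byte{1}), merkledb.Nothing[[]byte]())) // <nil>
	fmt.Println(verifySortedInRange(keys, merkledb.Some([]byte{2}), merkledb.Nothing[[]byte]())) // key is outside the requested range
}
```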
-func verifyKeyChanges(kvs []KeyChange, start, end []byte) error { +func verifyKeyChanges(kvs []KeyChange, start Maybe[[]byte], end Maybe[[]byte]) error { if len(kvs) == 0 { return nil } @@ -657,8 +657,8 @@ func verifyKeyChanges(kvs []KeyChange, start, end []byte) error { } // ensure that the keys are within the range [start, end] - if (len(start) > 0 && bytes.Compare(kvs[0].Key, start) < 0) || - (len(end) > 0 && bytes.Compare(kvs[len(kvs)-1].Key, end) > 0) { + if (start.hasValue && bytes.Compare(kvs[0].Key, start.value) < 0) || + (end.hasValue && bytes.Compare(kvs[len(kvs)-1].Key, end.value) > 0) { return ErrStateFromOutsideOfRange } @@ -668,21 +668,27 @@ func verifyKeyChanges(kvs []KeyChange, start, end []byte) error { // Returns nil iff both hold: // 1. [kvs] is sorted by key in increasing order. // 2. All keys in [kvs] are in the range [start, end]. -// If [start] is nil, there is no lower bound on acceptable keys. -// If [end] is nil, there is no upper bound on acceptable keys. +// If [start] is Nothing, there is no lower bound on acceptable keys. +// If [end] is Nothing, there is no upper bound on acceptable keys. // If [kvs] is empty, returns nil. -func verifyKeyValues(kvs []KeyValue, start, end []byte) error { - hasLowerBound := len(start) > 0 - hasUpperBound := len(end) > 0 - for i := 0; i < len(kvs); i++ { - if i < len(kvs)-1 && bytes.Compare(kvs[i].Key, kvs[i+1].Key) >= 0 { +func verifyKeyValues(kvs []KeyValue, start Maybe[[]byte], end Maybe[[]byte]) error { + if len(kvs) == 0 { + return nil + } + + // ensure that the keys are in increasing order + for i := 0; i < len(kvs)-1; i++ { + if bytes.Compare(kvs[i].Key, kvs[i+1].Key) >= 0 { return ErrNonIncreasingValues } - if (hasLowerBound && bytes.Compare(kvs[i].Key, start) < 0) || - (hasUpperBound && bytes.Compare(kvs[i].Key, end) > 0) { - return ErrStateFromOutsideOfRange - } } + + // ensure that the keys are within the range [start, end] + if (start.hasValue && bytes.Compare(kvs[0].Key, start.value) < 0) || + (end.hasValue && bytes.Compare(kvs[len(kvs)-1].Key, end.value) > 0) { + return ErrStateFromOutsideOfRange + } + return nil } diff --git a/x/merkledb/proof_test.go b/x/merkledb/proof_test.go index 026afe550579..2f615a9091d4 100644 --- a/x/merkledb/proof_test.go +++ b/x/merkledb/proof_test.go @@ -163,14 +163,14 @@ func Test_RangeProof_Extra_Value(t *testing.T) { require.NoError(err) require.Equal([]byte{2}, val) - proof, err := db.GetRangeProof(context.Background(), []byte{1}, []byte{5, 5}, 10) + proof, err := db.GetRangeProof(context.Background(), Some([]byte{1}), Some([]byte{5, 5}), 10) require.NoError(err) require.NotNil(proof) require.NoError(proof.Verify( context.Background(), - []byte{1}, - []byte{5, 5}, + Some([]byte{1}), + Some([]byte{5, 5}), db.root.id, )) @@ -178,8 +178,8 @@ func Test_RangeProof_Extra_Value(t *testing.T) { err = proof.Verify( context.Background(), - []byte{1}, - []byte{5, 5}, + Some([]byte{1}), + Some([]byte{5, 5}), db.root.id, ) require.ErrorIs(err, ErrInvalidProof) @@ -236,13 +236,13 @@ func Test_RangeProof_Verify_Bad_Data(t *testing.T) { require.NoError(err) writeBasicBatch(t, db) - proof, err := db.GetRangeProof(context.Background(), []byte{2}, []byte{3, 0}, 50) + proof, err := db.GetRangeProof(context.Background(), Some([]byte{2}), Some([]byte{3, 0}), 50) require.NoError(err) require.NotNil(proof) tt.malform(proof) - err = proof.Verify(context.Background(), []byte{2}, []byte{3, 0}, db.getMerkleRoot()) + err = proof.Verify(context.Background(), Some([]byte{2}), Some([]byte{3, 0}), 
db.getMerkleRoot()) require.ErrorIs(err, tt.expectedErr) }) } @@ -257,10 +257,10 @@ func Test_RangeProof_MaxLength(t *testing.T) { trie, err := dbTrie.NewView() require.NoError(err) - _, err = trie.GetRangeProof(context.Background(), nil, nil, -1) + _, err = trie.GetRangeProof(context.Background(), Nothing[[]byte](), Nothing[[]byte](), -1) require.ErrorIs(err, ErrInvalidMaxLength) - _, err = trie.GetRangeProof(context.Background(), nil, nil, 0) + _, err = trie.GetRangeProof(context.Background(), Nothing[[]byte](), Nothing[[]byte](), 0) require.ErrorIs(err, ErrInvalidMaxLength) } @@ -306,8 +306,8 @@ func Test_Proof(t *testing.T) { func Test_RangeProof_Syntactic_Verify(t *testing.T) { type test struct { name string - start []byte - end []byte + start Maybe[[]byte] + end Maybe[[]byte] proof *RangeProof expectedErr error } @@ -315,22 +315,22 @@ func Test_RangeProof_Syntactic_Verify(t *testing.T) { tests := []test{ { name: "start > end", - start: []byte{1}, - end: []byte{0}, + start: Some([]byte{1}), + end: Some([]byte{0}), proof: &RangeProof{}, expectedErr: ErrStartAfterEnd, }, { name: "empty", // Also tests start can be > end if end is nil - start: []byte{1}, - end: nil, + start: Some([]byte{1}), + end: Nothing[[]byte](), proof: &RangeProof{}, expectedErr: ErrNoMerkleProof, }, { name: "should just be root", - start: nil, - end: nil, + start: Nothing[[]byte](), + end: Nothing[[]byte](), proof: &RangeProof{ EndProof: []ProofNode{{}, {}}, }, @@ -338,8 +338,8 @@ func Test_RangeProof_Syntactic_Verify(t *testing.T) { }, { name: "no end proof", - start: []byte{1}, - end: []byte{1}, + start: Some([]byte{1}), + end: Some([]byte{1}), proof: &RangeProof{ KeyValues: []KeyValue{{Key: []byte{1}, Value: []byte{1}}}, }, @@ -347,8 +347,8 @@ func Test_RangeProof_Syntactic_Verify(t *testing.T) { }, { name: "unsorted key values", - start: []byte{1}, - end: nil, + start: Some([]byte{1}), + end: Nothing[[]byte](), proof: &RangeProof{ KeyValues: []KeyValue{ {Key: []byte{1}, Value: []byte{1}}, @@ -359,8 +359,8 @@ func Test_RangeProof_Syntactic_Verify(t *testing.T) { }, { name: "key lower than start", - start: []byte{1}, - end: nil, + start: Some([]byte{1}), + end: Nothing[[]byte](), proof: &RangeProof{ KeyValues: []KeyValue{ {Key: []byte{0}, Value: []byte{0}}, @@ -370,8 +370,8 @@ func Test_RangeProof_Syntactic_Verify(t *testing.T) { }, { name: "key greater than end", - start: []byte{1}, - end: []byte{1}, + start: Some([]byte{1}), + end: Some([]byte{1}), proof: &RangeProof{ KeyValues: []KeyValue{ {Key: []byte{2}, Value: []byte{0}}, @@ -382,8 +382,8 @@ func Test_RangeProof_Syntactic_Verify(t *testing.T) { }, { name: "start proof nodes in wrong order", - start: []byte{1, 2}, - end: nil, + start: Some([]byte{1, 2}), + end: Nothing[[]byte](), proof: &RangeProof{ KeyValues: []KeyValue{ {Key: []byte{1, 2}, Value: []byte{1}}, @@ -401,8 +401,8 @@ func Test_RangeProof_Syntactic_Verify(t *testing.T) { }, { name: "start proof has node for wrong key", - start: []byte{1, 2}, - end: nil, + start: Some([]byte{1, 2}), + end: Nothing[[]byte](), proof: &RangeProof{ KeyValues: []KeyValue{ {Key: []byte{1, 2}, Value: []byte{1}}, @@ -423,8 +423,8 @@ func Test_RangeProof_Syntactic_Verify(t *testing.T) { }, { name: "end proof nodes in wrong order", - start: nil, - end: []byte{1, 2}, + start: Nothing[[]byte](), + end: Some([]byte{1, 2}), proof: &RangeProof{ KeyValues: []KeyValue{ {Key: []byte{1, 2}, Value: []byte{1}}, @@ -442,8 +442,8 @@ func Test_RangeProof_Syntactic_Verify(t *testing.T) { }, { name: "end proof has node for wrong key", - 
start: nil, - end: []byte{1, 2}, + start: Nothing[[]byte](), + end: Some([]byte{1, 2}), proof: &RangeProof{ KeyValues: []KeyValue{ {Key: []byte{1, 2}, Value: []byte{1}}, @@ -479,7 +479,7 @@ func Test_RangeProof(t *testing.T) { require.NoError(err) writeBasicBatch(t, db) - proof, err := db.GetRangeProof(context.Background(), []byte{1}, []byte{3, 5}, 10) + proof, err := db.GetRangeProof(context.Background(), Some([]byte{1}), Some([]byte{3, 5}), 10) require.NoError(err) require.NotNil(proof) require.Len(proof.KeyValues, 3) @@ -501,8 +501,8 @@ func Test_RangeProof(t *testing.T) { require.NoError(proof.Verify( context.Background(), - []byte{1}, - []byte{3, 5}, + Some([]byte{1}), + Some([]byte{3, 5}), db.root.id, )) } @@ -514,7 +514,7 @@ func Test_RangeProof_BadBounds(t *testing.T) { require.NoError(err) // non-nil start/end - proof, err := db.GetRangeProof(context.Background(), []byte{4}, []byte{3}, 50) + proof, err := db.GetRangeProof(context.Background(), Some([]byte{4}), Some([]byte{3}), 50) require.ErrorIs(err, ErrStartAfterEnd) require.Nil(proof) } @@ -535,7 +535,7 @@ func Test_RangeProof_NilStart(t *testing.T) { require.NoError(err) require.Equal([]byte("value1"), val) - proof, err := db.GetRangeProof(context.Background(), nil, []byte("key35"), 2) + proof, err := db.GetRangeProof(context.Background(), Nothing[[]byte](), Some([]byte("key35")), 2) require.NoError(err) require.NotNil(proof) @@ -553,8 +553,8 @@ func Test_RangeProof_NilStart(t *testing.T) { require.NoError(proof.Verify( context.Background(), - nil, - []byte("key35"), + Nothing[[]byte](), + Some([]byte("key35")), db.root.id, )) } @@ -567,7 +567,7 @@ func Test_RangeProof_NilEnd(t *testing.T) { writeBasicBatch(t, db) require.NoError(err) - proof, err := db.GetRangeProof(context.Background(), []byte{1}, nil, 2) + proof, err := db.GetRangeProof(context.Background(), Some([]byte{1}), Nothing[[]byte](), 2) require.NoError(err) require.NotNil(proof) @@ -587,8 +587,8 @@ func Test_RangeProof_NilEnd(t *testing.T) { require.NoError(proof.Verify( context.Background(), - []byte{1}, - nil, + Some([]byte{1}), + Nothing[[]byte](), db.root.id, )) } @@ -608,7 +608,7 @@ func Test_RangeProof_EmptyValues(t *testing.T) { require.NoError(err) require.Equal([]byte("value1"), val) - proof, err := db.GetRangeProof(context.Background(), []byte("key1"), []byte("key2"), 10) + proof, err := db.GetRangeProof(context.Background(), Some([]byte("key1")), Some([]byte("key2")), 10) require.NoError(err) require.NotNil(proof) @@ -629,8 +629,8 @@ func Test_RangeProof_EmptyValues(t *testing.T) { require.NoError(proof.Verify( context.Background(), - []byte("key1"), - []byte("key2"), + Some([]byte("key1")), + Some([]byte("key2")), db.root.id, )) } @@ -643,12 +643,12 @@ func Test_ChangeProof_Missing_History_For_EndRoot(t *testing.T) { startRoot, err := db.GetMerkleRoot(context.Background()) require.NoError(err) - proof, err := db.GetChangeProof(context.Background(), startRoot, ids.Empty, nil, nil, 50) + proof, err := db.GetChangeProof(context.Background(), startRoot, ids.Empty, Nothing[[]byte](), Nothing[[]byte](), 50) require.NoError(err) require.NotNil(proof) require.False(proof.HadRootsInHistory) - require.NoError(db.VerifyChangeProof(context.Background(), proof, nil, nil, db.getMerkleRoot())) + require.NoError(db.VerifyChangeProof(context.Background(), proof, Nothing[[]byte](), Nothing[[]byte](), db.getMerkleRoot())) } func Test_ChangeProof_BadBounds(t *testing.T) { @@ -666,7 +666,7 @@ func Test_ChangeProof_BadBounds(t *testing.T) { require.NoError(err) // non-nil 
start/end - proof, err := db.GetChangeProof(context.Background(), startRoot, endRoot, []byte("key4"), []byte("key3"), 50) + proof, err := db.GetChangeProof(context.Background(), startRoot, endRoot, Some([]byte("key4")), Some([]byte("key3")), 50) require.ErrorIs(err, ErrStartAfterEnd) require.Nil(proof) } @@ -722,36 +722,36 @@ func Test_ChangeProof_Verify(t *testing.T) { require.NoError(err) // non-nil start/end - proof, err := db.GetChangeProof(context.Background(), startRoot, endRoot, []byte("key21"), []byte("key30"), 50) + proof, err := db.GetChangeProof(context.Background(), startRoot, endRoot, Some([]byte("key21")), Some([]byte("key30")), 50) require.NoError(err) require.NotNil(proof) - require.NoError(dbClone.VerifyChangeProof(context.Background(), proof, []byte("key21"), []byte("key30"), db.getMerkleRoot())) + require.NoError(dbClone.VerifyChangeProof(context.Background(), proof, Some([]byte("key21")), Some([]byte("key30")), db.getMerkleRoot())) // low maxLength - proof, err = db.GetChangeProof(context.Background(), startRoot, endRoot, nil, nil, 5) + proof, err = db.GetChangeProof(context.Background(), startRoot, endRoot, Nothing[[]byte](), Nothing[[]byte](), 5) require.NoError(err) require.NotNil(proof) - require.NoError(dbClone.VerifyChangeProof(context.Background(), proof, nil, nil, db.getMerkleRoot())) + require.NoError(dbClone.VerifyChangeProof(context.Background(), proof, Nothing[[]byte](), Nothing[[]byte](), db.getMerkleRoot())) // nil start/end - proof, err = db.GetChangeProof(context.Background(), startRoot, endRoot, nil, nil, 50) + proof, err = db.GetChangeProof(context.Background(), startRoot, endRoot, Nothing[[]byte](), Nothing[[]byte](), 50) require.NoError(err) require.NotNil(proof) - require.NoError(dbClone.VerifyChangeProof(context.Background(), proof, nil, nil, endRoot)) + require.NoError(dbClone.VerifyChangeProof(context.Background(), proof, Nothing[[]byte](), Nothing[[]byte](), endRoot)) require.NoError(dbClone.CommitChangeProof(context.Background(), proof)) newRoot, err := dbClone.GetMerkleRoot(context.Background()) require.NoError(err) require.Equal(endRoot, newRoot) - proof, err = db.GetChangeProof(context.Background(), startRoot, endRoot, []byte("key20"), []byte("key30"), 50) + proof, err = db.GetChangeProof(context.Background(), startRoot, endRoot, Some([]byte("key20")), Some([]byte("key30")), 50) require.NoError(err) require.NotNil(proof) - require.NoError(dbClone.VerifyChangeProof(context.Background(), proof, []byte("key20"), []byte("key30"), db.getMerkleRoot())) + require.NoError(dbClone.VerifyChangeProof(context.Background(), proof, Some([]byte("key20")), Some([]byte("key30")), db.getMerkleRoot())) } func Test_ChangeProof_Verify_Bad_Data(t *testing.T) { @@ -809,13 +809,13 @@ func Test_ChangeProof_Verify_Bad_Data(t *testing.T) { dbClone, err := getBasicDB() require.NoError(err) - proof, err := db.GetChangeProof(context.Background(), startRoot, endRoot, []byte{2}, []byte{3, 0}, 50) + proof, err := db.GetChangeProof(context.Background(), startRoot, endRoot, Some([]byte{2}), Some([]byte{3, 0}), 50) require.NoError(err) require.NotNil(proof) tt.malform(proof) - err = dbClone.VerifyChangeProof(context.Background(), proof, []byte{2}, []byte{3, 0}, db.getMerkleRoot()) + err = dbClone.VerifyChangeProof(context.Background(), proof, Some([]byte{2}), Some([]byte{3, 0}), db.getMerkleRoot()) require.ErrorIs(err, tt.expectedErr) }) } @@ -825,8 +825,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { type test struct { name string proof *ChangeProof - start []byte 
- end []byte + start Maybe[[]byte] + end Maybe[[]byte] expectedErr error } @@ -834,8 +834,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { { name: "start after end", proof: nil, - start: []byte{1}, - end: []byte{0}, + start: Some([]byte{1}), + end: Some([]byte{0}), expectedErr: ErrStartAfterEnd, }, { @@ -844,8 +844,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { HadRootsInHistory: false, KeyChanges: []KeyChange{{Key: []byte{1}, Value: Some([]byte{1})}}, }, - start: []byte{0}, - end: nil, // Also tests start can be after end if end is nil + start: Some([]byte{0}), + end: Nothing[[]byte](), // Also tests start can be after end if end is nil expectedErr: ErrDataInMissingRootProof, }, { @@ -854,8 +854,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { HadRootsInHistory: false, KeyChanges: []KeyChange{{Key: []byte{1}}}, }, - start: nil, - end: nil, + start: Nothing[[]byte](), + end: Nothing[[]byte](), expectedErr: ErrDataInMissingRootProof, }, { @@ -864,8 +864,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { HadRootsInHistory: false, StartProof: []ProofNode{{}}, }, - start: nil, - end: nil, + start: Nothing[[]byte](), + end: Nothing[[]byte](), expectedErr: ErrDataInMissingRootProof, }, { @@ -874,8 +874,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { HadRootsInHistory: false, EndProof: []ProofNode{{}}, }, - start: nil, - end: nil, + start: Nothing[[]byte](), + end: Nothing[[]byte](), expectedErr: ErrDataInMissingRootProof, }, { @@ -883,8 +883,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { proof: &ChangeProof{ HadRootsInHistory: false, }, - start: nil, - end: nil, + start: Nothing[[]byte](), + end: Nothing[[]byte](), expectedErr: nil, }, { @@ -892,8 +892,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { proof: &ChangeProof{ HadRootsInHistory: true, }, - start: nil, - end: nil, + start: Nothing[[]byte](), + end: Nothing[[]byte](), expectedErr: ErrNoMerkleProof, }, { @@ -902,8 +902,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { HadRootsInHistory: true, StartProof: []ProofNode{{}}, }, - start: nil, - end: []byte{1}, + start: Nothing[[]byte](), + end: Some([]byte{1}), expectedErr: ErrNoEndProof, }, { @@ -912,8 +912,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { HadRootsInHistory: true, KeyChanges: []KeyChange{{Key: []byte{1}}}, }, - start: []byte{1}, - end: nil, + start: Some([]byte{1}), + end: Nothing[[]byte](), expectedErr: ErrNoStartProof, }, { @@ -925,8 +925,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { {Key: []byte{0}}, }, }, - start: nil, - end: nil, + start: Nothing[[]byte](), + end: Nothing[[]byte](), expectedErr: ErrNonIncreasingValues, }, { @@ -938,8 +938,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { {Key: []byte{0}}, }, }, - start: []byte{1}, - end: nil, + start: Some([]byte{1}), + end: Nothing[[]byte](), expectedErr: ErrStateFromOutsideOfRange, }, { @@ -951,8 +951,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { {Key: []byte{2}}, }, }, - start: nil, - end: []byte{1}, + start: Nothing[[]byte](), + end: Some([]byte{1}), expectedErr: ErrStateFromOutsideOfRange, }, { @@ -964,8 +964,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { {Key: []byte{1}}, }, }, - start: nil, - end: nil, + start: Nothing[[]byte](), + end: Nothing[[]byte](), expectedErr: ErrNonIncreasingValues, }, { @@ -977,8 +977,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { {KeyPath: newPath([]byte{2, 3}).Serialize()}, }, }, - start: []byte{1, 2, 3}, - end: nil, + 
start: Some([]byte{1, 2, 3}), + end: Nothing[[]byte](), expectedErr: ErrProofNodeNotForKey, }, { @@ -990,8 +990,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { {KeyPath: newPath([]byte{2, 3}).Serialize()}, }, }, - start: []byte{1, 2, 3}, - end: nil, + start: Some([]byte{1, 2, 3}), + end: Nothing[[]byte](), expectedErr: ErrNonIncreasingProofNodes, }, { @@ -1006,8 +1006,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { {KeyPath: newPath([]byte{2, 3}).Serialize()}, }, }, - start: nil, - end: nil, + start: Nothing[[]byte](), + end: Nothing[[]byte](), expectedErr: ErrProofNodeNotForKey, }, { @@ -1022,8 +1022,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { {KeyPath: newPath([]byte{2, 3}).Serialize()}, }, }, - start: nil, - end: nil, + start: Nothing[[]byte](), + end: Nothing[[]byte](), expectedErr: ErrNonIncreasingProofNodes, }, } @@ -1043,8 +1043,8 @@ func Test_ChangeProof_Syntactic_Verify(t *testing.T) { func TestVerifyKeyValues(t *testing.T) { type test struct { name string - start []byte - end []byte + start Maybe[[]byte] + end Maybe[[]byte] kvs []KeyValue expectedErr error } @@ -1052,15 +1052,15 @@ func TestVerifyKeyValues(t *testing.T) { tests := []test{ { name: "empty", - start: nil, - end: nil, + start: Nothing[[]byte](), + end: Nothing[[]byte](), kvs: nil, expectedErr: nil, }, { name: "1 key", - start: nil, - end: nil, + start: Nothing[[]byte](), + end: Nothing[[]byte](), kvs: []KeyValue{ {Key: []byte{0}}, }, @@ -1068,8 +1068,8 @@ func TestVerifyKeyValues(t *testing.T) { }, { name: "non-increasing keys", - start: nil, - end: nil, + start: Nothing[[]byte](), + end: Nothing[[]byte](), kvs: []KeyValue{ {Key: []byte{0}}, {Key: []byte{0}}, @@ -1078,8 +1078,8 @@ func TestVerifyKeyValues(t *testing.T) { }, { name: "key before start", - start: []byte{1, 2}, - end: nil, + start: Some([]byte{1, 2}), + end: Nothing[[]byte](), kvs: []KeyValue{ {Key: []byte{1}}, {Key: []byte{1, 2}}, @@ -1088,8 +1088,8 @@ func TestVerifyKeyValues(t *testing.T) { }, { name: "key after end", - start: nil, - end: []byte{1, 2}, + start: Nothing[[]byte](), + end: Some([]byte{1, 2}), kvs: []KeyValue{ {Key: []byte{1}}, {Key: []byte{1, 2}}, @@ -1099,8 +1099,8 @@ func TestVerifyKeyValues(t *testing.T) { }, { name: "happy path", - start: nil, - end: []byte{1, 2, 3}, + start: Nothing[[]byte](), + end: Some([]byte{1, 2, 3}), kvs: []KeyValue{ {Key: []byte{1}}, {Key: []byte{1, 2}}, @@ -1680,14 +1680,14 @@ func FuzzRangeProofInvariants(f *testing.F) { f.Fuzz(func( t *testing.T, - start []byte, - end []byte, + startBytes []byte, + endBytes []byte, maxProofLen uint, ) { require := require.New(t) // Make sure proof bounds are valid - if len(end) != 0 && bytes.Compare(start, end) > 0 { + if len(endBytes) != 0 && bytes.Compare(startBytes, endBytes) > 0 { return } // Make sure proof length is valid @@ -1695,6 +1695,16 @@ func FuzzRangeProofInvariants(f *testing.F) { return } + start := Nothing[[]byte]() + if len(startBytes) > 0 { + start = Some(startBytes) + } + + end := Nothing[[]byte]() + if len(endBytes) > 0 { + end = Some(endBytes) + } + rangeProof, err := db.GetRangeProof( context.Background(), start, @@ -1718,7 +1728,7 @@ func FuzzRangeProofInvariants(f *testing.F) { // Make sure the EndProof invariant is maintained switch { - case len(end) == 0: + case len(endBytes) == 0: if len(rangeProof.KeyValues) == 0 { if len(rangeProof.StartProof) == 0 { require.Len(rangeProof.EndProof, 1) // Just the root @@ -1732,7 +1742,7 @@ func FuzzRangeProofInvariants(f *testing.F) { // EndProof should be a proof for 
upper range bound. value := Nothing[[]byte]() - upperRangeBoundVal, err := db.Get(end) + upperRangeBoundVal, err := db.Get(endBytes) if err != nil { require.ErrorIs(err, database.ErrNotFound) } else { @@ -1741,7 +1751,7 @@ func FuzzRangeProofInvariants(f *testing.F) { proof := Proof{ Path: rangeProof.EndProof, - Key: end, + Key: endBytes, Value: value, } diff --git a/x/merkledb/trie.go b/x/merkledb/trie.go index 10a340211843..c2cae56daf2a 100644 --- a/x/merkledb/trie.go +++ b/x/merkledb/trie.go @@ -43,8 +43,11 @@ type ReadOnlyTrie interface { // get an editable copy of the node with the given key path getEditableNode(key path) (*node, error) - // GetRangeProof generates a proof of up to maxLength smallest key/values with keys between start and end - GetRangeProof(ctx context.Context, start, end []byte, maxLength int) (*RangeProof, error) + // GetRangeProof returns a proof of up to [maxLength] key-value pairs with + // keys in range [start, end]. + // If [start] is Nothing, there's no lower bound on the range. + // If [end] is Nothing, there's no upper bound on the range. + GetRangeProof(ctx context.Context, start Maybe[[]byte], end Maybe[[]byte], maxLength int) (*RangeProof, error) database.Iteratee } diff --git a/x/merkledb/trie_test.go b/x/merkledb/trie_test.go index 23b853d59baf..8dea9ac5a4bf 100644 --- a/x/merkledb/trie_test.go +++ b/x/merkledb/trie_test.go @@ -1391,7 +1391,7 @@ func Test_Trie_ConcurrentInsertAndRangeProof(t *testing.T) { require.Eventually( func() bool { - rangeProof, err := newTrie.GetRangeProof(context.Background(), []byte("key1"), []byte("key3"), 3) + rangeProof, err := newTrie.GetRangeProof(context.Background(), Some([]byte("key1")), Some([]byte("key3")), 3) require.NoError(err) require.NotNil(rangeProof) diff --git a/x/merkledb/trieview.go b/x/merkledb/trieview.go index 3fc694b9cb1b..c1a97f5f6ebc 100644 --- a/x/merkledb/trieview.go +++ b/x/merkledb/trieview.go @@ -391,13 +391,14 @@ func (t *trieView) getProof(ctx context.Context, key []byte) (*Proof, error) { // [maxLength] must be > 0. 
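The GetRangeProof loop below gathers at most maxLength key-value pairs and stops once a key passes the optional end bound. A sketch of that collection loop over a plain sorted slice instead of the trie iterator; collect is illustrative, not library code:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/ava-labs/avalanchego/x/merkledb"
)

// collect gathers at most maxLength keys from sortedKeys that fall within the
// optional [start, end] bounds, mirroring the iteration in GetRangeProof.
func collect(sortedKeys [][]byte, start, end merkledb.Maybe[[]byte], maxLength int) [][]byte {
	out := make([][]byte, 0, maxLength)
	endIsNothing := end.IsNothing()
	for _, k := range sortedKeys {
		if !start.IsNothing() && bytes.Compare(k, start.Value()) < 0 {
			continue // the real iterator starts at the lower bound instead
		}
		if len(out) >= maxLength || !(endIsNothing || bytes.Compare(k, end.Value()) <= 0) {
			break // hit the length cap or passed the upper bound
		}
		out = append(out, k)
	}
	return out
}

func main() {
	keys := [][]byte{{1}, {2}, {3}, {4}}
	fmt.Println(collect(keys, merkledb.Some([]byte{2}), merkledb.Nothing[[]byte](), 2)) // [[2] [3]]
}
```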
func (t *trieView) GetRangeProof( ctx context.Context, - start, end []byte, + start Maybe[[]byte], + end Maybe[[]byte], maxLength int, ) (*RangeProof, error) { ctx, span := t.db.tracer.Start(ctx, "MerkleDB.trieview.GetRangeProof") defer span.End() - if len(end) > 0 && bytes.Compare(start, end) == 1 { + if start.hasValue && end.hasValue && bytes.Compare(start.value, end.value) > 0 { return nil, ErrStartAfterEnd } @@ -420,11 +421,14 @@ func (t *trieView) GetRangeProof( t.lock.RLock() } - var result RangeProof - - result.KeyValues = make([]KeyValue, 0, initKeyValuesSize) - it := t.NewIteratorWithStart(start) - for it.Next() && len(result.KeyValues) < maxLength && (len(end) == 0 || bytes.Compare(it.Key(), end) <= 0) { + var ( + result = RangeProof{ + KeyValues: make([]KeyValue, 0, initKeyValuesSize), + } + it = t.NewIteratorWithStart(start.value) + endIsNothing = !end.hasValue + ) + for it.Next() && len(result.KeyValues) < maxLength && (endIsNothing || bytes.Compare(it.Key(), end.value) <= 0) { // clone the value to prevent editing of the values stored within the trie result.KeyValues = append(result.KeyValues, KeyValue{ Key: it.Key(), @@ -449,8 +453,8 @@ func (t *trieView) GetRangeProof( if err != nil { return nil, err } - } else if len(end) > 0 { - endProof, err = t.getProof(ctx, end) + } else if end.hasValue { + endProof, err = t.getProof(ctx, end.value) if err != nil { return nil, err } @@ -459,8 +463,8 @@ func (t *trieView) GetRangeProof( result.EndProof = endProof.Path } - if len(start) > 0 { - startProof, err := t.getProof(ctx, start) + if start.hasValue { + startProof, err := t.getProof(ctx, start.value) if err != nil { return nil, err } diff --git a/x/sync/client.go b/x/sync/client.go index 6484dcbe919d..16bddee9249b 100644 --- a/x/sync/client.go +++ b/x/sync/client.go @@ -111,7 +111,16 @@ func (c *client) GetChangeProof(ctx context.Context, req *pb.SyncGetChangeProofR return nil, err } - if err := db.VerifyChangeProof(ctx, &changeProof, req.StartKey, req.EndKey, endRoot); err != nil { + startKey := merkledb.Nothing[[]byte]() + if req.StartKey != nil && !req.StartKey.IsNothing { + startKey = merkledb.Some(req.StartKey.Value) + } + endKey := merkledb.Nothing[[]byte]() + if req.EndKey != nil && !req.EndKey.IsNothing { + endKey = merkledb.Some(req.EndKey.Value) + } + + if err := db.VerifyChangeProof(ctx, &changeProof, startKey, endKey, endRoot); err != nil { return nil, fmt.Errorf("%s due to %w", errInvalidRangeProof, err) } return &changeProof, nil @@ -157,10 +166,19 @@ func (c *client) GetRangeProof(ctx context.Context, req *pb.SyncGetRangeProofReq return nil, err } + startKey := merkledb.Nothing[[]byte]() + if !req.StartKey.IsNothing { + startKey = merkledb.Some(req.StartKey.Value) + } + endKey := merkledb.Nothing[[]byte]() + if !req.EndKey.IsNothing { + endKey = merkledb.Some(req.EndKey.Value) + } + if err := rangeProof.Verify( ctx, - req.StartKey, - req.EndKey, + startKey, + endKey, root, ); err != nil { return nil, fmt.Errorf("%s due to %w", errInvalidRangeProof, err) diff --git a/x/sync/client_test.go b/x/sync/client_test.go index 0b118c704e45..eed1f0670a73 100644 --- a/x/sync/client_test.go +++ b/x/sync/client_test.go @@ -153,7 +153,13 @@ func TestGetRangeProof(t *testing.T) { "proof restricted by BytesLimit": { db: smallTrieDB, request: &pb.SyncGetRangeProofRequest{ - RootHash: smallTrieRoot[:], + RootHash: smallTrieRoot[:], + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, KeyLimit: defaultRequestKeyLimit, BytesLimit: 10000, 
}, @@ -161,7 +167,13 @@ func TestGetRangeProof(t *testing.T) { "full response for small (single request) trie": { db: smallTrieDB, request: &pb.SyncGetRangeProofRequest{ - RootHash: smallTrieRoot[:], + RootHash: smallTrieRoot[:], + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, KeyLimit: defaultRequestKeyLimit, BytesLimit: defaultRequestByteSizeLimit, }, @@ -170,7 +182,13 @@ func TestGetRangeProof(t *testing.T) { "too many leaves in response": { db: smallTrieDB, request: &pb.SyncGetRangeProofRequest{ - RootHash: smallTrieRoot[:], + RootHash: smallTrieRoot[:], + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, KeyLimit: defaultRequestKeyLimit, BytesLimit: defaultRequestByteSizeLimit, }, @@ -182,7 +200,13 @@ func TestGetRangeProof(t *testing.T) { "partial response to request for entire trie (full leaf limit)": { db: largeTrieDB, request: &pb.SyncGetRangeProofRequest{ - RootHash: largeTrieRoot[:], + RootHash: largeTrieRoot[:], + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, KeyLimit: defaultRequestKeyLimit, BytesLimit: defaultRequestByteSizeLimit, }, @@ -191,8 +215,13 @@ func TestGetRangeProof(t *testing.T) { "full response from near end of trie to end of trie (less than leaf limit)": { db: largeTrieDB, request: &pb.SyncGetRangeProofRequest{ - RootHash: largeTrieRoot[:], - StartKey: largeTrieKeys[len(largeTrieKeys)-30], // Set start 30 keys from the end of the large trie + RootHash: largeTrieRoot[:], + StartKey: &pb.MaybeBytes{ + Value: largeTrieKeys[len(largeTrieKeys)-30], // Set start 30 keys from the end of the large trie + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, KeyLimit: defaultRequestKeyLimit, BytesLimit: defaultRequestByteSizeLimit, }, @@ -201,9 +230,13 @@ func TestGetRangeProof(t *testing.T) { "full response for intermediate range of trie (less than leaf limit)": { db: largeTrieDB, request: &pb.SyncGetRangeProofRequest{ - RootHash: largeTrieRoot[:], - StartKey: largeTrieKeys[1000], // Set the range for 1000 leafs in an intermediate range of the trie - EndKey: largeTrieKeys[1099], // (inclusive range) + RootHash: largeTrieRoot[:], + StartKey: &pb.MaybeBytes{ + Value: largeTrieKeys[1000], // Set the range for 1000 leafs in an intermediate range of the trie + }, + EndKey: &pb.MaybeBytes{ + Value: largeTrieKeys[1099], // (inclusive range) + }, KeyLimit: defaultRequestKeyLimit, BytesLimit: defaultRequestByteSizeLimit, }, @@ -212,7 +245,13 @@ func TestGetRangeProof(t *testing.T) { "removed first key in response": { db: largeTrieDB, request: &pb.SyncGetRangeProofRequest{ - RootHash: largeTrieRoot[:], + RootHash: largeTrieRoot[:], + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, KeyLimit: defaultRequestKeyLimit, BytesLimit: defaultRequestByteSizeLimit, }, @@ -224,7 +263,13 @@ func TestGetRangeProof(t *testing.T) { "removed first key in response and replaced proof": { db: largeTrieDB, request: &pb.SyncGetRangeProofRequest{ - RootHash: largeTrieRoot[:], + RootHash: largeTrieRoot[:], + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, KeyLimit: defaultRequestKeyLimit, BytesLimit: defaultRequestByteSizeLimit, }, @@ -232,7 +277,7 @@ func TestGetRangeProof(t *testing.T) { start := response.KeyValues[1].Key rootID, err := largeTrieDB.GetMerkleRoot(context.Background()) require.NoError(t, err) - proof, err := 
largeTrieDB.GetRangeProofAtRoot(context.Background(), rootID, start, nil, defaultRequestKeyLimit) + proof, err := largeTrieDB.GetRangeProofAtRoot(context.Background(), rootID, merkledb.Some(start), merkledb.Nothing[[]byte](), defaultRequestKeyLimit) require.NoError(t, err) response.KeyValues = proof.KeyValues response.StartProof = proof.StartProof @@ -243,7 +288,13 @@ func TestGetRangeProof(t *testing.T) { "removed last key in response": { db: largeTrieDB, request: &pb.SyncGetRangeProofRequest{ - RootHash: largeTrieRoot[:], + RootHash: largeTrieRoot[:], + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, KeyLimit: defaultRequestKeyLimit, BytesLimit: defaultRequestByteSizeLimit, }, @@ -255,7 +306,13 @@ func TestGetRangeProof(t *testing.T) { "removed key from middle of response": { db: largeTrieDB, request: &pb.SyncGetRangeProofRequest{ - RootHash: largeTrieRoot[:], + RootHash: largeTrieRoot[:], + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, KeyLimit: defaultRequestKeyLimit, BytesLimit: defaultRequestByteSizeLimit, }, @@ -267,7 +324,13 @@ func TestGetRangeProof(t *testing.T) { "all proof keys removed from response": { db: largeTrieDB, request: &pb.SyncGetRangeProofRequest{ - RootHash: largeTrieRoot[:], + RootHash: largeTrieRoot[:], + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, KeyLimit: defaultRequestKeyLimit, BytesLimit: defaultRequestByteSizeLimit, }, @@ -460,16 +523,28 @@ func TestGetChangeProof(t *testing.T) { request: &pb.SyncGetChangeProofRequest{ StartRootHash: startRoot[:], EndRootHash: endRoot[:], - KeyLimit: defaultRequestKeyLimit, - BytesLimit: 10000, + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, + KeyLimit: defaultRequestKeyLimit, + BytesLimit: 10000, }, }, "full response for small (single request) trie": { request: &pb.SyncGetChangeProofRequest{ StartRootHash: startRoot[:], EndRootHash: endRoot[:], - KeyLimit: defaultRequestKeyLimit, - BytesLimit: defaultRequestByteSizeLimit, + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, + KeyLimit: defaultRequestKeyLimit, + BytesLimit: defaultRequestByteSizeLimit, }, expectedResponseLen: defaultRequestKeyLimit, }, @@ -477,8 +552,14 @@ func TestGetChangeProof(t *testing.T) { request: &pb.SyncGetChangeProofRequest{ StartRootHash: startRoot[:], EndRootHash: endRoot[:], - KeyLimit: defaultRequestKeyLimit, - BytesLimit: defaultRequestByteSizeLimit, + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, + KeyLimit: defaultRequestKeyLimit, + BytesLimit: defaultRequestByteSizeLimit, }, modifyResponse: func(response *merkledb.ChangeProof) { response.KeyChanges = append(response.KeyChanges, make([]merkledb.KeyChange, defaultRequestKeyLimit)...) 
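Note on the request fixtures above and below: every call site in this diff builds the new *pb.MaybeBytes field by hand from a merkledb.Maybe[[]byte] (see the db_client.go and manager.go hunks further down). As a minimal sketch of that encoding, with a hypothetical helper name (maybeToProto is not part of this change) and import paths assumed from the repository layout:

package sync

import (
	pb "github.com/ava-labs/avalanchego/proto/pb/sync"
	"github.com/ava-labs/avalanchego/x/merkledb"
)

// maybeToProto mirrors the encoding repeated at each call site in this diff:
// Nothing becomes IsNothing=true with an empty Value, and Some(v) becomes
// IsNothing=false with Value set to v. Hypothetical helper, illustration only.
func maybeToProto(m merkledb.Maybe[[]byte]) *pb.MaybeBytes {
	return &pb.MaybeBytes{
		Value:     m.Value(),
		IsNothing: m.IsNothing(),
	}
}

With such a helper, a fixture like the one above could be written as StartKey: maybeToProto(merkledb.Nothing[[]byte]()).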
@@ -489,8 +570,14 @@ func TestGetChangeProof(t *testing.T) { request: &pb.SyncGetChangeProofRequest{ StartRootHash: startRoot[:], EndRootHash: endRoot[:], - KeyLimit: defaultRequestKeyLimit, - BytesLimit: defaultRequestByteSizeLimit, + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, + KeyLimit: defaultRequestKeyLimit, + BytesLimit: defaultRequestByteSizeLimit, }, expectedResponseLen: defaultRequestKeyLimit, }, @@ -498,8 +585,14 @@ func TestGetChangeProof(t *testing.T) { request: &pb.SyncGetChangeProofRequest{ StartRootHash: startRoot[:], EndRootHash: endRoot[:], - KeyLimit: defaultRequestKeyLimit, - BytesLimit: defaultRequestByteSizeLimit, + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, + KeyLimit: defaultRequestKeyLimit, + BytesLimit: defaultRequestByteSizeLimit, }, modifyResponse: func(response *merkledb.ChangeProof) { response.KeyChanges = response.KeyChanges[1:] @@ -510,8 +603,14 @@ func TestGetChangeProof(t *testing.T) { request: &pb.SyncGetChangeProofRequest{ StartRootHash: startRoot[:], EndRootHash: endRoot[:], - KeyLimit: defaultRequestKeyLimit, - BytesLimit: defaultRequestByteSizeLimit, + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, + KeyLimit: defaultRequestKeyLimit, + BytesLimit: defaultRequestByteSizeLimit, }, modifyResponse: func(response *merkledb.ChangeProof) { response.KeyChanges = response.KeyChanges[:len(response.KeyChanges)-2] @@ -522,8 +621,14 @@ func TestGetChangeProof(t *testing.T) { request: &pb.SyncGetChangeProofRequest{ StartRootHash: startRoot[:], EndRootHash: endRoot[:], - KeyLimit: defaultRequestKeyLimit, - BytesLimit: defaultRequestByteSizeLimit, + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, + KeyLimit: defaultRequestKeyLimit, + BytesLimit: defaultRequestByteSizeLimit, }, modifyResponse: func(response *merkledb.ChangeProof) { response.KeyChanges = append(response.KeyChanges[:100], response.KeyChanges[101:]...) 
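The opposite direction appears in the client.go, db_server.go, and network_server.go hunks below, always behind a nil guard so that a peer that omits the field is treated as having sent Nothing. A sketch of that decoding, again under a hypothetical helper name (maybeFromProto is not part of this change; same assumed imports as the sketch above):

package sync

import (
	pb "github.com/ava-labs/avalanchego/proto/pb/sync"
	"github.com/ava-labs/avalanchego/x/merkledb"
)

// maybeFromProto mirrors the decoding pattern used by the server-side handlers:
// a nil message, or one with IsNothing set, maps to Nothing; anything else maps
// to Some of the carried bytes. Hypothetical helper, illustration only.
func maybeFromProto(m *pb.MaybeBytes) merkledb.Maybe[[]byte] {
	if m == nil || m.IsNothing {
		return merkledb.Nothing[[]byte]()
	}
	return merkledb.Some(m.Value)
}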
@@ -534,8 +639,14 @@ func TestGetChangeProof(t *testing.T) { request: &pb.SyncGetChangeProofRequest{ StartRootHash: startRoot[:], EndRootHash: endRoot[:], - KeyLimit: defaultRequestKeyLimit, - BytesLimit: defaultRequestByteSizeLimit, + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, + KeyLimit: defaultRequestKeyLimit, + BytesLimit: defaultRequestByteSizeLimit, }, modifyResponse: func(response *merkledb.ChangeProof) { response.StartProof = nil @@ -585,7 +696,13 @@ func TestRangeProofRetries(t *testing.T) { maxRequests := 4 request := &pb.SyncGetRangeProofRequest{ - RootHash: root[:], + RootHash: root[:], + StartKey: &pb.MaybeBytes{ + IsNothing: true, + }, + EndKey: &pb.MaybeBytes{ + IsNothing: true, + }, KeyLimit: uint32(keyCount), BytesLimit: defaultRequestByteSizeLimit, } diff --git a/x/sync/g_db/db_client.go b/x/sync/g_db/db_client.go index 358db39bd0b4..9d9d2ef81ae4 100644 --- a/x/sync/g_db/db_client.go +++ b/x/sync/g_db/db_client.go @@ -38,16 +38,22 @@ func (c *DBClient) GetChangeProof( ctx context.Context, startRootID ids.ID, endRootID ids.ID, - startKey []byte, - endKey []byte, + startKey merkledb.Maybe[[]byte], + endKey merkledb.Maybe[[]byte], keyLimit int, ) (*merkledb.ChangeProof, error) { resp, err := c.client.GetChangeProof(ctx, &pb.GetChangeProofRequest{ StartRootHash: startRootID[:], EndRootHash: endRootID[:], - StartKey: startKey, - EndKey: endKey, - KeyLimit: uint32(keyLimit), + StartKey: &pb.MaybeBytes{ + Value: startKey.Value(), + IsNothing: startKey.IsNothing(), + }, + EndKey: &pb.MaybeBytes{ + Value: endKey.Value(), + IsNothing: endKey.IsNothing(), + }, + KeyLimit: uint32(keyLimit), }) if err != nil { return nil, err @@ -62,14 +68,20 @@ func (c *DBClient) GetChangeProof( func (c *DBClient) VerifyChangeProof( ctx context.Context, proof *merkledb.ChangeProof, - startKey []byte, - endKey []byte, + startKey merkledb.Maybe[[]byte], + endKey merkledb.Maybe[[]byte], expectedRootID ids.ID, ) error { resp, err := c.client.VerifyChangeProof(ctx, &pb.VerifyChangeProofRequest{ - Proof: proof.ToProto(), - StartKey: startKey, - EndKey: endKey, + Proof: proof.ToProto(), + StartKey: &pb.MaybeBytes{ + Value: startKey.Value(), + IsNothing: startKey.IsNothing(), + }, + EndKey: &pb.MaybeBytes{ + Value: endKey.Value(), + IsNothing: endKey.IsNothing(), + }, ExpectedRootHash: expectedRootID[:], }) if err != nil { @@ -108,14 +120,20 @@ func (c *DBClient) GetProof(ctx context.Context, key []byte) (*merkledb.Proof, e func (c *DBClient) GetRangeProofAtRoot( ctx context.Context, rootID ids.ID, - startKey []byte, - endKey []byte, + startKey merkledb.Maybe[[]byte], + endKey merkledb.Maybe[[]byte], keyLimit int, ) (*merkledb.RangeProof, error) { resp, err := c.client.GetRangeProof(ctx, &pb.GetRangeProofRequest{ RootHash: rootID[:], - StartKey: startKey, - EndKey: endKey, + StartKey: &pb.MaybeBytes{ + Value: startKey.Value(), + IsNothing: startKey.IsNothing(), + }, + EndKey: &pb.MaybeBytes{ + Value: endKey.Value(), + IsNothing: endKey.IsNothing(), + }, KeyLimit: uint32(keyLimit), }) if err != nil { @@ -131,11 +149,14 @@ func (c *DBClient) GetRangeProofAtRoot( func (c *DBClient) CommitRangeProof( ctx context.Context, - startKey []byte, + startKey merkledb.Maybe[[]byte], proof *merkledb.RangeProof, ) error { _, err := c.client.CommitRangeProof(ctx, &pb.CommitRangeProofRequest{ - StartKey: startKey, + StartKey: &pb.MaybeBytes{ + Value: startKey.Value(), + IsNothing: startKey.IsNothing(), + }, RangeProof: proof.ToProto(), }) return err diff --git 
a/x/sync/g_db/db_server.go b/x/sync/g_db/db_server.go index c2a300c668f5..7313011cb463 100644 --- a/x/sync/g_db/db_server.go +++ b/x/sync/g_db/db_server.go @@ -52,12 +52,21 @@ func (s *DBServer) GetChangeProof( if err != nil { return nil, err } + + startKey := merkledb.Nothing[[]byte]() + if req.StartKey != nil && !req.StartKey.IsNothing { + startKey = merkledb.Some(req.StartKey.Value) + } + endKey := merkledb.Nothing[[]byte]() + if req.EndKey != nil && !req.EndKey.IsNothing { + endKey = merkledb.Some(req.EndKey.Value) + } changeProof, err := s.db.GetChangeProof( ctx, startRootID, endRootID, - req.StartKey, - req.EndKey, + startKey, + endKey, int(req.KeyLimit), ) if err != nil { @@ -82,7 +91,16 @@ func (s *DBServer) VerifyChangeProof( // TODO there's probably a better way to do this. var errString string - if err := s.db.VerifyChangeProof(ctx, &proof, req.StartKey, req.EndKey, rootID); err != nil { + + startKey := merkledb.Nothing[[]byte]() + if req.StartKey != nil && !req.StartKey.IsNothing { + startKey = merkledb.Some(req.StartKey.Value) + } + endKey := merkledb.Nothing[[]byte]() + if req.EndKey != nil && !req.EndKey.IsNothing { + endKey = merkledb.Some(req.EndKey.Value) + } + if err := s.db.VerifyChangeProof(ctx, &proof, startKey, endKey, rootID); err != nil { errString = err.Error() } return &pb.VerifyChangeProofResponse{ @@ -126,7 +144,16 @@ func (s *DBServer) GetRangeProof( return nil, err } - proof, err := s.db.GetRangeProofAtRoot(ctx, rootID, req.StartKey, req.EndKey, int(req.KeyLimit)) + startKey := merkledb.Nothing[[]byte]() + if req.StartKey != nil && !req.StartKey.IsNothing { + startKey = merkledb.Some(req.StartKey.Value) + } + endKey := merkledb.Nothing[[]byte]() + if req.EndKey != nil && !req.EndKey.IsNothing { + endKey = merkledb.Some(req.EndKey.Value) + } + + proof, err := s.db.GetRangeProofAtRoot(ctx, rootID, startKey, endKey, int(req.KeyLimit)) if err != nil { return nil, err } @@ -163,6 +190,11 @@ func (s *DBServer) CommitRangeProof( return nil, err } - err := s.db.CommitRangeProof(ctx, req.StartKey, &proof) + startKey := merkledb.Nothing[[]byte]() + if req.StartKey != nil && !req.StartKey.IsNothing { + startKey = merkledb.Some(req.StartKey.Value) + } + + err := s.db.CommitRangeProof(ctx, startKey, &proof) return &emptypb.Empty{}, err } diff --git a/x/sync/manager.go b/x/sync/manager.go index bd0917d07d8b..5315d29f0de5 100644 --- a/x/sync/manager.go +++ b/x/sync/manager.go @@ -11,6 +11,7 @@ import ( "sync" "go.uber.org/zap" + "golang.org/x/exp/slices" "github.com/ava-labs/avalanchego/ids" "github.com/ava-labs/avalanchego/utils/logging" @@ -49,13 +50,13 @@ const ( // [localRootID] is the ID of the root of this range in our database. // If we have no local root for this range, [localRootID] is ids.Empty. type workItem struct { - start []byte - end []byte + start merkledb.Maybe[[]byte] + end merkledb.Maybe[[]byte] priority priority localRootID ids.ID } -func newWorkItem(localRootID ids.ID, start, end []byte, priority priority) *workItem { +func newWorkItem(localRootID ids.ID, start, end merkledb.Maybe[[]byte], priority priority) *workItem { return &workItem{ localRootID: localRootID, start: start, @@ -145,7 +146,7 @@ func (m *Manager) Start(ctx context.Context) error { // Add work item to fetch the entire key range. // Note that this will be the first work item to be processed. 
- m.unprocessedWork.Insert(newWorkItem(ids.Empty, nil, nil, lowPriority)) + m.unprocessedWork.Insert(newWorkItem(ids.Empty, merkledb.Nothing[[]byte](), merkledb.Nothing[[]byte](), lowPriority)) m.syncing = true ctx, m.cancelCtx = context.WithCancel(ctx) @@ -258,10 +259,16 @@ func (m *Manager) getAndApplyChangeProof(ctx context.Context, work *workItem) { &pb.SyncGetChangeProofRequest{ StartRootHash: work.localRootID[:], EndRootHash: targetRootID[:], - StartKey: work.start, - EndKey: work.end, - KeyLimit: defaultRequestKeyLimit, - BytesLimit: defaultRequestByteSizeLimit, + StartKey: &pb.MaybeBytes{ + Value: work.start.Value(), + IsNothing: work.start.IsNothing(), + }, + EndKey: &pb.MaybeBytes{ + Value: work.end.Value(), + IsNothing: work.end.IsNothing(), + }, + KeyLimit: defaultRequestKeyLimit, + BytesLimit: defaultRequestByteSizeLimit, }, m.config.DB, ) @@ -293,7 +300,7 @@ func (m *Manager) getAndApplyChangeProof(ctx context.Context, work *workItem) { m.setError(err) return } - largestHandledKey = changeProof.KeyChanges[len(changeProof.KeyChanges)-1].Key + largestHandledKey = merkledb.Some(changeProof.KeyChanges[len(changeProof.KeyChanges)-1].Key) } m.completeWorkItem(ctx, work, largestHandledKey, targetRootID, changeProof.EndProof) @@ -305,9 +312,15 @@ func (m *Manager) getAndApplyRangeProof(ctx context.Context, work *workItem) { targetRootID := m.getTargetRoot() proof, err := m.config.Client.GetRangeProof(ctx, &pb.SyncGetRangeProofRequest{ - RootHash: targetRootID[:], - StartKey: work.start, - EndKey: work.end, + RootHash: targetRootID[:], + StartKey: &pb.MaybeBytes{ + Value: work.start.Value(), + IsNothing: work.start.IsNothing(), + }, + EndKey: &pb.MaybeBytes{ + Value: work.end.Value(), + IsNothing: work.end.IsNothing(), + }, KeyLimit: defaultRequestKeyLimit, BytesLimit: defaultRequestByteSizeLimit, }, @@ -332,36 +345,37 @@ func (m *Manager) getAndApplyRangeProof(ctx context.Context, work *workItem) { return } - largestHandledKey = proof.KeyValues[len(proof.KeyValues)-1].Key + largestHandledKey = merkledb.Some(proof.KeyValues[len(proof.KeyValues)-1].Key) } m.completeWorkItem(ctx, work, largestHandledKey, targetRootID, proof.EndProof) } -// findNextKey returns the start of the key range that should be fetched next. -// Returns nil if there are no more keys to fetch up to [rangeEnd]. -// -// If the last proof received contained at least one key-value pair, then -// [lastReceivedKey] is the greatest key in the key-value pairs received. -// Otherwise it's the end of the range for the last proof received. +// findNextKey returns the start of the key range that should be fetched next +// given that we just received a range/change proof that proved a range of +// key-value pairs ending at [lastReceivedKey]. // // [rangeEnd] is the end of the range that we want to fetch. // +// Returns Nothing if there are no more keys to fetch in [lastReceivedKey, rangeEnd]. +// // [endProof] is the end proof of the last proof received. -// Namely it's an inclusion/exclusion proof for [lastReceivedKey]. // // Invariant: [lastReceivedKey] < [rangeEnd]. +// If [rangeEnd] is Nothing it's considered > [lastReceivedKey]. func (m *Manager) findNextKey( ctx context.Context, lastReceivedKey []byte, - rangeEnd []byte, + rangeEnd merkledb.Maybe[[]byte], endProof []merkledb.ProofNode, -) ([]byte, error) { +) (merkledb.Maybe[[]byte], error) { if len(endProof) == 0 { // We try to find the next key to fetch by looking at the end proof. // If the end proof is empty, we have no information to use. 
// Start fetching from the next key after [lastReceivedKey]. - return append(lastReceivedKey, 0), nil + nextKey := lastReceivedKey + nextKey = append(nextKey, 0) + return merkledb.Some(nextKey), nil } // We want the first key larger than the [lastReceivedKey]. @@ -388,7 +402,7 @@ func (m *Manager) findNextKey( // get a proof for the same key as the received proof from the local db localProofOfKey, err := m.config.DB.GetProof(ctx, proofKeyPath.Value) if err != nil { - return nil, err + return merkledb.Nothing[[]byte](), err } localProofNodes := localProofOfKey.Path @@ -398,13 +412,13 @@ func (m *Manager) findNextKey( localProofNodes = localProofNodes[:len(localProofNodes)-1] } - var nextKey []byte + nextKey := merkledb.Nothing[[]byte]() localProofNodeIndex := len(localProofNodes) - 1 receivedProofNodeIndex := len(endProof) - 1 // traverse the two proofs from the deepest nodes up to the root until a difference is found - for localProofNodeIndex >= 0 && receivedProofNodeIndex >= 0 && nextKey == nil { + for localProofNodeIndex >= 0 && receivedProofNodeIndex >= 0 && nextKey.IsNothing() { localProofNode := localProofNodes[localProofNodeIndex] receivedProofNode := endProof[receivedProofNodeIndex] @@ -464,27 +478,25 @@ func (m *Manager) findNextKey( // determine if there are any differences in the children for the deepest unhandled node of the two proofs if childIndex, hasDifference := findChildDifference(deepestNode, deepestNodeFromOtherProof, startingChildNibble); hasDifference { - nextKey = deepestNode.KeyPath.AppendNibble(childIndex).Value + nextKey = merkledb.Some(deepestNode.KeyPath.AppendNibble(childIndex).Value) break } } - // If the nextKey is before or equal to the [lastReceivedKey] + // If the [nextKey] is before or equal to the [lastReceivedKey] // then we couldn't find a better answer than the [lastReceivedKey]. - // Set the nextKey to [lastReceivedKey] + 0, which is the first key in - // the open range (lastReceivedKey, rangeEnd). - if nextKey != nil && bytes.Compare(nextKey, lastReceivedKey) <= 0 { - nextKey = lastReceivedKey - nextKey = append(nextKey, 0) + // Set the nextKey to [lastReceivedKey] + 0, which is the first key in the open range (lastReceivedKey, rangeEnd) + if !nextKey.IsNothing() && bytes.Compare(nextKey.Value(), lastReceivedKey) <= 0 { + nextKeyVal := slices.Clone(lastReceivedKey) + nextKeyVal = append(nextKeyVal, 0) + nextKey = merkledb.Some(nextKeyVal) } - // If the nextKey is larger than the end of the range, - // return nil to signal that there is no next key in range. - if len(rangeEnd) > 0 && bytes.Compare(nextKey, rangeEnd) >= 0 { - return nil, nil + // If the [nextKey] is larger than the end of the range, return Nothing to signal that there is no next key in range + if !rangeEnd.IsNothing() && bytes.Compare(nextKey.Value(), rangeEnd.Value()) >= 0 { + return merkledb.Nothing[[]byte](), nil } - // the nextKey is within the open range (lastReceivedKey, rangeEnd), so return it return nextKey, nil } @@ -582,20 +594,42 @@ func (m *Manager) setError(err error) { go m.Close() } -// Mark the range [start, end] as synced up to [rootID]. +// Mark that we've fetched all the key-value pairs in the range +// [workItem.start, largestHandledKey] for the trie with root [rootID]. +// +// If [workItem.start] is Nothing, then we've fetched all the key-value +// pairs up to and including [largestHandledKey]. +// +// If [largestHandledKey] is Nothing, then we've fetched all the key-value +// pairs at and after [workItem.start]. 
+// +// [proofOfLargestKey] is the end proof for the range/change proof +// that gave us the range up to and including [largestHandledKey]. +// // Assumes [m.workLock] is not held. -func (m *Manager) completeWorkItem(ctx context.Context, work *workItem, largestHandledKey []byte, rootID ids.ID, proofOfLargestKey []merkledb.ProofNode) { - // if the last key is equal to the end, then the full range is completed - if !bytes.Equal(largestHandledKey, work.end) { - // find the next key to start querying by comparing the proofs for the last completed key - nextStartKey, err := m.findNextKey(ctx, largestHandledKey, work.end, proofOfLargestKey) +func (m *Manager) completeWorkItem( + ctx context.Context, + work *workItem, + largestHandledKey merkledb.Maybe[[]byte], + rootID ids.ID, + proofOfLargestKey []merkledb.ProofNode, +) { + if !merkledb.MaybeBytesEquals(largestHandledKey, work.end) { + // The largest handled key isn't equal to the end of the work item. + // Find the start of the next key range to fetch. + // Note that [largestHandledKey] can't be Nothing. + // Proof: Suppose it is. That means that we got a range/change proof that proved up to the + // greatest key-value pair in the database. That means we requested a proof with no upper + // bound. That is, [workItem.end] is Nothing. Since we're here, [bothNothing] is false, + // which means [workItem.end] isn't Nothing. Contradiction. + nextStartKey, err := m.findNextKey(ctx, largestHandledKey.Value(), work.end, proofOfLargestKey) if err != nil { m.setError(err) return } // nextStartKey being nil indicates that the entire range has been completed - if nextStartKey == nil { + if nextStartKey.IsNothing() { largestHandledKey = work.end } else { // the full range wasn't completed, so enqueue a new work item for the range [nextStartKey, workItem.end] @@ -622,8 +656,8 @@ func (m *Manager) completeWorkItem(ctx context.Context, work *workItem, largestH // completed the range [work.start, lastKey], log and record in the completed work heap m.config.Log.Info("completed range", - zap.Binary("start", work.start), - zap.Binary("end", largestHandledKey), + zap.Stringer("start", work.start), + zap.Stringer("end", largestHandledKey), zap.Stringer("rootID", rootID), zap.Bool("stale", stale), ) @@ -650,7 +684,7 @@ func (m *Manager) enqueueWork(work *workItem) { // Find the middle point. mid := midPoint(work.start, work.end) - if bytes.Equal(work.start, mid) || bytes.Equal(mid, work.end) { + if merkledb.MaybeBytesEquals(work.start, mid) || merkledb.MaybeBytesEquals(mid, work.end) { // The range is too small to split. // If we didn't have this check we would add work items // [start, start] and [start, end]. Since start <= end, this would @@ -670,15 +704,18 @@ func (m *Manager) enqueueWork(work *workItem) { } // find the midpoint between two keys -// nil on start is treated as all 0's -// nil on end is treated as all 255's -func midPoint(start, end []byte) []byte { +// Nothing/nil [start] is treated as all 0's +// Nothing/nil [end] is treated as all 255's +func midPoint(startMaybe, endMaybe merkledb.Maybe[[]byte]) merkledb.Maybe[[]byte] { + start := startMaybe.Value() + end := endMaybe.Value() + length := len(start) if len(end) > length { length = len(end) } if length == 0 { - return []byte{127} + return merkledb.Some([]byte{127}) } // This check deals with cases where the end has a 255(or is nil which is treated as all 255s) and the start key ends 255. 
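For reference, midPoint now takes and returns Maybe values, with Nothing standing in for an unbounded side. The concrete expectations come from the Test_Midpoint hunk in sync_test.go further down; a sketch restating a few of them (illustrative only, assumes placement in the x/sync package next to midPoint):

package sync

import "github.com/ava-labs/avalanchego/x/merkledb"

// midPointExamples restates a few assertions from Test_Midpoint below.
func midPointExamples() []merkledb.Maybe[[]byte] {
	return []merkledb.Maybe[[]byte]{
		// A fully unbounded range splits at 0x7f.
		midPoint(merkledb.Nothing[[]byte](), merkledb.Nothing[[]byte]()), // Some([]byte{127})
		// Both bounds present: ordinary bytewise midpoint.
		midPoint(merkledb.Some([]byte{1, 255}), merkledb.Some([]byte{2, 1})), // Some([]byte{2, 0})
		// Nothing as the end is treated as all 255s, so the result pads past the start key.
		midPoint(merkledb.Some([]byte{255, 255}), merkledb.Nothing[[]byte]()), // Some([]byte{255, 255, 127, 127})
	}
}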
@@ -732,7 +769,7 @@ func midPoint(start, end []byte) []byte { } else { midpoint = midpoint[0:length] } - return midpoint + return merkledb.Some(midpoint) } // findChildDifference returns the first child index that is different between node 1 and node 2 if one exists and diff --git a/x/sync/network_server.go b/x/sync/network_server.go index 01fc924106b3..4439264e1f1b 100644 --- a/x/sync/network_server.go +++ b/x/sync/network_server.go @@ -19,6 +19,7 @@ import ( "github.com/ava-labs/avalanchego/ids" "github.com/ava-labs/avalanchego/snow/engine/common" "github.com/ava-labs/avalanchego/utils/constants" + "github.com/ava-labs/avalanchego/utils/hashing" "github.com/ava-labs/avalanchego/utils/logging" "github.com/ava-labs/avalanchego/utils/units" "github.com/ava-labs/avalanchego/x/merkledb" @@ -39,7 +40,18 @@ const ( endProofSizeBufferAmount = 2 * units.KiB ) -var ErrMinProofSizeIsTooLarge = errors.New("cannot generate any proof within the requested limit") +var ( + ErrMinProofSizeIsTooLarge = errors.New("cannot generate any proof within the requested limit") + + errInvalidBytesLimit = errors.New("bytes limit must be greater than 0") + errInvalidKeyLimit = errors.New("key limit must be greater than 0") + errInvalidStartRootHash = fmt.Errorf("start root hash must have length %d", hashing.HashLen) + errInvalidEndRootHash = fmt.Errorf("end root hash must have length %d", hashing.HashLen) + errInvalidStartKey = errors.New("start key is Nothing but has value") + errInvalidEndKey = errors.New("end key is Nothing but has value") + errInvalidBounds = errors.New("start key is greater than end key") + errInvalidRootHash = fmt.Errorf("root hash must have length %d", hashing.HashLen) +) type NetworkServer struct { appSender common.AppSender // Used to respond to peer requests via AppResponse. @@ -138,16 +150,13 @@ func (s *NetworkServer) HandleChangeProofRequest( requestID uint32, req *pb.SyncGetChangeProofRequest, ) error { - if req.BytesLimit == 0 || - req.KeyLimit == 0 || - len(req.StartRootHash) != ids.IDLen || - len(req.EndRootHash) != ids.IDLen || - (len(req.EndKey) > 0 && bytes.Compare(req.StartKey, req.EndKey) > 0) { + if err := validateChangeProofRequest(req); err != nil { s.log.Debug( "dropping invalid change proof request", zap.Stringer("nodeID", nodeID), zap.Uint32("requestID", requestID), zap.Stringer("req", req), + zap.Error(err), ) return nil // dropping request } @@ -172,7 +181,16 @@ func (s *NetworkServer) HandleChangeProofRequest( if err != nil { return err } - changeProof, err := s.db.GetChangeProof(ctx, startRoot, endRoot, req.StartKey, req.EndKey, int(keyLimit)) + startKey := merkledb.Nothing[[]byte]() + if req.StartKey != nil && !req.StartKey.IsNothing { + startKey = merkledb.Some(req.StartKey.Value) + } + endKey := merkledb.Nothing[[]byte]() + if req.EndKey != nil && !req.EndKey.IsNothing { + endKey = merkledb.Some(req.EndKey.Value) + } + + changeProof, err := s.db.GetChangeProof(ctx, startRoot, endRoot, startKey, endKey, int(keyLimit)) if err != nil { // handle expected errors so clients cannot cause servers to spam warning logs. 
if errors.Is(err, merkledb.ErrRootIDNotPresent) || errors.Is(err, merkledb.ErrStartRootNotFound) { @@ -219,15 +237,13 @@ func (s *NetworkServer) HandleRangeProofRequest( requestID uint32, req *pb.SyncGetRangeProofRequest, ) error { - if req.BytesLimit == 0 || - req.KeyLimit == 0 || - len(req.RootHash) != ids.IDLen || - (len(req.EndKey) > 0 && bytes.Compare(req.StartKey, req.EndKey) > 0) { + if err := validateRangeProofRequest(req); err != nil { s.log.Debug( "dropping invalid range proof request", zap.Stringer("nodeID", nodeID), zap.Uint32("requestID", requestID), zap.Stringer("req", req), + zap.Error(err), ) return nil // dropping request } @@ -246,7 +262,15 @@ func (s *NetworkServer) HandleRangeProofRequest( if err != nil { return err } - rangeProof, err := s.db.GetRangeProofAtRoot(ctx, root, req.StartKey, req.EndKey, int(keyLimit)) + startKey := merkledb.Nothing[[]byte]() + if req.StartKey != nil && !req.StartKey.IsNothing { + startKey = merkledb.Some(req.StartKey.Value) + } + endKey := merkledb.Nothing[[]byte]() + if req.EndKey != nil && !req.EndKey.IsNothing { + endKey = merkledb.Some(req.EndKey.Value) + } + rangeProof, err := s.db.GetRangeProofAtRoot(ctx, root, startKey, endKey, int(keyLimit)) if err != nil { // handle expected errors so clients cannot cause servers to spam warning logs. if errors.Is(err, merkledb.ErrRootIDNotPresent) { @@ -288,3 +312,47 @@ func isTimeout(err error) bool { // otherwise, check for context.DeadlineExceeded directly return errors.Is(err, context.DeadlineExceeded) } + +// Returns nil iff [req] is well-formed. +func validateChangeProofRequest(req *pb.SyncGetChangeProofRequest) error { + switch { + case req.BytesLimit == 0: + return errInvalidBytesLimit + case req.KeyLimit == 0: + return errInvalidKeyLimit + case len(req.StartRootHash) != hashing.HashLen: + return errInvalidStartRootHash + case len(req.EndRootHash) != hashing.HashLen: + return errInvalidEndRootHash + case req.StartKey != nil && req.StartKey.IsNothing && len(req.StartKey.Value) > 0: + return errInvalidStartKey + case req.EndKey != nil && req.EndKey.IsNothing && len(req.EndKey.Value) > 0: + return errInvalidEndKey + case req.StartKey != nil && req.EndKey != nil && !req.StartKey.IsNothing && + !req.EndKey.IsNothing && bytes.Compare(req.StartKey.Value, req.EndKey.Value) > 0: + return errInvalidBounds + default: + return nil + } +} + +// Returns nil iff [req] is well-formed. 
+func validateRangeProofRequest(req *pb.SyncGetRangeProofRequest) error { + switch { + case req.BytesLimit == 0: + return errInvalidBytesLimit + case req.KeyLimit == 0: + return errInvalidKeyLimit + case len(req.RootHash) != ids.IDLen: + return errInvalidRootHash + case req.StartKey != nil && req.StartKey.IsNothing && len(req.StartKey.Value) > 0: + return errInvalidStartKey + case req.EndKey != nil && req.EndKey.IsNothing && len(req.EndKey.Value) > 0: + return errInvalidEndKey + case req.StartKey != nil && req.EndKey != nil && !req.StartKey.IsNothing && + !req.EndKey.IsNothing && bytes.Compare(req.StartKey.Value, req.EndKey.Value) > 0: + return errInvalidBounds + default: + return nil + } +} diff --git a/x/sync/network_server_test.go b/x/sync/network_server_test.go index 9e83d8fde051..8b2a3b1fca35 100644 --- a/x/sync/network_server_test.go +++ b/x/sync/network_server_test.go @@ -71,8 +71,12 @@ func Test_Server_GetRangeProof(t *testing.T) { RootHash: smallTrieRoot[:], KeyLimit: defaultRequestKeyLimit, BytesLimit: defaultRequestByteSizeLimit, - StartKey: []byte{1}, - EndKey: []byte{0}, + StartKey: &pb.MaybeBytes{ + Value: []byte{1}, + }, + EndKey: &pb.MaybeBytes{ + Value: []byte{0}, + }, }, proofNil: true, }, @@ -219,8 +223,12 @@ func Test_Server_GetChangeProof(t *testing.T) { EndRootHash: endRoot[:], KeyLimit: defaultRequestKeyLimit, BytesLimit: defaultRequestByteSizeLimit, - StartKey: []byte{1}, - EndKey: []byte{0}, + StartKey: &pb.MaybeBytes{ + Value: []byte{1}, + }, + EndKey: &pb.MaybeBytes{ + Value: []byte{0}, + }, }, proofNil: true, }, diff --git a/x/sync/sync_test.go b/x/sync/sync_test.go index 10ad6ff20c0b..e8c7ed26646b 100644 --- a/x/sync/sync_test.go +++ b/x/sync/sync_test.go @@ -46,7 +46,23 @@ func (client *mockClient) GetChangeProof(ctx context.Context, request *pb.SyncGe if err != nil { return nil, err } - return client.db.GetChangeProof(ctx, startRoot, endRoot, request.StartKey, request.EndKey, int(request.KeyLimit)) + startKey := merkledb.Nothing[[]byte]() + if !request.StartKey.IsNothing { + startKey = merkledb.Some(request.StartKey.Value) + } + endKey := merkledb.Nothing[[]byte]() + if !request.EndKey.IsNothing { + endKey = merkledb.Some(request.EndKey.Value) + } + + return client.db.GetChangeProof( + ctx, + startRoot, + endRoot, + startKey, + endKey, + int(request.KeyLimit), + ) } func (client *mockClient) GetRangeProof(ctx context.Context, request *pb.SyncGetRangeProofRequest) (*merkledb.RangeProof, error) { @@ -54,7 +70,15 @@ func (client *mockClient) GetRangeProof(ctx context.Context, request *pb.SyncGet if err != nil { return nil, err } - return client.db.GetRangeProofAtRoot(ctx, root, request.StartKey, request.EndKey, int(request.KeyLimit)) + startKey := merkledb.Nothing[[]byte]() + if !request.StartKey.IsNothing { + startKey = merkledb.Some(request.StartKey.Value) + } + endKey := merkledb.Nothing[[]byte]() + if !request.EndKey.IsNothing { + endKey = merkledb.Some(request.EndKey.Value) + } + return client.db.GetRangeProofAtRoot(ctx, root, startKey, endKey, int(request.KeyLimit)) } func Test_Creation(t *testing.T) { @@ -119,41 +143,41 @@ func Test_Completion(t *testing.T) { func Test_Midpoint(t *testing.T) { require := require.New(t) - mid := midPoint([]byte{1, 255}, []byte{2, 1}) - require.Equal([]byte{2, 0}, mid) + mid := midPoint(merkledb.Some([]byte{1, 255}), merkledb.Some([]byte{2, 1})) + require.Equal(merkledb.Some([]byte{2, 0}), mid) - mid = midPoint(nil, []byte{255, 255, 0}) - require.Equal([]byte{127, 255, 128}, mid) + mid = midPoint(merkledb.Nothing[[]byte](), 
merkledb.Some([]byte{255, 255, 0})) + require.Equal(merkledb.Some([]byte{127, 255, 128}), mid) - mid = midPoint([]byte{255, 255, 255}, []byte{255, 255}) - require.Equal([]byte{255, 255, 127, 128}, mid) + mid = midPoint(merkledb.Some([]byte{255, 255, 255}), merkledb.Some([]byte{255, 255})) + require.Equal(merkledb.Some([]byte{255, 255, 127, 128}), mid) - mid = midPoint(nil, []byte{255}) - require.Equal([]byte{127, 127}, mid) + mid = midPoint(merkledb.Nothing[[]byte](), merkledb.Some([]byte{255})) + require.Equal(merkledb.Some([]byte{127, 127}), mid) - mid = midPoint([]byte{1, 255}, []byte{255, 1}) - require.Equal([]byte{128, 128}, mid) + mid = midPoint(merkledb.Some([]byte{1, 255}), merkledb.Some([]byte{255, 1})) + require.Equal(merkledb.Some([]byte{128, 128}), mid) - mid = midPoint([]byte{140, 255}, []byte{141, 0}) - require.Equal([]byte{140, 255, 127}, mid) + mid = midPoint(merkledb.Some([]byte{140, 255}), merkledb.Some([]byte{141, 0})) + require.Equal(merkledb.Some([]byte{140, 255, 127}), mid) - mid = midPoint([]byte{126, 255}, []byte{127}) - require.Equal([]byte{126, 255, 127}, mid) + mid = midPoint(merkledb.Some([]byte{126, 255}), merkledb.Some([]byte{127})) + require.Equal(merkledb.Some([]byte{126, 255, 127}), mid) - mid = midPoint(nil, nil) - require.Equal([]byte{127}, mid) + mid = midPoint(merkledb.Nothing[[]byte](), merkledb.Nothing[[]byte]()) + require.Equal(merkledb.Some([]byte{127}), mid) - low := midPoint(nil, mid) - require.Equal([]byte{63, 127}, low) + low := midPoint(merkledb.Nothing[[]byte](), mid) + require.Equal(merkledb.Some([]byte{63, 127}), low) - high := midPoint(mid, nil) - require.Equal([]byte{191}, high) + high := midPoint(mid, merkledb.Nothing[[]byte]()) + require.Equal(merkledb.Some([]byte{191}), high) - mid = midPoint([]byte{255, 255}, nil) - require.Equal([]byte{255, 255, 127, 127}, mid) + mid = midPoint(merkledb.Some([]byte{255, 255}), merkledb.Nothing[[]byte]()) + require.Equal(merkledb.Some([]byte{255, 255, 127, 127}), mid) - mid = midPoint([]byte{255}, nil) - require.Equal([]byte{255, 127, 127}, mid) + mid = midPoint(merkledb.Some([]byte{255}), merkledb.Nothing[[]byte]()) + require.Equal(merkledb.Some([]byte{255, 127, 127}), mid) for i := 0; i < 5000; i++ { r := rand.New(rand.NewSource(int64(i))) // #nosec G404 @@ -175,9 +199,9 @@ func Test_Midpoint(t *testing.T) { start, end = end, start } - mid = midPoint(start, end) - require.Equal(-1, bytes.Compare(start, mid)) - require.Equal(-1, bytes.Compare(mid, end)) + mid = midPoint(merkledb.Some(start), merkledb.Some(end)) + require.Equal(-1, bytes.Compare(start, mid.Value())) + require.Equal(-1, bytes.Compare(mid.Value(), end)) } } @@ -212,27 +236,28 @@ func Test_Sync_FindNextKey_InSync(t *testing.T) { require.NoError(syncer.Start(context.Background())) require.NoError(syncer.Wait(context.Background())) - proof, err := dbToSync.GetRangeProof(context.Background(), nil, nil, 500) + proof, err := dbToSync.GetRangeProof(context.Background(), merkledb.Nothing[[]byte](), merkledb.Nothing[[]byte](), 500) require.NoError(err) // the two dbs should be in sync, so next key should be nil lastKey := proof.KeyValues[len(proof.KeyValues)-1].Key - nextKey, err := syncer.findNextKey(context.Background(), lastKey, nil, proof.EndProof) + nextKey, err := syncer.findNextKey(context.Background(), lastKey, merkledb.Nothing[[]byte](), proof.EndProof) require.NoError(err) - require.Nil(nextKey) + require.True(nextKey.IsNothing()) // add an extra value to sync db past the last key returned - newKey := midPoint(lastKey, nil) - 
require.NoError(db.Put(newKey, []byte{1})) + newKey := midPoint(merkledb.Some(lastKey), merkledb.Nothing[[]byte]()) + newKeyVal := newKey.Value() + require.NoError(db.Put(newKeyVal, []byte{1})) // create a range endpoint that is before the newly added key, but after the last key endPointBeforeNewKey := make([]byte, 0, 2) - for i := 0; i < len(newKey); i++ { - endPointBeforeNewKey = append(endPointBeforeNewKey, newKey[i]) + for i := 0; i < len(newKeyVal); i++ { + endPointBeforeNewKey = append(endPointBeforeNewKey, newKeyVal[i]) // we need the new key to be after the last key // don't subtract anything from the current byte if newkey and lastkey are equal - if lastKey[i] == newKey[i] { + if lastKey[i] == newKeyVal[i] { continue } @@ -249,11 +274,11 @@ func Test_Sync_FindNextKey_InSync(t *testing.T) { // both nibbles were 0, so move onto the next byte } - nextKey, err = syncer.findNextKey(context.Background(), lastKey, endPointBeforeNewKey, proof.EndProof) + nextKey, err = syncer.findNextKey(context.Background(), lastKey, merkledb.Some(endPointBeforeNewKey), proof.EndProof) require.NoError(err) // next key would be after the end of the range, so it returns nil instead - require.Nil(nextKey) + require.True(nextKey.IsNothing()) } } @@ -292,13 +317,13 @@ func Test_Sync_FindNextKey_Deleted(t *testing.T) { // there is now another value in the range that needs to be sync'ed require.NoError(db.Put([]byte{0x13}, []byte{3})) - nextKey, err := syncer.findNextKey(context.Background(), []byte{0x12}, []byte{0x20}, noExtraNodeProof.Path) + nextKey, err := syncer.findNextKey(context.Background(), []byte{0x12}, merkledb.Some([]byte{0x20}), noExtraNodeProof.Path) require.NoError(err) - require.Equal([]byte{0x13}, nextKey) + require.Equal(merkledb.Some([]byte{0x13}), nextKey) - nextKey, err = syncer.findNextKey(context.Background(), []byte{0x11}, []byte{0x20}, extraNodeProof.Path) + nextKey, err = syncer.findNextKey(context.Background(), []byte{0x11}, merkledb.Some([]byte{0x20}), extraNodeProof.Path) require.NoError(err) - require.Equal([]byte{0x13}, nextKey) + require.Equal(merkledb.Some([]byte{0x13}), nextKey) } func Test_Sync_FindNextKey_BranchInLocal(t *testing.T) { @@ -328,9 +353,9 @@ func Test_Sync_FindNextKey_BranchInLocal(t *testing.T) { require.NoError(err) require.NoError(db.Put([]byte{0x12}, []byte{4})) - nextKey, err := syncer.findNextKey(context.Background(), []byte{0x11, 0x11}, []byte{0x20}, proof.Path) + nextKey, err := syncer.findNextKey(context.Background(), []byte{0x11, 0x11}, merkledb.Some([]byte{0x20}), proof.Path) require.NoError(err) - require.Equal([]byte{0x12}, nextKey) + require.Equal(merkledb.Some([]byte{0x12}), nextKey) } func Test_Sync_FindNextKey_BranchInReceived(t *testing.T) { @@ -361,9 +386,9 @@ func Test_Sync_FindNextKey_BranchInReceived(t *testing.T) { require.NoError(err) require.NoError(db.Delete([]byte{0x12})) - nextKey, err := syncer.findNextKey(context.Background(), []byte{0x11, 0x11}, []byte{0x20}, proof.Path) + nextKey, err := syncer.findNextKey(context.Background(), []byte{0x11, 0x11}, merkledb.Some([]byte{0x20}), proof.Path) require.NoError(err) - require.Equal([]byte{0x12}, nextKey) + require.Equal(merkledb.Some([]byte{0x12}), nextKey) } func Test_Sync_FindNextKey_ExtraValues(t *testing.T) { @@ -397,36 +422,37 @@ func Test_Sync_FindNextKey_ExtraValues(t *testing.T) { require.NoError(syncer.Start(context.Background())) require.NoError(syncer.Wait(context.Background())) - proof, err := dbToSync.GetRangeProof(context.Background(), nil, nil, 500) + proof, err := 
dbToSync.GetRangeProof(context.Background(), merkledb.Nothing[[]byte](), merkledb.Nothing[[]byte](), 500) require.NoError(err) // add an extra value to local db lastKey := proof.KeyValues[len(proof.KeyValues)-1].Key - midpoint := midPoint(lastKey, nil) + midpoint := midPoint(merkledb.Some(lastKey), merkledb.Nothing[[]byte]()) + midPointVal := midpoint.Value() - require.NoError(db.Put(midpoint, []byte{1})) + require.NoError(db.Put(midPointVal, []byte{1})) // next key at prefix of newly added point - nextKey, err := syncer.findNextKey(context.Background(), lastKey, nil, proof.EndProof) + nextKey, err := syncer.findNextKey(context.Background(), lastKey, merkledb.Nothing[[]byte](), proof.EndProof) require.NoError(err) require.NotNil(nextKey) - require.True(isPrefix(midpoint, nextKey)) + require.True(isPrefix(midPointVal, nextKey.Value())) - require.NoError(db.Delete(midpoint)) + require.NoError(db.Delete(midPointVal)) - require.NoError(dbToSync.Put(midpoint, []byte{1})) + require.NoError(dbToSync.Put(midPointVal, []byte{1})) - proof, err = dbToSync.GetRangeProof(context.Background(), nil, lastKey, 500) + proof, err = dbToSync.GetRangeProof(context.Background(), merkledb.Nothing[[]byte](), merkledb.Some(lastKey), 500) require.NoError(err) // next key at prefix of newly added point - nextKey, err = syncer.findNextKey(context.Background(), lastKey, nil, proof.EndProof) + nextKey, err = syncer.findNextKey(context.Background(), lastKey, merkledb.Nothing[[]byte](), proof.EndProof) require.NoError(err) require.NotNil(nextKey) // deal with odd length key - require.True(isPrefix(midpoint, nextKey)) + require.True(isPrefix(midPointVal, nextKey.Value())) } } @@ -459,8 +485,13 @@ func TestFindNextKeyEmptyEndProof(t *testing.T) { _, _ = r.Read(lastReceivedKey) // #nosec G404 rangeEndLen := r.Intn(16) - rangeEnd := make([]byte, rangeEndLen) - _, _ = r.Read(rangeEnd) // #nosec G404 + rangeEndBytes := make([]byte, rangeEndLen) + _, _ = r.Read(rangeEndBytes) // #nosec G404 + + rangeEnd := merkledb.Nothing[[]byte]() + if rangeEndLen > 0 { + rangeEnd = merkledb.Some(rangeEndBytes) + } nextKey, err := syncer.findNextKey( context.Background(), @@ -469,7 +500,7 @@ func TestFindNextKeyEmptyEndProof(t *testing.T) { nil, /* endProof */ ) require.NoError(err) - require.Equal(append(lastReceivedKey, 0), nextKey) + require.Equal(merkledb.Some(append(lastReceivedKey, 0)), nextKey) } } @@ -516,7 +547,7 @@ func Test_Sync_FindNextKey_DifferentChild(t *testing.T) { require.NoError(syncer.Start(context.Background())) require.NoError(syncer.Wait(context.Background())) - proof, err := dbToSync.GetRangeProof(context.Background(), nil, nil, 100) + proof, err := dbToSync.GetRangeProof(context.Background(), merkledb.Nothing[[]byte](), merkledb.Nothing[[]byte](), 100) require.NoError(err) lastKey := proof.KeyValues[len(proof.KeyValues)-1].Key @@ -526,12 +557,12 @@ func Test_Sync_FindNextKey_DifferentChild(t *testing.T) { require.NoError(dbToSync.Put(lastKey, []byte{2})) - proof, err = dbToSync.GetRangeProof(context.Background(), nil, proof.KeyValues[len(proof.KeyValues)-1].Key, 100) + proof, err = dbToSync.GetRangeProof(context.Background(), merkledb.Nothing[[]byte](), merkledb.Some(proof.KeyValues[len(proof.KeyValues)-1].Key), 100) require.NoError(err) - nextKey, err := syncer.findNextKey(context.Background(), proof.KeyValues[len(proof.KeyValues)-1].Key, nil, proof.EndProof) + nextKey, err := syncer.findNextKey(context.Background(), proof.KeyValues[len(proof.KeyValues)-1].Key, merkledb.Nothing[[]byte](), proof.EndProof) 
require.NoError(err) - require.Equal(nextKey, lastKey) + require.Equal(nextKey, merkledb.Some(lastKey)) } } @@ -595,10 +626,19 @@ func TestFindNextKeyRandom(t *testing.T) { _, _ = rand.Read(rangeEnd) } + startKey := merkledb.Nothing[[]byte]() + if len(rangeStart) > 0 { + startKey = merkledb.Some(rangeStart) + } + endKey := merkledb.Nothing[[]byte]() + if len(rangeEnd) > 0 { + endKey = merkledb.Some(rangeEnd) + } + remoteProof, err := remoteDB.GetRangeProof( context.Background(), - rangeStart, - rangeEnd, + startKey, + endKey, rand.Intn(maxProofLen)+1, ) require.NoError(err) @@ -612,7 +652,7 @@ func TestFindNextKeyRandom(t *testing.T) { // in the actual syncer. require.NoError(localDB.CommitRangeProof( context.Background(), - rangeStart, + startKey, remoteProof, )) @@ -727,17 +767,20 @@ func TestFindNextKeyRandom(t *testing.T) { gotFirstDiff, err := syncer.findNextKey( context.Background(), lastReceivedKey, - rangeEnd, + endKey, remoteProof.EndProof, ) require.NoError(err) - if bytes.Compare(smallestDiffKey.Value, rangeEnd) >= 0 { + switch { + case bytes.Compare(smallestDiffKey.Value, rangeEnd) >= 0: // The smallest key which differs is after the range end so the - // next key to get should be nil because we're done fetching the range. - require.Nil(gotFirstDiff) - } else { - require.Equal(smallestDiffKey.Value, gotFirstDiff) + // next key to get should be Nothing because we're done fetching the range. + require.True(gotFirstDiff.IsNothing()) + case len(smallestDiffKey.Value) == 0: + require.True(gotFirstDiff.IsNothing()) + default: + require.Equal(merkledb.Some(smallestDiffKey.Value), gotFirstDiff) } } } @@ -897,7 +940,15 @@ func Test_Sync_Error_During_Sync(t *testing.T) { require.NoError(err) endRoot, err := ids.ToID(request.EndRootHash) require.NoError(err) - return dbToSync.GetChangeProof(ctx, startRoot, endRoot, request.StartKey, request.EndKey, int(request.KeyLimit)) + startKey := merkledb.Nothing[[]byte]() + if !request.StartKey.IsNothing { + startKey = merkledb.Some(request.StartKey.Value) + } + endKey := merkledb.Nothing[[]byte]() + if !request.EndKey.IsNothing { + endKey = merkledb.Some(request.EndKey.Value) + } + return dbToSync.GetChangeProof(ctx, startRoot, endRoot, startKey, endKey, int(request.KeyLimit)) }, ).AnyTimes() @@ -976,7 +1027,15 @@ func Test_Sync_Result_Correct_Root_Update_Root_During(t *testing.T) { <-updatedRootChan root, err := ids.ToID(request.RootHash) require.NoError(err) - return dbToSync.GetRangeProofAtRoot(ctx, root, request.StartKey, request.EndKey, int(request.KeyLimit)) + startKey := merkledb.Nothing[[]byte]() + if !request.StartKey.IsNothing { + startKey = merkledb.Some(request.StartKey.Value) + } + endKey := merkledb.Nothing[[]byte]() + if !request.EndKey.IsNothing { + endKey = merkledb.Some(request.EndKey.Value) + } + return dbToSync.GetRangeProofAtRoot(ctx, root, startKey, endKey, int(request.KeyLimit)) }, ).AnyTimes() client.EXPECT().GetChangeProof(gomock.Any(), gomock.Any(), gomock.Any()).DoAndReturn( @@ -986,7 +1045,15 @@ func Test_Sync_Result_Correct_Root_Update_Root_During(t *testing.T) { require.NoError(err) endRoot, err := ids.ToID(request.EndRootHash) require.NoError(err) - return dbToSync.GetChangeProof(ctx, startRoot, endRoot, request.StartKey, request.EndKey, int(request.KeyLimit)) + startKey := merkledb.Nothing[[]byte]() + if !request.StartKey.IsNothing { + startKey = merkledb.Some(request.StartKey.Value) + } + endKey := merkledb.Nothing[[]byte]() + if !request.EndKey.IsNothing { + endKey = merkledb.Some(request.EndKey.Value) + } + return 
dbToSync.GetChangeProof(ctx, startRoot, endRoot, startKey, endKey, int(request.KeyLimit)) }, ).AnyTimes() @@ -1043,8 +1110,8 @@ func Test_Sync_UpdateSyncTarget(t *testing.T) { // Populate [m.processWork] to ensure that UpdateSyncTarget // moves the work to [m.unprocessedWork]. item := &workItem{ - start: []byte{1}, - end: []byte{2}, + start: merkledb.Some([]byte{1}), + end: merkledb.Some([]byte{2}), localRootID: ids.GenerateTestID(), } m.processedWork.Insert(item) diff --git a/x/sync/workheap.go b/x/sync/workheap.go index 239728af9ded..b01f75ad574e 100644 --- a/x/sync/workheap.go +++ b/x/sync/workheap.go @@ -8,6 +8,7 @@ import ( "container/heap" "github.com/ava-labs/avalanchego/utils/math" + "github.com/ava-labs/avalanchego/x/merkledb" "github.com/google/btree" ) @@ -40,7 +41,19 @@ func newWorkHeap() *workHeap { sortedItems: btree.NewG( 2, func(a, b *heapItem) bool { - return bytes.Compare(a.workItem.start, b.workItem.start) < 0 + aNothing := a.workItem.start.IsNothing() + bNothing := b.workItem.start.IsNothing() + if aNothing { + // [a] is Nothing, so if [b] is Nothing, they're equal. + // Otherwise, [b] is greater. + return !bNothing + } + if bNothing { + // [a] has a value and [b] doesn't so [a] is greater. + return false + } + // [a] and [b] both contain values. Compare the values. + return bytes.Compare(a.workItem.start.Value(), b.workItem.start.Value()) < 0 }, ), } @@ -98,13 +111,15 @@ func (wh *workHeap) MergeInsert(item *workItem) { wh.sortedItems.DescendLessOrEqual( searchItem, func(beforeItem *heapItem) bool { - if item.localRootID == beforeItem.workItem.localRootID && bytes.Equal(beforeItem.workItem.end, item.start) { - // [beforeItem.start, beforeItem.end] and [item.start, item.end] are - // merged into [beforeItem.start, item.end] - beforeItem.workItem.end = item.end - beforeItem.workItem.priority = math.Max(item.priority, beforeItem.workItem.priority) - heap.Fix(&wh.innerHeap, beforeItem.heapIndex) - mergedBefore = beforeItem + if item.localRootID == beforeItem.workItem.localRootID { + if merkledb.MaybeBytesEquals(item.start, beforeItem.workItem.end) { + // [beforeItem.start, beforeItem.end] and [item.start, item.end] are + // merged into [beforeItem.start, item.end] + beforeItem.workItem.end = item.end + beforeItem.workItem.priority = math.Max(item.priority, beforeItem.workItem.priority) + heap.Fix(&wh.innerHeap, beforeItem.heapIndex) + mergedBefore = beforeItem + } } return false }) @@ -114,13 +129,15 @@ func (wh *workHeap) MergeInsert(item *workItem) { wh.sortedItems.AscendGreaterOrEqual( searchItem, func(afterItem *heapItem) bool { - if item.localRootID == afterItem.workItem.localRootID && bytes.Equal(afterItem.workItem.start, item.end) { - // [item.start, item.end] and [afterItem.start, afterItem.end] are merged into - // [item.start, afterItem.end]. - afterItem.workItem.start = item.start - afterItem.workItem.priority = math.Max(item.priority, afterItem.workItem.priority) - heap.Fix(&wh.innerHeap, afterItem.heapIndex) - mergedAfter = afterItem + if item.localRootID == afterItem.workItem.localRootID { + if merkledb.MaybeBytesEquals(item.end, afterItem.workItem.start) { + // [item.start, item.end] and [afterItem.start, afterItem.end] are merged into + // [item.start, afterItem.end]. 
+ afterItem.workItem.start = item.start + afterItem.workItem.priority = math.Max(item.priority, afterItem.workItem.priority) + heap.Fix(&wh.innerHeap, afterItem.heapIndex) + mergedAfter = afterItem + } } return false }) diff --git a/x/sync/workheap_test.go b/x/sync/workheap_test.go index b6456f4b8257..ce472744bf58 100644 --- a/x/sync/workheap_test.go +++ b/x/sync/workheap_test.go @@ -9,6 +9,7 @@ import ( "github.com/stretchr/testify/require" "github.com/ava-labs/avalanchego/ids" + "github.com/ava-labs/avalanchego/x/merkledb" ) // Tests heap.Interface methods Push, Pop, Swap, Len, Less. @@ -17,8 +18,8 @@ func Test_WorkHeap_InnerHeap(t *testing.T) { lowPriorityItem := &heapItem{ workItem: &workItem{ - start: []byte{1}, - end: []byte{2}, + start: merkledb.Some([]byte{1}), + end: merkledb.Some([]byte{2}), priority: lowPriority, localRootID: ids.GenerateTestID(), }, @@ -26,8 +27,8 @@ func Test_WorkHeap_InnerHeap(t *testing.T) { mediumPriorityItem := &heapItem{ workItem: &workItem{ - start: []byte{3}, - end: []byte{4}, + start: merkledb.Some([]byte{3}), + end: merkledb.Some([]byte{4}), priority: medPriority, localRootID: ids.GenerateTestID(), }, @@ -35,8 +36,8 @@ func Test_WorkHeap_InnerHeap(t *testing.T) { highPriorityItem := &heapItem{ workItem: &workItem{ - start: []byte{5}, - end: []byte{6}, + start: merkledb.Some([]byte{5}), + end: merkledb.Some([]byte{6}), priority: highPriority, localRootID: ids.GenerateTestID(), }, @@ -113,20 +114,20 @@ func Test_WorkHeap_Insert_GetWork(t *testing.T) { h := newWorkHeap() lowPriorityItem := &workItem{ - start: []byte{4}, - end: []byte{5}, + start: merkledb.Some([]byte{4}), + end: merkledb.Some([]byte{5}), priority: lowPriority, localRootID: ids.GenerateTestID(), } mediumPriorityItem := &workItem{ - start: []byte{0}, - end: []byte{1}, + start: merkledb.Some([]byte{0}), + end: merkledb.Some([]byte{1}), priority: medPriority, localRootID: ids.GenerateTestID(), } highPriorityItem := &workItem{ - start: []byte{2}, - end: []byte{3}, + start: merkledb.Some([]byte{2}), + end: merkledb.Some([]byte{3}), priority: highPriority, localRootID: ids.GenerateTestID(), } @@ -167,22 +168,22 @@ func Test_WorkHeap_remove(t *testing.T) { h := newWorkHeap() lowPriorityItem := &workItem{ - start: []byte{0}, - end: []byte{1}, + start: merkledb.Some([]byte{0}), + end: merkledb.Some([]byte{1}), priority: lowPriority, localRootID: ids.GenerateTestID(), } mediumPriorityItem := &workItem{ - start: []byte{2}, - end: []byte{3}, + start: merkledb.Some([]byte{2}), + end: merkledb.Some([]byte{3}), priority: medPriority, localRootID: ids.GenerateTestID(), } highPriorityItem := &workItem{ - start: []byte{4}, - end: []byte{5}, + start: merkledb.Some([]byte{4}), + end: merkledb.Some([]byte{5}), priority: highPriority, localRootID: ids.GenerateTestID(), } @@ -230,42 +231,42 @@ func Test_WorkHeap_Merge_Insert(t *testing.T) { // merge with range before syncHeap := newWorkHeap() - syncHeap.MergeInsert(&workItem{start: nil, end: []byte{63}}) + syncHeap.MergeInsert(&workItem{start: merkledb.Nothing[[]byte](), end: merkledb.Some([]byte{63})}) require.Equal(t, 1, syncHeap.Len()) - syncHeap.MergeInsert(&workItem{start: []byte{127}, end: []byte{192}}) + syncHeap.MergeInsert(&workItem{start: merkledb.Some([]byte{127}), end: merkledb.Some([]byte{192})}) require.Equal(t, 2, syncHeap.Len()) - syncHeap.MergeInsert(&workItem{start: []byte{193}, end: nil}) + syncHeap.MergeInsert(&workItem{start: merkledb.Some([]byte{193}), end: merkledb.Nothing[[]byte]()}) require.Equal(t, 3, syncHeap.Len()) - 
syncHeap.MergeInsert(&workItem{start: []byte{63}, end: []byte{126}, priority: lowPriority}) + syncHeap.MergeInsert(&workItem{start: merkledb.Some([]byte{63}), end: merkledb.Some([]byte{126}), priority: lowPriority}) require.Equal(t, 3, syncHeap.Len()) // merge with range after syncHeap = newWorkHeap() - syncHeap.MergeInsert(&workItem{start: nil, end: []byte{63}}) + syncHeap.MergeInsert(&workItem{start: merkledb.Nothing[[]byte](), end: merkledb.Some([]byte{63})}) require.Equal(t, 1, syncHeap.Len()) - syncHeap.MergeInsert(&workItem{start: []byte{127}, end: []byte{192}}) + syncHeap.MergeInsert(&workItem{start: merkledb.Some([]byte{127}), end: merkledb.Some([]byte{192})}) require.Equal(t, 2, syncHeap.Len()) - syncHeap.MergeInsert(&workItem{start: []byte{193}, end: nil}) + syncHeap.MergeInsert(&workItem{start: merkledb.Some([]byte{193}), end: merkledb.Nothing[[]byte]()}) require.Equal(t, 3, syncHeap.Len()) - syncHeap.MergeInsert(&workItem{start: []byte{64}, end: []byte{127}, priority: lowPriority}) + syncHeap.MergeInsert(&workItem{start: merkledb.Some([]byte{64}), end: merkledb.Some([]byte{127}), priority: lowPriority}) require.Equal(t, 3, syncHeap.Len()) // merge both sides at the same time syncHeap = newWorkHeap() - syncHeap.MergeInsert(&workItem{start: nil, end: []byte{63}}) + syncHeap.MergeInsert(&workItem{start: merkledb.Nothing[[]byte](), end: merkledb.Some([]byte{63})}) require.Equal(t, 1, syncHeap.Len()) - syncHeap.MergeInsert(&workItem{start: []byte{127}, end: nil}) + syncHeap.MergeInsert(&workItem{start: merkledb.Some([]byte{127}), end: merkledb.Nothing[[]byte]()}) require.Equal(t, 2, syncHeap.Len()) - syncHeap.MergeInsert(&workItem{start: []byte{63}, end: []byte{127}, priority: lowPriority}) + syncHeap.MergeInsert(&workItem{start: merkledb.Some([]byte{63}), end: merkledb.Some([]byte{127}), priority: lowPriority}) require.Equal(t, 1, syncHeap.Len()) }
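Closing note on the workheap.go ordering above: a Nothing start key now sorts before every concrete key, so the work item covering the lowest end of the keyspace is always first in the btree. That comparison can be restated as a standalone function (compareStartKeys is a hypothetical name; the real logic lives inline in newWorkHeap):

package sync

import (
	"bytes"

	"github.com/ava-labs/avalanchego/x/merkledb"
)

// compareStartKeys reports whether [a] sorts strictly before [b] under the
// ordering used by workHeap's btree: Nothing sorts before any Some value,
// two Nothings compare equal, and two Some values compare bytewise.
func compareStartKeys(a, b merkledb.Maybe[[]byte]) bool {
	switch {
	case a.IsNothing():
		// [a] is Nothing; it is strictly less than [b] only if [b] has a value.
		return !b.IsNothing()
	case b.IsNothing():
		// [a] has a value and [b] does not, so [a] is not less than [b].
		return false
	default:
		return bytes.Compare(a.Value(), b.Value()) < 0
	}
}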