Skip to content

Commit

Permalink
Merge pull request #4947 from multiversx/fix_storage_pruning_node_type_selection
Browse files Browse the repository at this point in the history

Fix storage pruning node type selection
  • Loading branch information
sstanculeanu committed Feb 22, 2023
2 parents 833f3ed + 65d6096 commit a46893c
Show file tree
Hide file tree
Showing 48 changed files with 639 additions and 330 deletions.
22 changes: 22 additions & 0 deletions common/interface.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import (
"github.com/multiversx/mx-chain-core-go/core"
"github.com/multiversx/mx-chain-core-go/data"
"github.com/multiversx/mx-chain-core-go/data/block"
crypto "github.com/multiversx/mx-chain-crypto-go"
"github.com/multiversx/mx-chain-go/trie/statistics"
)

Expand Down Expand Up @@ -339,3 +340,24 @@ type EnableEpochsHandler interface {

IsInterfaceNil() bool
}

// ManagedPeersHolder defines the operations of an entity that holds managed identities for a node
type ManagedPeersHolder interface {
	// AddManagedPeer registers a new managed peer from its private key bytes.
	AddManagedPeer(privateKeyBytes []byte) error
	// GetPrivateKey returns the private key associated with the provided public key bytes.
	GetPrivateKey(pkBytes []byte) (crypto.PrivateKey, error)
	// GetP2PIdentity returns the p2p identity (sk bytes and peer ID) for the provided public key bytes.
	GetP2PIdentity(pkBytes []byte) ([]byte, core.PeerID, error)
	// GetMachineID returns the machine ID associated with the provided public key bytes.
	GetMachineID(pkBytes []byte) (string, error)
	// GetNameAndIdentity returns the name and identity strings for the provided public key bytes.
	GetNameAndIdentity(pkBytes []byte) (string, string, error)
	// IncrementRoundsWithoutReceivedMessages increments the rounds-without-messages counter
	// for the given key — presumably used to detect an unreachable redundancy peer; verify against callers.
	IncrementRoundsWithoutReceivedMessages(pkBytes []byte)
	// ResetRoundsWithoutReceivedMessages resets the rounds-without-messages counter for the given key.
	ResetRoundsWithoutReceivedMessages(pkBytes []byte)
	// GetManagedKeysByCurrentNode returns the map of keys managed by the current node.
	// NOTE(review): map key format (hex vs raw pk bytes) is not visible here — confirm with implementation.
	GetManagedKeysByCurrentNode() map[string]crypto.PrivateKey
	// IsKeyManagedByCurrentNode returns true if the provided key is managed by the current node.
	IsKeyManagedByCurrentNode(pkBytes []byte) bool
	// IsKeyRegistered returns true if the provided key was registered on this holder.
	IsKeyRegistered(pkBytes []byte) bool
	// IsPidManagedByCurrentNode returns true if the provided peer ID belongs to a managed identity.
	IsPidManagedByCurrentNode(pid core.PeerID) bool
	// IsKeyValidator returns true if the provided key was flagged as validator (see SetValidatorState).
	IsKeyValidator(pkBytes []byte) bool
	// SetValidatorState stores the validator flag for the provided key.
	SetValidatorState(pkBytes []byte, state bool)
	// GetNextPeerAuthenticationTime returns the time when the next peer authentication
	// message should be sent for the provided key.
	GetNextPeerAuthenticationTime(pkBytes []byte) (time.Time, error)
	// SetNextPeerAuthenticationTime stores the time of the next peer authentication message for the key.
	SetNextPeerAuthenticationTime(pkBytes []byte, nextTime time.Time)
	// IsMultiKeyMode returns true if the node operates with multiple managed keys.
	IsMultiKeyMode() bool
	// IsInterfaceNil returns true if there is no value under the interface.
	IsInterfaceNil() bool
}
3 changes: 3 additions & 0 deletions epochStart/bootstrap/common.go
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,9 @@ func checkArguments(args ArgsEpochStartBootstrap) error {
if args.GeneralConfig.TrieSync.NumConcurrentTrieSyncers < 1 {
return fmt.Errorf("%s: %w", baseErrorMessage, epochStart.ErrInvalidNumConcurrentTrieSyncers)
}
if check.IfNil(args.CryptoComponentsHolder.ManagedPeersHolder()) {
return fmt.Errorf("%s: %w", baseErrorMessage, epochStart.ErrNilManagedPeersHolder)
}

return nil
}
2 changes: 2 additions & 0 deletions epochStart/bootstrap/metaStorageHandler.go
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ func NewMetaStorageHandler(
currentEpoch uint32,
uint64Converter typeConverters.Uint64ByteSliceConverter,
nodeTypeProvider NodeTypeProviderHandler,
managedPeersHolder common.ManagedPeersHolder,
) (*metaStorageHandler, error) {
epochStartNotifier := &disabled.EpochStartNotifier{}
storageFactory, err := factory.NewStorageServiceFactory(
Expand All @@ -49,6 +50,7 @@ func NewMetaStorageHandler(
CurrentEpoch: currentEpoch,
StorageType: factory.BootstrapStorageService,
CreateTrieEpochRootHashStorer: false,
ManagedPeersHolder: managedPeersHolder,
},
)
if err != nil {
Expand Down
21 changes: 14 additions & 7 deletions epochStart/bootstrap/metaStorageHandler_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,9 @@ func TestNewMetaStorageHandler_InvalidConfigErr(t *testing.T) {
hasher := &hashingMocks.HasherMock{}
uit64Cvt := &mock.Uint64ByteSliceConverterMock{}
nodeTypeProvider := &nodeTypeProviderMock.NodeTypeProviderStub{}
managedPeersHolder := &testscommon.ManagedPeersHolderStub{}

mtStrHandler, err := NewMetaStorageHandler(gCfg, prefsConfig, coordinator, pathManager, marshalizer, hasher, 1, uit64Cvt, nodeTypeProvider)
mtStrHandler, err := NewMetaStorageHandler(gCfg, prefsConfig, coordinator, pathManager, marshalizer, hasher, 1, uit64Cvt, nodeTypeProvider, managedPeersHolder)
assert.True(t, check.IfNil(mtStrHandler))
assert.NotNil(t, err)
}
Expand All @@ -51,7 +52,8 @@ func TestNewMetaStorageHandler_CreateForMetaErr(t *testing.T) {
hasher := &hashingMocks.HasherMock{}
uit64Cvt := &mock.Uint64ByteSliceConverterMock{}
nodeTypeProvider := &nodeTypeProviderMock.NodeTypeProviderStub{}
mtStrHandler, err := NewMetaStorageHandler(gCfg, prefsConfig, coordinator, pathManager, marshalizer, hasher, 1, uit64Cvt, nodeTypeProvider)
managedPeersHolder := &testscommon.ManagedPeersHolderStub{}
mtStrHandler, err := NewMetaStorageHandler(gCfg, prefsConfig, coordinator, pathManager, marshalizer, hasher, 1, uit64Cvt, nodeTypeProvider, managedPeersHolder)
assert.False(t, check.IfNil(mtStrHandler))
assert.Nil(t, err)
}
Expand All @@ -69,8 +71,9 @@ func TestMetaStorageHandler_saveLastHeader(t *testing.T) {
hasher := &hashingMocks.HasherMock{}
uit64Cvt := &mock.Uint64ByteSliceConverterMock{}
nodeTypeProvider := &nodeTypeProviderMock.NodeTypeProviderStub{}
managedPeersHolder := &testscommon.ManagedPeersHolderStub{}

mtStrHandler, _ := NewMetaStorageHandler(gCfg, prefsConfig, coordinator, pathManager, marshalizer, hasher, 1, uit64Cvt, nodeTypeProvider)
mtStrHandler, _ := NewMetaStorageHandler(gCfg, prefsConfig, coordinator, pathManager, marshalizer, hasher, 1, uit64Cvt, nodeTypeProvider, managedPeersHolder)

header := &block.MetaBlock{Nonce: 0}

Expand All @@ -97,8 +100,9 @@ func TestMetaStorageHandler_saveLastCrossNotarizedHeaders(t *testing.T) {
hasher := &hashingMocks.HasherMock{}
uit64Cvt := &mock.Uint64ByteSliceConverterMock{}
nodeTypeProvider := &nodeTypeProviderMock.NodeTypeProviderStub{}
managedPeersHolder := &testscommon.ManagedPeersHolderStub{}

mtStrHandler, _ := NewMetaStorageHandler(gCfg, prefsConfig, coordinator, pathManager, marshalizer, hasher, 1, uit64Cvt, nodeTypeProvider)
mtStrHandler, _ := NewMetaStorageHandler(gCfg, prefsConfig, coordinator, pathManager, marshalizer, hasher, 1, uit64Cvt, nodeTypeProvider, managedPeersHolder)

hdr1 := &block.Header{Nonce: 1}
hdr2 := &block.Header{Nonce: 2}
Expand Down Expand Up @@ -131,8 +135,9 @@ func TestMetaStorageHandler_saveTriggerRegistry(t *testing.T) {
hasher := &hashingMocks.HasherMock{}
uit64Cvt := &mock.Uint64ByteSliceConverterMock{}
nodeTypeProvider := &nodeTypeProviderMock.NodeTypeProviderStub{}
managedPeersHolder := &testscommon.ManagedPeersHolderStub{}

mtStrHandler, _ := NewMetaStorageHandler(gCfg, prefsConfig, coordinator, pathManager, marshalizer, hasher, 1, uit64Cvt, nodeTypeProvider)
mtStrHandler, _ := NewMetaStorageHandler(gCfg, prefsConfig, coordinator, pathManager, marshalizer, hasher, 1, uit64Cvt, nodeTypeProvider, managedPeersHolder)

components := &ComponentsNeededForBootstrap{
EpochStartMetaBlock: &block.MetaBlock{Nonce: 3},
Expand All @@ -156,8 +161,9 @@ func TestMetaStorageHandler_saveDataToStorage(t *testing.T) {
hasher := &hashingMocks.HasherMock{}
uit64Cvt := &mock.Uint64ByteSliceConverterMock{}
nodeTypeProvider := &nodeTypeProviderMock.NodeTypeProviderStub{}
managedPeersHolder := &testscommon.ManagedPeersHolderStub{}

mtStrHandler, _ := NewMetaStorageHandler(gCfg, prefsConfig, coordinator, pathManager, marshalizer, hasher, 1, uit64Cvt, nodeTypeProvider)
mtStrHandler, _ := NewMetaStorageHandler(gCfg, prefsConfig, coordinator, pathManager, marshalizer, hasher, 1, uit64Cvt, nodeTypeProvider, managedPeersHolder)

components := &ComponentsNeededForBootstrap{
EpochStartMetaBlock: &block.MetaBlock{Nonce: 3},
Expand Down Expand Up @@ -198,8 +204,9 @@ func testMetaWithMissingStorer(missingUnit dataRetriever.UnitType, atCallNumber
hasher := &hashingMocks.HasherMock{}
uit64Cvt := &mock.Uint64ByteSliceConverterMock{}
nodeTypeProvider := &nodeTypeProviderMock.NodeTypeProviderStub{}
managedPeersHolder := &testscommon.ManagedPeersHolderStub{}

mtStrHandler, _ := NewMetaStorageHandler(gCfg, prefsConfig, coordinator, pathManager, marshalizer, hasher, 1, uit64Cvt, nodeTypeProvider)
mtStrHandler, _ := NewMetaStorageHandler(gCfg, prefsConfig, coordinator, pathManager, marshalizer, hasher, 1, uit64Cvt, nodeTypeProvider, managedPeersHolder)
counter := 0
mtStrHandler.storageService = &storageStubs.ChainStorerStub{
GetStorerCalled: func(unitType dataRetriever.UnitType) (storage.Storer, error) {
Expand Down
3 changes: 3 additions & 0 deletions epochStart/bootstrap/process.go
Original file line number Diff line number Diff line change
Expand Up @@ -762,6 +762,7 @@ func (e *epochStartBootstrap) requestAndProcessForMeta(peerMiniBlocks []*block.M
e.epochStartMeta.GetEpoch(),
e.coreComponentsHolder.Uint64ByteSliceConverter(),
e.coreComponentsHolder.NodeTypeProvider(),
e.cryptoComponentsHolder.ManagedPeersHolder(),
)
if err != nil {
return err
Expand Down Expand Up @@ -928,6 +929,7 @@ func (e *epochStartBootstrap) requestAndProcessForShard(peerMiniBlocks []*block.
e.baseData.lastEpoch,
e.coreComponentsHolder.Uint64ByteSliceConverter(),
e.coreComponentsHolder.NodeTypeProvider(),
e.cryptoComponentsHolder.ManagedPeersHolder(),
)
if err != nil {
return err
Expand Down Expand Up @@ -1109,6 +1111,7 @@ func (e *epochStartBootstrap) createStorageService(
CurrentEpoch: startEpoch,
StorageType: storageFactory.BootstrapStorageService,
CreateTrieEpochRootHashStorer: createTrieEpochRootHashStorer,
ManagedPeersHolder: e.cryptoComponentsHolder.ManagedPeersHolder(),
})
if err != nil {
return nil, err
Expand Down
12 changes: 12 additions & 0 deletions epochStart/bootstrap/process_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,7 @@ func createComponentsForEpochStart() (*mock.CoreComponentsMock, *mock.CryptoComp
BlKeyGen: &cryptoMocks.KeyGenStub{},
TxKeyGen: &cryptoMocks.KeyGenStub{},
PeerSignHandler: &cryptoMocks.PeerSignatureHandlerStub{},
ManagedPeers: &testscommon.ManagedPeersHolderStub{},
}
}

Expand Down Expand Up @@ -584,6 +585,17 @@ func TestNewEpochStartBootstrap_NilArgsChecks(t *testing.T) {
assert.Equal(t, storage.ErrNotSupportedCacheType, err)
assert.Nil(t, epochStartProvider)
})
t.Run("nil managed peers holder", func(t *testing.T) {
t.Parallel()

coreComp, cryptoComp := createComponentsForEpochStart()
cryptoComp.ManagedPeers = nil
args := createMockEpochStartBootstrapArgs(coreComp, cryptoComp)

epochStartProvider, err := NewEpochStartBootstrap(args)
require.Nil(t, epochStartProvider)
require.True(t, errors.Is(err, epochStart.ErrNilManagedPeersHolder))
})
}

func TestNewEpochStartBootstrap(t *testing.T) {
Expand Down
2 changes: 2 additions & 0 deletions epochStart/bootstrap/shardStorageHandler.go
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ func NewShardStorageHandler(
currentEpoch uint32,
uint64Converter typeConverters.Uint64ByteSliceConverter,
nodeTypeProvider core.NodeTypeProviderHandler,
managedPeersHolder common.ManagedPeersHolder,
) (*shardStorageHandler, error) {
epochStartNotifier := &disabled.EpochStartNotifier{}
storageFactory, err := factory.NewStorageServiceFactory(
Expand All @@ -53,6 +54,7 @@ func NewShardStorageHandler(
CurrentEpoch: currentEpoch,
StorageType: factory.BootstrapStorageService,
CreateTrieEpochRootHashStorer: false,
ManagedPeersHolder: managedPeersHolder,
},
)
if err != nil {
Expand Down

0 comments on commit a46893c

Please sign in to comment.