
Commit

Merge 0a981e6 into b7dd36b
sstanculeanu committed May 25, 2023
2 parents b7dd36b + 0a981e6 commit ce62550
Showing 30 changed files with 2,399 additions and 750 deletions.
12 changes: 12 additions & 0 deletions dataRetriever/blockchain/blockchain_test.go
@@ -5,6 +5,7 @@ import (
"testing"

"github.com/multiversx/mx-chain-core-go/core/check"
"github.com/multiversx/mx-chain-core-go/data"
"github.com/multiversx/mx-chain-core-go/data/block"
"github.com/multiversx/mx-chain-core-go/data/mock"
"github.com/stretchr/testify/assert"
@@ -81,3 +82,14 @@ func TestBlockChain_SettersAndGettersNilValues(t *testing.T) {
assert.Nil(t, bc.GetCurrentBlockHeader())
assert.Empty(t, bc.GetCurrentBlockRootHash())
}

func TestBlockChain_SettersInvalidValues(t *testing.T) {
t.Parallel()

bc, _ := NewBlockChain(&mock.AppStatusHandlerStub{})
err := bc.SetGenesisHeader(&block.MetaBlock{})
assert.Equal(t, err, data.ErrInvalidHeaderType)

err = bc.SetCurrentBlockHeaderAndRootHash(&block.MetaBlock{}, []byte("root hash"))
assert.Equal(t, err, data.ErrInvalidHeaderType)
}
11 changes: 11 additions & 0 deletions dataRetriever/blockchain/metachain_test.go
@@ -82,3 +82,14 @@ func TestMetaChain_SettersAndGettersNilValues(t *testing.T) {
assert.Nil(t, mc.GetCurrentBlockHeader())
assert.Empty(t, mc.GetCurrentBlockRootHash())
}

func TestMetaChain_SettersInvalidValues(t *testing.T) {
t.Parallel()

bc, _ := NewMetaChain(&mock.AppStatusHandlerStub{})
err := bc.SetGenesisHeader(&block.Header{})
assert.Equal(t, err, ErrWrongTypeInSet)

err = bc.SetCurrentBlockHeaderAndRootHash(&block.Header{}, []byte("root hash"))
assert.Equal(t, err, ErrWrongTypeInSet)
}
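The two new tests above exercise the same guard from opposite sides: the shard blockChain rejects a *block.MetaBlock with data.ErrInvalidHeaderType, while the metachain rejects a *block.Header with ErrWrongTypeInSet. A minimal sketch of that kind of type guard is shown below; the function name and the elided storage step are assumptions for illustration, not the actual mx-chain-go setter.

package sketch

import (
	"github.com/multiversx/mx-chain-core-go/core/check"
	"github.com/multiversx/mx-chain-core-go/data"
	"github.com/multiversx/mx-chain-core-go/data/block"
)

// setGenesisHeaderForShardChain illustrates the guard: the setter accepts the
// generic data.HeaderHandler interface but only stores the concrete header
// type its chain works with, so a *block.MetaBlock is rejected here.
func setGenesisHeaderForShardChain(header data.HeaderHandler) error {
	if check.IfNil(header) {
		return nil // the real setter clears the stored genesis header in this case
	}
	if _, ok := header.(*block.Header); !ok {
		return data.ErrInvalidHeaderType
	}
	// ... clone and store the header under a mutex ...
	return nil
}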
3 changes: 3 additions & 0 deletions dataRetriever/chainStorer_test.go
@@ -239,6 +239,9 @@ func TestBlockChain_GetStorer(t *testing.T) {
assert.True(t, peerBlockUnit == storer)
storer, _ = b.GetStorer(4)
assert.True(t, headerUnit == storer)
storer, err := b.GetStorer(5)
assert.True(t, errors.Is(err, dataRetriever.ErrStorerNotFound))
assert.Nil(t, storer)
}

func TestBlockChain_GetAllStorers(t *testing.T) {
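The new assertion uses errors.Is, which implies the error returned for an unknown unit type wraps dataRetriever.ErrStorerNotFound. A self-contained sketch of that lookup-and-wrap pattern follows; the type and field names are illustrative, not the actual ChainStorer implementation.

package main

import (
	"errors"
	"fmt"
)

// errStorerNotFound stands in for dataRetriever.ErrStorerNotFound.
var errStorerNotFound = errors.New("storer not found")

// chainStorer keeps one storer per unit type; a string stands in for the real
// storer interface to keep the sketch self-contained.
type chainStorer struct {
	storers map[int]string
}

// GetStorer wraps the sentinel error so callers can match it with errors.Is
// while still seeing which unit type was requested.
func (cs *chainStorer) GetStorer(unitType int) (string, error) {
	storer, ok := cs.storers[unitType]
	if !ok {
		return "", fmt.Errorf("%w for unit type %d", errStorerNotFound, unitType)
	}
	return storer, nil
}

func main() {
	cs := &chainStorer{storers: map[int]string{4: "headerUnit"}}

	_, err := cs.GetStorer(5)
	fmt.Println(errors.Is(err, errStorerNotFound)) // true
}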
1 change: 1 addition & 0 deletions dataRetriever/dataPool/dataPool_test.go
@@ -209,6 +209,7 @@ func TestNewDataPool_OkValsShouldWork(t *testing.T) {
assert.True(t, args.SmartContracts == tdp.SmartContracts())
assert.True(t, args.PeerAuthentications == tdp.PeerAuthentications())
assert.True(t, args.Heartbeats == tdp.Heartbeats())
assert.True(t, args.ValidatorsInfo == tdp.ValidatorsInfo())
}

func TestNewDataPool_Close(t *testing.T) {
2 changes: 1 addition & 1 deletion dataRetriever/dataPool/headersCache/headersCache.go
@@ -53,7 +53,7 @@ func (cache *headersCache) addHeader(headerHash []byte, header data.HeaderHandle
return true
}

//tryToDoEviction will check if pool is full and if it is will do eviction
// tryToDoEviction will check if pool is full and if so, it will do the eviction
func (cache *headersCache) tryToDoEviction(shardId uint32) {
numHeaders := cache.getNumHeaders(shardId)
if int(numHeaders) >= cache.maxHeadersPerShard {
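The corrected comment describes the usual bounded-cache pattern: once a shard holds maxHeadersPerShard entries, the oldest numElementsToRemoveOnEviction entries are dropped before a new header is accepted. The sketch below shows that pattern in isolation; a plain slice stands in for the real per-shard, nonce-keyed storage, so it is illustrative only.

package main

import "fmt"

// boundedPool is a simplified stand-in for one shard's header storage.
type boundedPool struct {
	maxItems    int
	numToRemove int
	items       []string
}

// add performs the eviction check before inserting, mirroring the
// tryToDoEviction call made from addHeader.
func (p *boundedPool) add(item string) {
	if len(p.items) >= p.maxItems {
		n := p.numToRemove
		if n > len(p.items) {
			n = len(p.items)
		}
		// drop the oldest n entries to make room
		p.items = p.items[n:]
	}
	p.items = append(p.items, item)
}

func main() {
	p := &boundedPool{maxItems: 3, numToRemove: 2}
	for _, h := range []string{"hdr1", "hdr2", "hdr3", "hdr4"} {
		p.add(h)
	}
	fmt.Println(p.items) // [hdr3 hdr4]
}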
71 changes: 70 additions & 1 deletion dataRetriever/dataPool/headersCache/headersPool_test.go
@@ -1,6 +1,7 @@
package headersCache_test

import (
"errors"
"fmt"
"sort"
"sync"
@@ -16,6 +17,45 @@ import (
"github.com/stretchr/testify/require"
)

func TestNewHeadersCacher(t *testing.T) {
t.Parallel()

t.Run("invalid MaxHeadersPerShard should error", testNewHeadersCacher(
config.HeadersPoolConfig{
MaxHeadersPerShard: 0,
}))
t.Run("invalid NumElementsToRemoveOnEviction should error", testNewHeadersCacher(
config.HeadersPoolConfig{
MaxHeadersPerShard: 1,
NumElementsToRemoveOnEviction: 0,
}))
t.Run("invalid config should error", testNewHeadersCacher(
config.HeadersPoolConfig{
MaxHeadersPerShard: 1,
NumElementsToRemoveOnEviction: 3,
}))
t.Run("should work", func(t *testing.T) {
t.Parallel()

headersCacher, err := headersCache.NewHeadersPool(config.HeadersPoolConfig{
MaxHeadersPerShard: 2,
NumElementsToRemoveOnEviction: 1,
})
require.NoError(t, err)
require.NotNil(t, headersCacher)
})
}

func testNewHeadersCacher(cfg config.HeadersPoolConfig) func(t *testing.T) {
return func(t *testing.T) {
t.Parallel()

headersCacher, err := headersCache.NewHeadersPool(cfg)
require.True(t, errors.Is(err, headersCache.ErrInvalidHeadersCacheParameter))
require.Nil(t, headersCacher)
}
}

func TestNewHeadersCacher_AddHeadersInCache(t *testing.T) {
t.Parallel()

@@ -28,11 +68,16 @@ func TestNewHeadersCacher_AddHeadersInCache(t *testing.T) {
nonce := uint64(1)
shardId := uint32(0)

headers, _, err := headersCacher.GetHeadersByNonceAndShardId(nonce, shardId)
require.Equal(t, headersCache.ErrHeaderNotFound, err)
require.Nil(t, headers)

headerHash1 := []byte("hash1")
headerHash2 := []byte("hash2")
testHdr1 := &block.Header{Nonce: nonce, ShardID: shardId}
testHdr2 := &block.Header{Nonce: nonce, ShardID: shardId, Round: 100}

headersCacher.AddHeader([]byte("nil header hash"), nil) // coverage
headersCacher.AddHeader(headerHash1, testHdr1)
headersCacher.AddHeader(headerHash2, testHdr2)

@@ -45,7 +90,7 @@ func TestNewHeadersCacher_AddHeadersInCache(t *testing.T) {
require.Equal(t, testHdr2, header)

expectedHeaders := []data.HeaderHandler{testHdr1, testHdr2}
headers, _, err := headersCacher.GetHeadersByNonceAndShardId(nonce, shardId)
headers, _, err = headersCacher.GetHeadersByNonceAndShardId(nonce, shardId)
require.Nil(t, err)
require.Equal(t, expectedHeaders, headers)
}
@@ -70,6 +115,8 @@ func Test_RemoveHeaderByHash(t *testing.T) {
headersCacher.AddHeader(headerHash1, testHdr1)
headersCacher.AddHeader(headerHash2, testHdr2)

headersCacher.RemoveHeaderByHash([]byte(""))
headersCacher.RemoveHeaderByHash([]byte("missing hash"))
headersCacher.RemoveHeaderByHash(headerHash1)
header, err := headersCacher.GetHeaderByHash(headerHash1)
require.Nil(t, header)
@@ -101,6 +148,8 @@ func TestHeadersCacher_AddHeadersInCacheAndRemoveByNonceAndShardId(t *testing.T)
headersCacher.AddHeader(headerHash1, testHdr1)
headersCacher.AddHeader(headerHash2, testHdr2)

headersCacher.RemoveHeaderByNonceAndShardId(nonce, 100)
headersCacher.RemoveHeaderByNonceAndShardId(100, shardId)
headersCacher.RemoveHeaderByNonceAndShardId(nonce, shardId)
header, err := headersCacher.GetHeaderByHash(headerHash1)
require.Nil(t, header)
@@ -577,6 +626,7 @@ func TestHeadersPool_RegisterHandler(t *testing.T) {
wasCalled = true
wg.Done()
}
headersCacher.RegisterHandler(nil)
headersCacher.RegisterHandler(handler)
header, hash := createASliceOfHeaders(1, 0)
headersCacher.AddHeader(hash[0], &header[0])
@@ -603,6 +653,25 @@ func TestHeadersPool_Clear(t *testing.T) {
require.Equal(t, 0, headersCacher.GetNumHeaders(0))
}

func TestHeadersPool_IsInterfaceNil(t *testing.T) {
t.Parallel()

headersCacher, _ := headersCache.NewHeadersPool(
config.HeadersPoolConfig{
MaxHeadersPerShard: 0,
},
)
require.True(t, headersCacher.IsInterfaceNil())

headersCacher, _ = headersCache.NewHeadersPool(
config.HeadersPoolConfig{
MaxHeadersPerShard: 1000,
NumElementsToRemoveOnEviction: 10,
},
)
require.False(t, headersCacher.IsInterfaceNil())
}

func createASliceOfHeaders(numHeaders int, shardId uint32) ([]block.Header, [][]byte) {
headers := make([]block.Header, 0)
headersHashes := make([][]byte, 0)
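TestHeadersPool_IsInterfaceNil relies on the mx-chain-go convention that IsInterfaceNil must be safe to call on a nil receiver, because a typed nil pointer wrapped in an interface is not equal to a plain nil interface. The self-contained sketch below demonstrates that convention with an illustrative type, not the actual headersPool.

package main

import "fmt"

type headersPool struct{}

// IsInterfaceNil returns true if there is no value under the interface.
// It is deliberately written to tolerate a nil receiver.
func (pool *headersPool) IsInterfaceNil() bool {
	return pool == nil
}

func main() {
	var pool *headersPool // e.g. what a failing constructor returns
	var asInterface interface{ IsInterfaceNil() bool } = pool

	fmt.Println(asInterface == nil)           // false: a typed nil hides inside the interface
	fmt.Println(asInterface.IsInterfaceNil()) // true: the nil-receiver check still catches it
}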
6 changes: 3 additions & 3 deletions dataRetriever/factory/containers/resolversContainer_test.go
@@ -251,7 +251,7 @@ func TestResolversContainer_IterateNilHandlerShouldNotPanic(t *testing.T) {
defer func() {
r := recover()
if r != nil {
assert.Fail(t, "should not have paniced")
assert.Fail(t, "should not have panicked")
}
}()

@@ -269,7 +269,7 @@ func TestResolversContainer_IterateNotAValidKeyShouldWorkAndNotPanic(t *testing.
defer func() {
r := recover()
if r != nil {
assert.Fail(t, "should not have paniced")
assert.Fail(t, "should not have panicked")
}
}()

@@ -292,7 +292,7 @@ func TestResolversContainer_IterateNotAValidValueShouldWorkAndNotPanic(t *testin
defer func() {
r := recover()
if r != nil {
assert.Fail(t, "should not have paniced")
assert.Fail(t, "should not have panicked")
}
}()

@@ -25,7 +25,7 @@ func TestCreateCurrentEpochProvider_NilCurrentEpochProvider(t *testing.T) {
assert.IsType(t, disabled.NewEpochProvider(), cnep)
}

func TestCreateCurrentEpochProvider_ArithemticEpochProvider(t *testing.T) {
func TestCreateCurrentEpochProvider_ArithmeticEpochProvider(t *testing.T) {
t.Parallel()

cnep, err := CreateCurrentEpochProvider(
@@ -5,6 +5,7 @@ import (
"strings"
"testing"

"github.com/multiversx/mx-chain-core-go/core"
"github.com/multiversx/mx-chain-go/common"
"github.com/multiversx/mx-chain-go/dataRetriever"
"github.com/multiversx/mx-chain-go/dataRetriever/factory/resolverscontainer"
@@ -88,6 +89,17 @@ func createTriesHolderForMeta() common.TriesHolder {

// ------- NewResolversContainerFactory

func TestNewMetaResolversContainerFactory_NewNumGoRoutinesThrottlerFailsShouldErr(t *testing.T) {
t.Parallel()

args := getArgumentsMeta()
args.NumConcurrentResolvingJobs = 0
rcf, err := resolverscontainer.NewMetaResolversContainerFactory(args)

assert.Nil(t, rcf)
assert.Equal(t, core.ErrNotPositiveValue, err)
}

func TestNewMetaResolversContainerFactory_NilShardCoordinatorShouldErr(t *testing.T) {
t.Parallel()

@@ -280,6 +292,18 @@ func TestMetaResolversContainerFactory_With4ShardsShouldWork(t *testing.T) {
assert.Equal(t, totalResolvers+noOfShards, container.Len())
}

func TestMetaResolversContainerFactory_IsInterfaceNil(t *testing.T) {
t.Parallel()

args := getArgumentsMeta()
args.ShardCoordinator = nil
rcf, _ := resolverscontainer.NewMetaResolversContainerFactory(args)
assert.True(t, rcf.IsInterfaceNil())

rcf, _ = resolverscontainer.NewMetaResolversContainerFactory(getArgumentsMeta())
assert.False(t, rcf.IsInterfaceNil())
}

func getArgumentsMeta() resolverscontainer.FactoryArgs {
return resolverscontainer.FactoryArgs{
ShardCoordinator: mock.NewOneShardCoordinatorMock(),
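The added error-path test sets NumConcurrentResolvingJobs to 0 so that the factory's internal go-routine throttler constructor fails; the assertion on core.ErrNotPositiveValue indicates that constructor simply validates its limit. A hedged sketch of such a guard is shown below; the type and constructor names are assumptions, only core.ErrNotPositiveValue comes from the real mx-chain-core-go core package.

package sketch

import "github.com/multiversx/mx-chain-core-go/core"

// numGoRoutinesThrottler is an illustrative stand-in for the throttler built
// from the factory's NumConcurrentResolvingJobs argument.
type numGoRoutinesThrottler struct {
	max int32
}

// newNumGoRoutinesThrottler rejects non-positive limits, which is the error
// path the two new factory tests exercise.
func newNumGoRoutinesThrottler(max int32) (*numGoRoutinesThrottler, error) {
	if max <= 0 {
		return nil, core.ErrNotPositiveValue
	}
	return &numGoRoutinesThrottler{max: max}, nil
}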
@@ -5,6 +5,7 @@ import (
"strings"
"testing"

"github.com/multiversx/mx-chain-core-go/core"
"github.com/multiversx/mx-chain-go/common"
"github.com/multiversx/mx-chain-go/dataRetriever"
"github.com/multiversx/mx-chain-go/dataRetriever/factory/resolverscontainer"
@@ -94,6 +95,17 @@ func createTriesHolderForShard() common.TriesHolder {

// ------- NewResolversContainerFactory

func TestNewShardResolversContainerFactory_NewNumGoRoutinesThrottlerFailsShouldErr(t *testing.T) {
t.Parallel()

args := getArgumentsShard()
args.NumConcurrentResolvingJobs = 0
rcf, err := resolverscontainer.NewShardResolversContainerFactory(args)

assert.Nil(t, rcf)
assert.Equal(t, core.ErrNotPositiveValue, err)
}

func TestNewShardResolversContainerFactory_NilShardCoordinatorShouldErr(t *testing.T) {
t.Parallel()

@@ -336,6 +348,18 @@ func TestShardResolversContainerFactory_With4ShardsShouldWork(t *testing.T) {
assert.Equal(t, totalResolvers, container.Len())
}

func TestShardResolversContainerFactory_IsInterfaceNil(t *testing.T) {
t.Parallel()

args := getArgumentsShard()
args.ShardCoordinator = nil
rcf, _ := resolverscontainer.NewShardResolversContainerFactory(args)
assert.True(t, rcf.IsInterfaceNil())

rcf, _ = resolverscontainer.NewShardResolversContainerFactory(getArgumentsShard())
assert.False(t, rcf.IsInterfaceNil())
}

func getArgumentsShard() resolverscontainer.FactoryArgs {
return resolverscontainer.FactoryArgs{
ShardCoordinator: mock.NewOneShardCoordinatorMock(),
2 changes: 1 addition & 1 deletion dataRetriever/interface.go
@@ -316,7 +316,7 @@ type WhiteListHandler interface {
IsInterfaceNil() bool
}

// DebugHandler defines an interface for debugging the reqested-resolved data
// DebugHandler defines an interface for debugging the requested-resolved data
type DebugHandler interface {
LogRequestedData(topic string, hashes [][]byte, numReqIntra int, numReqCross int)
LogFailedToResolveData(topic string, hash []byte, err error)
2 changes: 1 addition & 1 deletion dataRetriever/mock/marshalizerMock.go
@@ -32,7 +32,7 @@ func (mm *MarshalizerMock) Unmarshal(obj interface{}, buff []byte) error {
}

if obj == nil {
return errors.New("nil object to serilize to")
return errors.New("nil object to serialize to")
}

if buff == nil {
14 changes: 13 additions & 1 deletion dataRetriever/provider/miniBlocks_test.go
@@ -2,6 +2,7 @@ package provider_test

import (
"bytes"
"errors"
"fmt"
"testing"

@@ -240,13 +241,24 @@ func TestMiniBlockProvider_GetMiniBlocksFromStorerShouldBeFoundInStorage(t *test
existingHashes := [][]byte{
[]byte("hash1"),
[]byte("hash2"),
[]byte("hash3"),
}
requestedHashes := existingHashes

cnt := 0
arg := createMockMiniblockProviderArgs(nil, existingHashes)
arg.Marshalizer = &testscommon.MarshalizerStub{
UnmarshalCalled: func(obj interface{}, buff []byte) error {
cnt++
if cnt == 1 {
return errors.New("unmarshal fails for coverage")
}
return nil
},
}
mbp, _ := provider.NewMiniBlockProvider(arg)

miniBlocksAndHashes, missingHashes := mbp.GetMiniBlocksFromStorer(requestedHashes)
assert.Equal(t, 2, len(miniBlocksAndHashes))
assert.Equal(t, 0, len(missingHashes))
assert.Equal(t, 1, len(missingHashes))
}
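With three requested hashes and a stub marshalizer that fails exactly once, the expectations change to two found miniblocks and one missing hash: a stored miniblock whose bytes cannot be unmarshalled is treated the same as one that is absent from the storer. The sketch below captures that retrieval loop in a self-contained form; the function and parameter names are illustrative, not the actual provider code.

package sketch

// getMiniBlocksFromStorer reports a hash as missing both when the storer
// lookup fails and when the stored bytes cannot be unmarshalled; everything
// else is returned as found.
func getMiniBlocksFromStorer(
	hashes [][]byte,
	getFromStorer func(hash []byte) ([]byte, error),
	unmarshal func(buff []byte) (interface{}, error),
) (found []interface{}, missing [][]byte) {
	for _, hash := range hashes {
		buff, err := getFromStorer(hash)
		if err != nil {
			missing = append(missing, hash)
			continue
		}

		miniBlock, err := unmarshal(buff)
		if err != nil {
			missing = append(missing, hash)
			continue
		}

		found = append(found, miniBlock)
	}

	return found, missing
}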
