Merge pull request #739 from onflow/m4ksio/5472-verify-events-hash
Verify events hash [Verifiable events part 2]
m4ksio committed Jun 23, 2021
2 parents 0079a62 + af7e716 commit 969737e
Showing 6 changed files with 153 additions and 14 deletions.
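At its core, this change makes the verification node recompute the hash of the events emitted while re-executing a chunk and compare it against the EventCollection recorded in the chunk body, returning a new chunk fault on mismatch. Below is a minimal sketch of that check, assuming the flow.EventsList.Hash helper used in the diff; checkEventsHash is an illustrative name, not part of this PR.

package main

import (
	"fmt"

	"github.com/onflow/flow-go/model/flow"
)

// checkEventsHash condenses the check added in module/chunks/chunkVerifier.go:
// hash the events collected while re-executing the chunk and compare the result
// against the hash recorded in the chunk body.
func checkEventsHash(recorded flow.Identifier, collected flow.EventsList) error {
	computed, err := collected.Hash()
	if err != nil {
		return fmt.Errorf("cannot calculate events collection hash: %w", err)
	}
	if recorded != computed {
		// the real verifier returns a chmodels.CFInvalidEventsCollection fault here
		return fmt.Errorf("events collection hash differs: got %x, expected %x", computed, recorded)
	}
	return nil
}

func main() {
	events := flow.EventsList{
		{Type: "event.someType", TransactionIndex: 1, EventIndex: 2, Payload: []byte{7, 3, 1, 2}},
	}
	recorded, _ := events.Hash() // what execution would have stored in chunk.EventCollection
	fmt.Println(checkEventsHash(recorded, events)) // prints <nil> on a match
}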
41 changes: 37 additions & 4 deletions engine/execution/testutil/fixtures.go
@@ -10,6 +10,8 @@ import (

"github.com/onflow/cadence"
jsoncdc "github.com/onflow/cadence/encoding/json"
"github.com/onflow/cadence/runtime"
"github.com/onflow/cadence/runtime/interpreter"
"github.com/rs/zerolog"
"github.com/stretchr/testify/require"

@@ -25,17 +27,48 @@ import (
)

func CreateContractDeploymentTransaction(contractName string, contract string, authorizer flow.Address, chain flow.Chain) *flow.TransactionBody {

encoded := hex.EncodeToString([]byte(contract))

return flow.NewTransactionBody().
SetScript([]byte(fmt.Sprintf(`transaction {
script := []byte(fmt.Sprintf(`transaction {
prepare(signer: AuthAccount, service: AuthAccount) {
signer.contracts.add(name: "%s", code: "%s".decodeHex())
}
}`, contractName, encoded)),
).
}`, contractName, encoded))

txBody := flow.NewTransactionBody().
SetScript(script).
AddAuthorizer(authorizer).
AddAuthorizer(chain.ServiceAddress())

// to synthetically generate an event using Cadence code we would need a lot of
// copying, so it's easier to just hardcode the JSON string
// TODO - extract parts of Cadence to make exporting events easy without the interpreter

interpreterHash := runtime.CodeToHashValue(script)
hashElements := interpreterHash.Elements()

valueStrings := make([]string, len(hashElements))

for i, value := range hashElements {
uint8 := value.(interpreter.UInt8Value)
valueStrings[i] = fmt.Sprintf("{\"type\":\"UInt8\",\"value\":\"%d\"}", uint8)
}

//hashValue := strings.Join(valueStrings, ",")

//payload := fmt.Sprintf("{\"type\":\"Event\",\"value\":{\"id\":\"flow.AccountContractAdded\",\"fields\":[{\"name\":\"address\",\"value\":{\"type\":\"Address\",\"value\":\"%s\"}},{\"name\":\"codeHash\",\"value\":{\"type\":\"Array\",\"value\":[%s]}\"]}},{\"name\":\"contract\",\"value\":{\"type\":\"String\",\"value\":\"%s\"}}]}}",
// authorizer, hashValue, contractName)

//event := flow.Event{
// Type: "flow.AccountContractAdded",
// TransactionID: flow.Identifier{},
// TransactionIndex: 0,
// EventIndex: 0,
// Payload: []byte(payload),
//}

return txBody
}

func UpdateContractDeploymentTransaction(contractName string, contract string, authorizer flow.Address, chain flow.Chain) *flow.TransactionBody {
7 changes: 6 additions & 1 deletion engine/verification/utils/unittest/fixture.go
@@ -185,10 +185,12 @@ func CompleteExecutionReceiptFixture(t *testing.T, chunks int, chain flow.Chain,
// for that result.
func ExecutionResultFixture(t *testing.T, chunkCount int, chain flow.Chain, refBlkHeader *flow.Header) (*flow.ExecutionResult,
*ExecutionReceiptData) {

// sets up the first collection of the block, consisting of three transactions
tx1 := testutil.DeployCounterContractTransaction(chain.ServiceAddress(), chain)
err := testutil.SignTransactionAsServiceAccount(tx1, 0, chain)
require.NoError(t, err)

tx2 := testutil.CreateCounterTransaction(chain.ServiceAddress(), chain.ServiceAddress())
err = testutil.SignTransactionAsServiceAccount(tx2, 1, chain)
require.NoError(t, err)
@@ -311,14 +313,17 @@ func ExecutionResultFixture(t *testing.T, chunkCount int, chain flow.Chain, refB
collectionID = flow.ZeroID
}

eventsHash, err := computationResult.Events[i].Hash()
require.NoError(t, err)

chunk := &flow.Chunk{
ChunkBody: flow.ChunkBody{
CollectionIndex: uint(i),
StartState: startStateCommitment,
// TODO: include the real event collection hash; currently using the collection ID to generate a different Chunk ID
// Otherwise, the chances of there being chunks with the same ID before all these TODOs are done are high, since
// startState stays the same if blocks are empty
EventCollection: collectionID,
EventCollection: eventsHash,
BlockID: executableBlock.ID(),
// TODO: record gas used
TotalComputationUsed: 0,
3 changes: 3 additions & 0 deletions engine/verification/verifier/engine.go
@@ -205,6 +205,9 @@ func (e *Engine) verify(ctx context.Context, originID flow.Identifier,
case *chmodels.CFInvalidVerifiableChunk:
// TODO raise challenge
e.log.Error().Msg(chFault.String())
case *chmodels.CFInvalidEventsCollection:
// TODO raise challenge
e.log.Error().Msg(chFault.String())
default:
return engine.NewInvalidInputErrorf("unknown type of chunk fault is received (type: %T) : %v",
chFault, chFault.String())
29 changes: 29 additions & 0 deletions model/chunks/chunkFaults.go
@@ -79,6 +79,35 @@ func NewCFNonMatchingFinalState(expected flow.StateCommitment, computed flow.Sta
execResID: execResID}
}

// CFInvalidEventsCollection is returned when the computed events collection hash differs from the one recorded in the chunk
type CFInvalidEventsCollection struct {
expected flow.Identifier
computed flow.Identifier
chunkIndex uint64
resultID flow.Identifier
}

func NewCFInvalidEventsCollection(expected flow.Identifier, computed flow.Identifier, chInx uint64, execResID flow.Identifier) *CFInvalidEventsCollection {
return &CFInvalidEventsCollection{
expected: expected,
computed: computed,
chunkIndex: chInx,
resultID: execResID,
}
}

func (c *CFInvalidEventsCollection) ChunkIndex() uint64 {
return c.chunkIndex
}

func (c *CFInvalidEventsCollection) ExecutionResultID() flow.Identifier {
return c.resultID
}

func (c *CFInvalidEventsCollection) String() string {
return fmt.Sprintf("events collection hash differs, got %x expected %x for chunk %d with result ID %s", c.computed, c.expected, c.chunkIndex, c.resultID)
}

// CFInvalidVerifiableChunk is returned when a verifiable chunk is invalid;
// this includes cases where the code fails to construct a partial trie
// or collection hashes don't match
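For context, a caller can branch on the new fault the same way the verifier engine does in its switch in engine/verification/verifier/engine.go above. The following is a hypothetical, self-contained sketch, assuming the ChunkFault interface defined alongside these fault types in model/chunks; handleFault and the literal identifiers are illustrative only.

package main

import (
	"fmt"

	chmodels "github.com/onflow/flow-go/model/chunks"
	"github.com/onflow/flow-go/model/flow"
)

// handleFault mirrors the engine's switch: the new fault type is currently
// only logged; raising a challenge remains a TODO in the PR.
func handleFault(chFault chmodels.ChunkFault) {
	switch fault := chFault.(type) {
	case *chmodels.CFInvalidEventsCollection:
		fmt.Println("chunk fault:", fault.String())
	default:
		fmt.Printf("unknown chunk fault (type %T): %v\n", fault, chFault.String())
	}
}

func main() {
	fault := chmodels.NewCFInvalidEventsCollection(
		flow.Identifier{1}, // expected (from the chunk)
		flow.Identifier{2}, // computed (from re-execution)
		0,                  // chunk index
		flow.Identifier{3}, // execution result ID
	)
	handleFault(fault)
}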
12 changes: 12 additions & 0 deletions module/chunks/chunkVerifier.go
@@ -100,6 +100,8 @@ func (fcv *ChunkVerifier) verifyTransactionsInContext(context fvm.Context, chunk
return nil, nil, fmt.Errorf("missing chunk data pack")
}

events := make(flow.EventsList, 0)

// constructing a partial trie given chunk data package
psmt, err := partial.NewLedger(chunkDataPack.Proof, ledger.State(chunkDataPack.StartState), partial.DefaultPathFinderVersion)

@@ -166,6 +168,8 @@ func (fcv *ChunkVerifier) verifyTransactionsInContext(context fvm.Context, chunk
return nil, nil, fmt.Errorf("failed to execute transaction: %d (%w)", i, err)
}

events = append(events, tx.Events...)

// always merge back the tx view (fvm is responsible for changes on tx errors)
err = chunkView.MergeView(txView)
if err != nil {
@@ -182,6 +186,14 @@ func (fcv *ChunkVerifier) verifyTransactionsInContext(context fvm.Context, chunk
return nil, chmodels.NewCFMissingRegisterTouch(missingRegs, chIndex, execResID), nil
}

eventsHash, err := events.Hash()
if err != nil {
return nil, nil, fmt.Errorf("cannot calculate events collection hash: %w", err)
}
if chunk.EventCollection != eventsHash {
return nil, chmodels.NewCFInvalidEventsCollection(chunk.EventCollection, eventsHash, chIndex, execResID), nil
}

// applying chunk delta (register updates at chunk level) to the partial trie
// this returns the expected end state commitment after updates and the list of
// register keys that were not provided by the chunk data package (err).
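The producer side mirrors this check: the execution fixture above now stores the events hash in the chunk body instead of the collection ID. A rough sketch of building such a chunk, assuming the flow.Chunk and flow.ChunkBody types from onflow/flow-go; buildChunkWithEventsHash is a hypothetical helper, not code from this PR.

package main

import (
	"fmt"

	"github.com/onflow/flow-go/model/flow"
)

// buildChunkWithEventsHash shows how a chunk body can carry the hash of the
// events emitted during the chunk's execution, where it previously carried
// the collection ID as a placeholder.
func buildChunkWithEventsHash(index uint, startState flow.StateCommitment, blockID flow.Identifier, events flow.EventsList) (*flow.Chunk, error) {
	eventsHash, err := events.Hash()
	if err != nil {
		return nil, fmt.Errorf("cannot calculate events collection hash: %w", err)
	}
	return &flow.Chunk{
		ChunkBody: flow.ChunkBody{
			CollectionIndex: index,
			StartState:      startState,
			EventCollection: eventsHash,
			BlockID:         blockID,
		},
		Index: uint64(index),
	}, nil
}

func main() {
	chunk, err := buildChunkWithEventsHash(0, flow.StateCommitment{}, flow.Identifier{7}, flow.EventsList{})
	if err != nil {
		panic(err)
	}
	fmt.Printf("chunk %d carries events hash %x\n", chunk.Index, chunk.EventCollection)
}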
75 changes: 66 additions & 9 deletions module/chunks/chunkVerifier_test.go
@@ -27,6 +27,23 @@ import (
"github.com/onflow/flow-go/utils/unittest"
)

var eventsList = flow.EventsList{
{
Type: "event.someType",
TransactionID: flow.Identifier{2, 3, 2, 3},
TransactionIndex: 1,
EventIndex: 2,
Payload: []byte{7, 3, 1, 2},
},
{
Type: "event.otherType",
TransactionID: flow.Identifier{3, 3, 3},
TransactionIndex: 4,
EventIndex: 4,
Payload: []byte{7, 3, 1, 2},
},
}

type ChunkVerifierTestSuite struct {
suite.Suite
verifier *chunks.ChunkVerifier
@@ -51,7 +68,7 @@ func TestChunkVerifier(t *testing.T) {

// TestHappyPath tests verification of the baseline verifiable chunk
func (s *ChunkVerifierTestSuite) TestHappyPath() {
vch := GetBaselineVerifiableChunk(s.T(), []byte{})
vch := GetBaselineVerifiableChunk(s.T(), "")
assert.NotNil(s.T(), vch)
spockSecret, chFaults, err := s.verifier.Verify(vch)
assert.Nil(s.T(), err)
@@ -63,7 +80,7 @@ func (s *ChunkVerifierTestSuite) TestHappyPath() {
func (s *ChunkVerifierTestSuite) TestMissingRegisterTouchForUpdate() {
s.T().Skip("Check new partial ledger for missing keys")

vch := GetBaselineVerifiableChunk(s.T(), []byte(""))
vch := GetBaselineVerifiableChunk(s.T(), "")
assert.NotNil(s.T(), vch)
// remove the second register touch
//vch.ChunkDataPack.RegisterTouches = vch.ChunkDataPack.RegisterTouches[:1]
@@ -78,7 +95,7 @@ func (s *ChunkVerifierTestSuite) TestMissingRegisterTouchForUpdate() {
// TestMissingRegisterTouchForRead tests verification given a chunkdatapack missing a register touch (read)
func (s *ChunkVerifierTestSuite) TestMissingRegisterTouchForRead() {
s.T().Skip("Check new partial ledger for missing keys")
vch := GetBaselineVerifiableChunk(s.T(), []byte(""))
vch := GetBaselineVerifiableChunk(s.T(), "")
assert.NotNil(s.T(), vch)
// remove the second register touch
//vch.ChunkDataPack.RegisterTouches = vch.ChunkDataPack.RegisterTouches[1:]
@@ -94,7 +111,7 @@ func (s *ChunkVerifierTestSuite) TestMissingRegisterTouchForRead() {
// the state commitment computed after updating the partial trie
// doesn't match the one provided by the chunks
func (s *ChunkVerifierTestSuite) TestWrongEndState() {
vch := GetBaselineVerifiableChunk(s.T(), []byte("wrongEndState"))
vch := GetBaselineVerifiableChunk(s.T(), "wrongEndState")
assert.NotNil(s.T(), vch)
spockSecret, chFaults, err := s.verifier.Verify(vch)
assert.Nil(s.T(), err)
@@ -108,14 +125,25 @@
// of a failed transaction. If a transaction fails, it should
// still change the state commitment.
func (s *ChunkVerifierTestSuite) TestFailedTx() {
vch := GetBaselineVerifiableChunk(s.T(), []byte("failedTx"))
vch := GetBaselineVerifiableChunk(s.T(), "failedTx")
assert.NotNil(s.T(), vch)
spockSecret, chFaults, err := s.verifier.Verify(vch)
assert.Nil(s.T(), err)
assert.Nil(s.T(), chFaults)
assert.NotNil(s.T(), spockSecret)
}

// TestEventsMismatch tests verification behavior in case
// the emitted events do not match the chunk's events collection hash
func (s *ChunkVerifierTestSuite) TestEventsMismatch() {
vch := GetBaselineVerifiableChunk(s.T(), "eventsMismatch")
assert.NotNil(s.T(), vch)
_, chFault, err := s.verifier.Verify(vch)
assert.Nil(s.T(), err)
assert.NotNil(s.T(), chFault)
assert.IsType(s.T(), &chunksmodels.CFInvalidEventsCollection{}, chFault)
}

// TestVerifyWrongChunkType evaluates that the following invocations return an error:
// - verifying a system chunk with Verify method.
// - verifying a non-system chunk with SystemChunkVerify method.
@@ -140,11 +168,13 @@ func (s *ChunkVerifierTestSuite) TestVerifyWrongChunkType() {
// TestEmptyCollection tests verification behaviour if a
// collection doesn't have any transactions.
func (s *ChunkVerifierTestSuite) TestEmptyCollection() {
vch := GetBaselineVerifiableChunk(s.T(), []byte{})
vch := GetBaselineVerifiableChunk(s.T(), "")
assert.NotNil(s.T(), vch)
col := unittest.CollectionFixture(0)
vch.Collection = &col
vch.EndState = vch.ChunkDataPack.StartState
emptyListHash, _ := flow.EventsList{}.Hash()
vch.Chunk.EventCollection = emptyListHash // an empty collection emits no events
spockSecret, chFaults, err := s.verifier.Verify(vch)
assert.Nil(s.T(), err)
assert.Nil(s.T(), chFaults)
@@ -154,12 +184,14 @@
// GetBaselineVerifiableChunk returns a verifiable chunk and sets the script
// of a transaction in the middle of the collection to some value to signal the
// mocked VM what to return as the tx execution outcome.
func GetBaselineVerifiableChunk(t *testing.T, script []byte) *verification.VerifiableChunkData {
func GetBaselineVerifiableChunk(t *testing.T, script string) *verification.VerifiableChunkData {

// Collection setup

coll := unittest.CollectionFixture(5)
coll.Transactions[3] = &flow.TransactionBody{Script: script}
collectionSize := 5
magicTxIndex := 3
coll := unittest.CollectionFixture(collectionSize)
coll.Transactions[magicTxIndex] = &flow.TransactionBody{Script: []byte(script)}

guarantee := coll.Guarantee()

@@ -228,12 +260,27 @@ func GetBaselineVerifiableChunk(t *testing.T, script []byte) *verification.Verif
endState, err := f.Set(update)
require.NoError(t, err)

// events
chunkEvents := make(flow.EventsList, 0)
for i := 0; i < collectionSize; i++ {
if i == magicTxIndex {
switch script {
case "failedTx":
continue
}
}
chunkEvents = append(chunkEvents, eventsList...)
}
eventsListHash, err := chunkEvents.Hash()
require.NoError(t, err)

// Chunk setup
chunk := flow.Chunk{
ChunkBody: flow.ChunkBody{
CollectionIndex: 0,
StartState: flow.StateCommitment(startState),
BlockID: blockID,
EventCollection: eventsListHash,
},
Index: 0,
}
@@ -276,15 +323,25 @@ func (vm *vmMock) Run(ctx fvm.Context, proc fvm.Procedure, led state.View, progr
// add updates to the ledger
_ = led.Set("00", "", "", []byte{'F'})
tx.Logs = []string{"log1", "log2"}
tx.Events = eventsList
case "failedTx":
// add updates to the ledger
_ = led.Set("05", "", "", []byte{'B'})
tx.Err = &fvmErrors.CadenceRuntimeError{} // inside the runtime (e.g. div by zero, access account)
case "eventsMismatch":
tx.Events = append(eventsList, flow.Event{
Type: "event.Extra",
TransactionID: flow.Identifier{2, 3},
TransactionIndex: 0,
EventIndex: 0,
Payload: []byte{88},
})
default:
_, _ = led.Get("00", "", "")
_, _ = led.Get("05", "", "")
_ = led.Set("05", "", "", []byte{'B'})
tx.Logs = []string{"log1", "log2"}
tx.Events = eventsList
}

return nil
