Add fuzz tests for hollow account completion #4855

Merged
Changes from all commits (27 commits)
eed52ff
test: add fuzz tests for hollow account completion
stoyan-lime Feb 2, 2023
d554360
fix: code smells
stoyan-lime Feb 6, 2023
15b0be6
fix: move random operations signed by hollow account
stoyan-lime Feb 16, 2023
39f2840
feat: add deletion
stoyan-lime Feb 16, 2023
95ffa95
fix: create dedicated account deletion for hollow fuzz tests
stoyan-lime Feb 21, 2023
43d3e2a
docs: added docs
stoyan-lime Feb 21, 2023
8fc4b86
refactor: rename classes and add more docs and comments
stoyan-lime Feb 22, 2023
c07e039
Merge branch 'develop' into 04633-add-fuzzing-tests-for-hollow-accoun…
stoyan-lime Feb 22, 2023
ee8a20b
refactor: move duplicate txn details to RandomOperationSignedBy
stoyan-lime Feb 22, 2023
27dc8fe
refactor: remove wildcard type
stoyan-lime Feb 22, 2023
4e984ae
refactor: add generic type to RandomOperationSignedBy
stoyan-lime Feb 22, 2023
9d2aa67
Merge branch 'develop' into 04633-add-fuzzing-tests-for-hollow-accoun…
stoyan-lime Feb 28, 2023
7f65c0d
Merge branch 'develop' into 04633-add-fuzzing-tests-for-hollow-accoun…
stoyan-lime Mar 16, 2023
7ab95b9
Merge branch 'develop' into 04633-add-fuzzing-tests-for-hollow-accoun…
stoyan-lime Apr 13, 2023
f4dff20
Merge branch 'develop' into 04633-add-fuzzing-tests-for-hollow-accoun…
MiroslavGatsanoga Apr 19, 2023
19f8fc2
Merge branch 'develop' into 04633-add-fuzzing-tests-for-hollow-accoun…
MiroslavGatsanoga Apr 20, 2023
95e2e8b
Replace RandomHollowAccount with TransferToRandomEVMAddress
MiroslavGatsanoga Apr 26, 2023
72c3269
Generate random keys during initialization
MiroslavGatsanoga Apr 28, 2023
5999b94
Update BiasedDelegatingProvider.configureDefaults to not overwrite payer
MiroslavGatsanoga Apr 28, 2023
814aa0e
Update RandomHollow account op generation
MiroslavGatsanoga Apr 28, 2023
8cf2590
Update RandomTokenAssociate to work with account deletions
MiroslavGatsanoga Apr 28, 2023
13d8b22
Update properties file
MiroslavGatsanoga Apr 28, 2023
47730ca
Update RandomOperationSignedBy to work with account deletions
MiroslavGatsanoga Apr 28, 2023
08d1e73
Add HollowAccountCompletionFuzzing to eet
MiroslavGatsanoga Apr 28, 2023
9b476e2
Fix code smell
MiroslavGatsanoga May 2, 2023
aa3b42d
Add comment for logic for setting default payer for ops
MiroslavGatsanoga May 3, 2023
a15bfbc
Address possible race condition in RandomHollowAccount
MiroslavGatsanoga May 3, 2023
5 changes: 4 additions & 1 deletion hedera-node/test-clients/src/eet/java/EndToEndTests.java
@@ -25,6 +25,7 @@
import com.hedera.services.bdd.suites.records.FileRecordsSanityCheckSuite;
import com.hedera.services.bdd.suites.records.RecordCreationSuite;
import com.hedera.services.bdd.suites.regression.AddressAliasIdFuzzing;
import com.hedera.services.bdd.suites.regression.HollowAccountCompletionFuzzing;
import com.hedera.services.bdd.suites.regression.TargetNetworkPrep;
import com.hedera.services.bdd.suites.regression.UmbrellaRedux;
import com.hedera.services.bdd.suites.schedule.ScheduleCreateSpecs;
@@ -433,7 +434,9 @@ Collection<DynamicContainer> regression() {
return List.of(
// extractSpecsFromSuite(SplittingThrottlesWorks::new),
// extractSpecsFromSuite(SteadyStateThrottlingCheck::new),
extractSpecsFromSuite(UmbrellaRedux::new), extractSpecsFromSuite(AddressAliasIdFuzzing::new));
extractSpecsFromSuite(UmbrellaRedux::new),
extractSpecsFromSuite(AddressAliasIdFuzzing::new),
extractSpecsFromSuite(HollowAccountCompletionFuzzing::new));
}

@Tag("throttling")
@@ -127,11 +127,23 @@ private void configureDefaults(HapiSpecOperation op) {
if (shouldAlwaysDefer && isTxnOp) {
((HapiTxnOp) op).deferStatusResolution();
}

// if the payer was not set when the operation was created (e.g. `RandomAccountDeletion`), default to `UNIQUE_PAYER_ACCOUNT`;
// if the payer was already set (e.g. `RandomOperationSignedBy`), do not overwrite it
if (op.getPayer().isEmpty()) {
if (isTxnOp) {
((HapiTxnOp) op).payingWith(UNIQUE_PAYER_ACCOUNT).fee(TRANSACTION_FEE);
} else if (isQueryOp(op)) {
((HapiQueryOp) op).payingWith(UNIQUE_PAYER_ACCOUNT);
}
}

if (!shouldLogNormalFlow) {
if (isTxnOp) {
((HapiTxnOp) op).noLogging().payingWith(UNIQUE_PAYER_ACCOUNT).fee(TRANSACTION_FEE);
((HapiTxnOp) op).noLogging();
} else if (isQueryOp(op)) {
((HapiQueryOp) op).noLogging().payingWith(UNIQUE_PAYER_ACCOUNT);
((HapiQueryOp) op).noLogging();
}
}
}
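
For context, a minimal sketch of the payer-defaulting rule described in the comment above (illustrative only; the op construction and the account/key names here are assumptions, not taken from this diff):

import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoDelete;

import com.hedera.services.bdd.spec.transactions.HapiTxnOp;

class PayerDefaultingSketch {
    void illustrate() {
        // Created without a payer (as RandomAccountDeletion does): configureDefaults(op)
        // is expected to apply payingWith(UNIQUE_PAYER_ACCOUNT).fee(TRANSACTION_FEE).
        HapiTxnOp<?> withoutPayer = cryptoDelete("Fuzz#7#");

        // Created with a payer already chosen (as RandomOperationSignedBy does):
        // configureDefaults(op) leaves the existing payer in place.
        HapiTxnOp<?> withPayer = cryptoDelete("Fuzz#7#").payingWith("Fuzz#7");
    }
}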
@@ -0,0 +1,38 @@
/*
* Copyright (C) 2023 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow;

import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall;
import static com.hedera.services.bdd.suites.regression.factories.AccountCompletionFuzzingFactory.CONTRACT;

import com.hedera.services.bdd.spec.infrastructure.HapiSpecRegistry;
import com.hedera.services.bdd.spec.infrastructure.providers.names.RegistrySourcedNameProvider;
import com.hedera.services.bdd.spec.transactions.HapiTxnOp;
import com.hedera.services.bdd.spec.transactions.contract.HapiContractCall;
import com.hederahashgraph.api.proto.java.AccountID;

public class RandomContractCallSignedBy extends RandomOperationSignedBy<HapiContractCall> {

public RandomContractCallSignedBy(HapiSpecRegistry registry, RegistrySourcedNameProvider<AccountID> accounts) {
super(registry, accounts);
}

@Override
protected HapiTxnOp<HapiContractCall> hapiTxnOp(String keyName) {
return contractCall(CONTRACT);
}
}
@@ -0,0 +1,38 @@
/*
* Copyright (C) 2023 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow;

import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate;
import static com.hedera.services.bdd.suites.regression.factories.AccountCompletionFuzzingFactory.CONTRACT;

import com.hedera.services.bdd.spec.infrastructure.HapiSpecRegistry;
import com.hedera.services.bdd.spec.infrastructure.providers.names.RegistrySourcedNameProvider;
import com.hedera.services.bdd.spec.transactions.HapiTxnOp;
import com.hedera.services.bdd.spec.transactions.contract.HapiContractCreate;
import com.hederahashgraph.api.proto.java.AccountID;

public class RandomContractCreateSignedBy extends RandomOperationSignedBy<HapiContractCreate> {

public RandomContractCreateSignedBy(HapiSpecRegistry registry, RegistrySourcedNameProvider<AccountID> accounts) {
super(registry, accounts);
}

@Override
protected HapiTxnOp<HapiContractCreate> hapiTxnOp(String keyName) {
return contractCreate(CONTRACT);
}
}
@@ -0,0 +1,112 @@
/*
* Copyright (C) 2023 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow;

import static com.hedera.node.app.service.evm.utils.EthSigsUtils.recoverAddressFromPubKey;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord;
import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer;
import static com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoTransfer.tinyBarsFromTo;
import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext;
import static com.hedera.services.bdd.suites.HapiSuite.GENESIS;
import static com.hedera.services.bdd.suites.HapiSuite.ONE_HUNDRED_HBARS;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.ACCOUNT_DELETED;

import com.google.protobuf.ByteString;
import com.hedera.services.bdd.spec.HapiSpecOperation;
import com.hedera.services.bdd.spec.infrastructure.HapiSpecRegistry;
import com.hedera.services.bdd.spec.infrastructure.OpProvider;
import com.hedera.services.bdd.spec.infrastructure.providers.names.RegistrySourcedNameProvider;
import com.hedera.services.bdd.spec.queries.meta.HapiGetTxnRecord;
import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.Key;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicLong;

public class RandomHollowAccount implements OpProvider {
// Added to hollow account names to differentiate them from the keys created for them
public static final String ACCOUNT_SUFFIX = "#";
public static final String KEY_PREFIX = "Fuzz#";
public static final int DEFAULT_CEILING_NUM = 100;
public static final long INITIAL_BALANCE = 1_000_000_000L;
public static final String LAZY_CREATE = "LAZY_CREATE";
private int ceilingNum = DEFAULT_CEILING_NUM;
private final HapiSpecRegistry registry;

private final RegistrySourcedNameProvider<Key> keys;
private final RegistrySourcedNameProvider<AccountID> accounts;

private final AtomicLong lazyCreateNum = new AtomicLong(0L);

public RandomHollowAccount(
HapiSpecRegistry registry,
RegistrySourcedNameProvider<Key> keys,
RegistrySourcedNameProvider<AccountID> accounts) {
this.registry = registry;
this.keys = keys;
this.accounts = accounts;
}

public RandomHollowAccount ceiling(int n) {
ceilingNum = n;
return this;
}

@Override
public Optional<HapiSpecOperation> get() {
// doubling ceilingNum as keys are also saved in accounts registry when account is created
if (accounts.numPresent() >= ceilingNum * 2) {
return Optional.empty();
}

return randomKey().map(this::generateHollowAccount);
}

private Optional<String> randomKey() {
return keys.getQualifying()
.filter(k -> !k.endsWith(ACCOUNT_SUFFIX))
.filter(k -> k.startsWith(KEY_PREFIX))
.filter(k -> !registry.hasAccountId(k + ACCOUNT_SUFFIX));
}

private HapiSpecOperation generateHollowAccount(String keyName) {
return withOpContext((spec, opLog) -> {
final var evmAddress = getEvmAddress(keyName);
final var currentLazyCreateNum = lazyCreateNum.getAndIncrement();
final var txnName = LAZY_CREATE + currentLazyCreateNum;
final var op = cryptoTransfer(tinyBarsFromTo(GENESIS, evmAddress, ONE_HUNDRED_HBARS))
.hasKnownStatusFrom(standardOutcomesAnd(ACCOUNT_DELETED))
.via(txnName);

final HapiGetTxnRecord hapiGetTxnRecord =
getTxnRecord(txnName).andAllChildRecords().assertingNothingAboutHashes();

allRunFor(spec, op, hapiGetTxnRecord);

if (!hapiGetTxnRecord.getChildRecords().isEmpty()) {
final AccountID newAccountID =
hapiGetTxnRecord.getChildRecord(0).getReceipt().getAccountID();
spec.registry().saveAccountId(keyName + ACCOUNT_SUFFIX, newAccountID);
}
});
}

private ByteString getEvmAddress(String keyName) {
final var ecdsaKey = this.registry.getKey(keyName).getECDSASecp256K1().toByteArray();
return ByteString.copyFrom(recoverAddressFromPubKey(ecdsaKey));
}
}
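
To make the key/account naming convention above concrete, a small illustrative sketch (the example key number is arbitrary; only the KEY_PREFIX/ACCOUNT_SUFFIX constants come from the class above):

import static com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow.RandomHollowAccount.ACCOUNT_SUFFIX;
import static com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow.RandomHollowAccount.KEY_PREFIX;

class HollowNamingSketch {
    void illustrate() {
        // randomKey() only picks registry keys that start with KEY_PREFIX ("Fuzz#"),
        // do not already end with ACCOUNT_SUFFIX ("#"), and have no account saved for them yet.
        String keyName = KEY_PREFIX + 7; // "Fuzz#7"

        // generateHollowAccount(keyName) lazy-creates via cryptoTransfer to the key's EVM address
        // and, once a child record appears, saves the new AccountID under:
        String accountName = keyName + ACCOUNT_SUFFIX; // "Fuzz#7#"
        // RandomOperationSignedBy and RandomHollowAccountDeletion later select accounts
        // by this trailing ACCOUNT_SUFFIX.
    }
}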
@@ -0,0 +1,54 @@
/*
* Copyright (C) 2023 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow;

import static com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow.RandomHollowAccount.ACCOUNT_SUFFIX;
import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoDelete;
import static com.hedera.services.bdd.suites.crypto.AutoAccountCreationSuite.CRYPTO_TRANSFER_RECEIVER;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.ACCOUNT_DELETED;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_ACCOUNT_ID;

import com.hedera.services.bdd.spec.HapiSpecOperation;
import com.hedera.services.bdd.spec.infrastructure.OpProvider;
import com.hedera.services.bdd.spec.infrastructure.providers.names.RegistrySourcedNameProvider;
import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.ResponseCodeEnum;
import java.util.Optional;

public class RandomHollowAccountDeletion implements OpProvider {
private final RegistrySourcedNameProvider<AccountID> accounts;
private final ResponseCodeEnum[] permissiblePrechecks = standardPrechecksAnd(ACCOUNT_DELETED, INVALID_ACCOUNT_ID);
private final ResponseCodeEnum[] permissibleOutcomes = standardOutcomesAnd(ACCOUNT_DELETED, INVALID_ACCOUNT_ID);

public RandomHollowAccountDeletion(RegistrySourcedNameProvider<AccountID> accounts) {
this.accounts = accounts;
}

@Override
public Optional<HapiSpecOperation> get() {
return accounts.getQualifying().filter(a -> a.endsWith(ACCOUNT_SUFFIX)).map(this::accountDeleteOp);
}

private HapiSpecOperation accountDeleteOp(String account) {
return cryptoDelete(account)
.purging()
.transfer(CRYPTO_TRANSFER_RECEIVER)
.hasPrecheckFrom(permissiblePrechecks)
.hasKnownStatusFrom(permissibleOutcomes)
.noLogging();
}
}
@@ -0,0 +1,80 @@
/*
* Copyright (C) 2023 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow;

import static com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow.RandomHollowAccount.ACCOUNT_SUFFIX;
import static com.hedera.services.bdd.spec.keys.TrieSigMapGenerator.uniqueWithFullPrefixesFor;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.ACCOUNT_DELETED;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.PAYER_ACCOUNT_DELETED;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.PAYER_ACCOUNT_NOT_FOUND;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT;

import com.hedera.services.bdd.spec.HapiSpecOperation;
import com.hedera.services.bdd.spec.infrastructure.HapiSpecRegistry;
import com.hedera.services.bdd.spec.infrastructure.OpProvider;
import com.hedera.services.bdd.spec.infrastructure.providers.names.RegistrySourcedNameProvider;
import com.hedera.services.bdd.spec.transactions.HapiTxnOp;
import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.ResponseCodeEnum;
import java.util.Optional;

/**
* Operation getting a random account key and signing transaction with it, completing hollow accounts in the process.
*/
abstract class RandomOperationSignedBy<T extends HapiTxnOp<T>> implements OpProvider {
private final HapiSpecRegistry registry;

private final RegistrySourcedNameProvider<AccountID> accounts;

private final ResponseCodeEnum[] permissiblePrechecks =
standardPrechecksAnd(PAYER_ACCOUNT_NOT_FOUND, ACCOUNT_DELETED);
private final ResponseCodeEnum[] permissibleOutcomes =
standardOutcomesAnd(TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT, ACCOUNT_DELETED, PAYER_ACCOUNT_DELETED);

protected RandomOperationSignedBy(HapiSpecRegistry registry, RegistrySourcedNameProvider<AccountID> accounts) {
this.registry = registry;
this.accounts = accounts;
}

@Override
public Optional<HapiSpecOperation> get() {
return randomHollowAccountKey().map(this::generateOpSignedBy);
}

private Optional<String> randomHollowAccountKey() {
return accounts.getQualifying().filter(a -> a.endsWith(ACCOUNT_SUFFIX)).map(this::keyFromAccount);
}

private String keyFromAccount(String account) {
final var key = account.replaceAll(ACCOUNT_SUFFIX + "$", "");
final AccountID fromAccount = registry.getAccountID(account);
registry.saveAccountId(key, fromAccount);
registry.saveKey(account, registry.getKey(key)); // needed for HapiTokenAssociate.defaultSigners()
return key;
}

private HapiSpecOperation generateOpSignedBy(String keyName) {
return hapiTxnOp(keyName)
.payingWith(keyName)
.sigMapPrefixes(uniqueWithFullPrefixesFor(keyName))
.hasPrecheckFrom(permissiblePrechecks)
.hasKnownStatusFrom(permissibleOutcomes)
.noLogging();
}

protected abstract HapiTxnOp<T> hapiTxnOp(String keyName);
}
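
For illustration, a hypothetical further subclass in the same pattern as RandomContractCallSignedBy and RandomContractCreateSignedBy above (this class is not part of the PR; it assumes the cryptoTransfer/tinyBarsFromTo helpers and the GENESIS constant used elsewhere in this diff):

package com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow;

import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer;
import static com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoTransfer.tinyBarsFromTo;
import static com.hedera.services.bdd.suites.HapiSuite.GENESIS;

import com.hedera.services.bdd.spec.infrastructure.HapiSpecRegistry;
import com.hedera.services.bdd.spec.infrastructure.providers.names.RegistrySourcedNameProvider;
import com.hedera.services.bdd.spec.transactions.HapiTxnOp;
import com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoTransfer;
import com.hederahashgraph.api.proto.java.AccountID;

// Hypothetical example only: a hollow-account-signed transfer of one tinybar back to GENESIS.
public class RandomCryptoTransferSignedBy extends RandomOperationSignedBy<HapiCryptoTransfer> {

    public RandomCryptoTransferSignedBy(HapiSpecRegistry registry, RegistrySourcedNameProvider<AccountID> accounts) {
        super(registry, accounts);
    }

    @Override
    protected HapiTxnOp<HapiCryptoTransfer> hapiTxnOp(String keyName) {
        // keyName also resolves to the hollow account's id (saved by keyFromAccount above), so the
        // hollow account is both the sender here and, via generateOpSignedBy, the payer/signer.
        return cryptoTransfer(tinyBarsFromTo(keyName, GENESIS, 1L));
    }
}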