Add fuzz tests for hollow account completion (#4855)
Signed-off-by: Stoyan Stoyanov <stoyan.stoyanov@limechain.tech>
Signed-off-by: Miroslav Gatsanoga <miroslav.gatsanoga@limechain.tech>
Co-authored-by: Miroslav Gatsanoga <miroslav.gatsanoga@limechain.tech>
stoyan-lime and MiroslavGatsanoga committed May 5, 2023
1 parent b45b422 commit bd3e30e
Showing 14 changed files with 630 additions and 5 deletions.
5 changes: 4 additions & 1 deletion hedera-node/test-clients/src/eet/java/EndToEndTests.java
@@ -25,6 +25,7 @@
import com.hedera.services.bdd.suites.records.FileRecordsSanityCheckSuite;
import com.hedera.services.bdd.suites.records.RecordCreationSuite;
import com.hedera.services.bdd.suites.regression.AddressAliasIdFuzzing;
import com.hedera.services.bdd.suites.regression.HollowAccountCompletionFuzzing;
import com.hedera.services.bdd.suites.regression.TargetNetworkPrep;
import com.hedera.services.bdd.suites.regression.UmbrellaRedux;
import com.hedera.services.bdd.suites.schedule.ScheduleCreateSpecs;
@@ -433,7 +434,9 @@ Collection<DynamicContainer> regression() {
return List.of(
// extractSpecsFromSuite(SplittingThrottlesWorks::new),
// extractSpecsFromSuite(SteadyStateThrottlingCheck::new),
extractSpecsFromSuite(UmbrellaRedux::new), extractSpecsFromSuite(AddressAliasIdFuzzing::new));
extractSpecsFromSuite(UmbrellaRedux::new),
extractSpecsFromSuite(AddressAliasIdFuzzing::new),
extractSpecsFromSuite(HollowAccountCompletionFuzzing::new));
}

@Tag("throttling")
@@ -127,11 +127,23 @@ private void configureDefaults(HapiSpecOperation op) {
if (shouldAlwaysDefer && isTxnOp) {
((HapiTxnOp) op).deferStatusResolution();
}

// If the payer was not set when the operation was created (e.g. `RandomAccountDeletion`), default to
// `UNIQUE_PAYER_ACCOUNT`; if the payer was already set (e.g. `RandomOperationSignedBy`), do not
// overwrite it
if (op.getPayer().isEmpty()) {
if (isTxnOp) {
((HapiTxnOp) op).payingWith(UNIQUE_PAYER_ACCOUNT).fee(TRANSACTION_FEE);
} else if (isQueryOp(op)) {
((HapiQueryOp) op).payingWith(UNIQUE_PAYER_ACCOUNT);
}
}

if (!shouldLogNormalFlow) {
if (isTxnOp) {
((HapiTxnOp) op).noLogging().payingWith(UNIQUE_PAYER_ACCOUNT).fee(TRANSACTION_FEE);
((HapiTxnOp) op).noLogging();
} else if (isQueryOp(op)) {
((HapiQueryOp) op).noLogging().payingWith(UNIQUE_PAYER_ACCOUNT);
((HapiQueryOp) op).noLogging();
}
}
}
@@ -0,0 +1,38 @@
/*
* Copyright (C) 2023 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow;

import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall;
import static com.hedera.services.bdd.suites.regression.factories.AccountCompletionFuzzingFactory.CONTRACT;

import com.hedera.services.bdd.spec.infrastructure.HapiSpecRegistry;
import com.hedera.services.bdd.spec.infrastructure.providers.names.RegistrySourcedNameProvider;
import com.hedera.services.bdd.spec.transactions.HapiTxnOp;
import com.hedera.services.bdd.spec.transactions.contract.HapiContractCall;
import com.hederahashgraph.api.proto.java.AccountID;

public class RandomContractCallSignedBy extends RandomOperationSignedBy<HapiContractCall> {

public RandomContractCallSignedBy(HapiSpecRegistry registry, RegistrySourcedNameProvider<AccountID> accounts) {
super(registry, accounts);
}

@Override
protected HapiTxnOp<HapiContractCall> hapiTxnOp(String keyName) {
return contractCall(CONTRACT);
}
}
@@ -0,0 +1,38 @@
/*
* Copyright (C) 2023 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow;

import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate;
import static com.hedera.services.bdd.suites.regression.factories.AccountCompletionFuzzingFactory.CONTRACT;

import com.hedera.services.bdd.spec.infrastructure.HapiSpecRegistry;
import com.hedera.services.bdd.spec.infrastructure.providers.names.RegistrySourcedNameProvider;
import com.hedera.services.bdd.spec.transactions.HapiTxnOp;
import com.hedera.services.bdd.spec.transactions.contract.HapiContractCreate;
import com.hederahashgraph.api.proto.java.AccountID;

public class RandomContractCreateSignedBy extends RandomOperationSignedBy<HapiContractCreate> {

public RandomContractCreateSignedBy(HapiSpecRegistry registry, RegistrySourcedNameProvider<AccountID> accounts) {
super(registry, accounts);
}

@Override
protected HapiTxnOp<HapiContractCreate> hapiTxnOp(String keyName) {
return contractCreate(CONTRACT);
}
}
@@ -0,0 +1,112 @@
/*
* Copyright (C) 2023 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow;

import static com.hedera.node.app.service.evm.utils.EthSigsUtils.recoverAddressFromPubKey;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord;
import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer;
import static com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoTransfer.tinyBarsFromTo;
import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext;
import static com.hedera.services.bdd.suites.HapiSuite.GENESIS;
import static com.hedera.services.bdd.suites.HapiSuite.ONE_HUNDRED_HBARS;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.ACCOUNT_DELETED;

import com.google.protobuf.ByteString;
import com.hedera.services.bdd.spec.HapiSpecOperation;
import com.hedera.services.bdd.spec.infrastructure.HapiSpecRegistry;
import com.hedera.services.bdd.spec.infrastructure.OpProvider;
import com.hedera.services.bdd.spec.infrastructure.providers.names.RegistrySourcedNameProvider;
import com.hedera.services.bdd.spec.queries.meta.HapiGetTxnRecord;
import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.Key;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicLong;

public class RandomHollowAccount implements OpProvider {
// Suffix appended to hollow account names to differentiate them from the keys created for them
public static final String ACCOUNT_SUFFIX = "#";
public static final String KEY_PREFIX = "Fuzz#";
public static final int DEFAULT_CEILING_NUM = 100;
public static final long INITIAL_BALANCE = 1_000_000_000L;
public static final String LAZY_CREATE = "LAZY_CREATE";
private int ceilingNum = DEFAULT_CEILING_NUM;
private final HapiSpecRegistry registry;

private final RegistrySourcedNameProvider<Key> keys;
private final RegistrySourcedNameProvider<AccountID> accounts;

private final AtomicLong lazyCreateNum = new AtomicLong(0L);

public RandomHollowAccount(
HapiSpecRegistry registry,
RegistrySourcedNameProvider<Key> keys,
RegistrySourcedNameProvider<AccountID> accounts) {
this.registry = registry;
this.keys = keys;
this.accounts = accounts;
}

public RandomHollowAccount ceiling(int n) {
ceilingNum = n;
return this;
}

@Override
public Optional<HapiSpecOperation> get() {
// ceilingNum is doubled because keys are also saved in the accounts registry when an account is created
if (accounts.numPresent() >= ceilingNum * 2) {
return Optional.empty();
}

return randomKey().map(this::generateHollowAccount);
}

private Optional<String> randomKey() {
return keys.getQualifying()
.filter(k -> !k.endsWith(ACCOUNT_SUFFIX))
.filter(k -> k.startsWith(KEY_PREFIX))
.filter(k -> !registry.hasAccountId(k + ACCOUNT_SUFFIX));
}

private HapiSpecOperation generateHollowAccount(String keyName) {
return withOpContext((spec, opLog) -> {
final var evmAddress = getEvmAddress(keyName);
final var currentLazyCreateNum = lazyCreateNum.getAndIncrement();
final var txnName = LAZY_CREATE + currentLazyCreateNum;
final var op = cryptoTransfer(tinyBarsFromTo(GENESIS, evmAddress, ONE_HUNDRED_HBARS))
.hasKnownStatusFrom(standardOutcomesAnd(ACCOUNT_DELETED))
.via(txnName);

final HapiGetTxnRecord hapiGetTxnRecord =
getTxnRecord(txnName).andAllChildRecords().assertingNothingAboutHashes();

allRunFor(spec, op, hapiGetTxnRecord);

if (!hapiGetTxnRecord.getChildRecords().isEmpty()) {
final AccountID newAccountID =
hapiGetTxnRecord.getChildRecord(0).getReceipt().getAccountID();
spec.registry().saveAccountId(keyName + ACCOUNT_SUFFIX, newAccountID);
}
});
}

private ByteString getEvmAddress(String keyName) {
final var ecdsaKey = this.registry.getKey(keyName).getECDSASecp256K1().toByteArray();
return ByteString.copyFrom(recoverAddressFromPubKey(ecdsaKey));
}
}
@@ -0,0 +1,54 @@
/*
* Copyright (C) 2023 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow;

import static com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow.RandomHollowAccount.ACCOUNT_SUFFIX;
import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoDelete;
import static com.hedera.services.bdd.suites.crypto.AutoAccountCreationSuite.CRYPTO_TRANSFER_RECEIVER;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.ACCOUNT_DELETED;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_ACCOUNT_ID;

import com.hedera.services.bdd.spec.HapiSpecOperation;
import com.hedera.services.bdd.spec.infrastructure.OpProvider;
import com.hedera.services.bdd.spec.infrastructure.providers.names.RegistrySourcedNameProvider;
import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.ResponseCodeEnum;
import java.util.Optional;

public class RandomHollowAccountDeletion implements OpProvider {
private final RegistrySourcedNameProvider<AccountID> accounts;
private final ResponseCodeEnum[] permissiblePrechecks = standardPrechecksAnd(ACCOUNT_DELETED, INVALID_ACCOUNT_ID);
private final ResponseCodeEnum[] permissibleOutcomes = standardOutcomesAnd(ACCOUNT_DELETED, INVALID_ACCOUNT_ID);

public RandomHollowAccountDeletion(RegistrySourcedNameProvider<AccountID> accounts) {
this.accounts = accounts;
}

@Override
public Optional<HapiSpecOperation> get() {
return accounts.getQualifying().filter(a -> a.endsWith(ACCOUNT_SUFFIX)).map(this::accountDeleteOp);
}

private HapiSpecOperation accountDeleteOp(String account) {
return cryptoDelete(account)
.purging()
.transfer(CRYPTO_TRANSFER_RECEIVER)
.hasPrecheckFrom(permissiblePrechecks)
.hasKnownStatusFrom(permissibleOutcomes)
.noLogging();
}
}
@@ -0,0 +1,80 @@
/*
* Copyright (C) 2023 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow;

import static com.hedera.services.bdd.spec.infrastructure.providers.ops.hollow.RandomHollowAccount.ACCOUNT_SUFFIX;
import static com.hedera.services.bdd.spec.keys.TrieSigMapGenerator.uniqueWithFullPrefixesFor;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.ACCOUNT_DELETED;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.PAYER_ACCOUNT_DELETED;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.PAYER_ACCOUNT_NOT_FOUND;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT;

import com.hedera.services.bdd.spec.HapiSpecOperation;
import com.hedera.services.bdd.spec.infrastructure.HapiSpecRegistry;
import com.hedera.services.bdd.spec.infrastructure.OpProvider;
import com.hedera.services.bdd.spec.infrastructure.providers.names.RegistrySourcedNameProvider;
import com.hedera.services.bdd.spec.transactions.HapiTxnOp;
import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.ResponseCodeEnum;
import java.util.Optional;

/**
* Operation that gets a random hollow account key and signs a transaction with it, completing the hollow account in the process.
*/
abstract class RandomOperationSignedBy<T extends HapiTxnOp<T>> implements OpProvider {
private final HapiSpecRegistry registry;

private final RegistrySourcedNameProvider<AccountID> accounts;

private final ResponseCodeEnum[] permissiblePrechecks =
standardPrechecksAnd(PAYER_ACCOUNT_NOT_FOUND, ACCOUNT_DELETED);
private final ResponseCodeEnum[] permissibleOutcomes =
standardOutcomesAnd(TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT, ACCOUNT_DELETED, PAYER_ACCOUNT_DELETED);

protected RandomOperationSignedBy(HapiSpecRegistry registry, RegistrySourcedNameProvider<AccountID> accounts) {
this.registry = registry;
this.accounts = accounts;
}

@Override
public Optional<HapiSpecOperation> get() {
return randomHollowAccountKey().map(this::generateOpSignedBy);
}

private Optional<String> randomHollowAccountKey() {
return accounts.getQualifying().filter(a -> a.endsWith(ACCOUNT_SUFFIX)).map(this::keyFromAccount);
}

private String keyFromAccount(String account) {
final var key = account.replaceAll(ACCOUNT_SUFFIX + "$", "");
final AccountID fromAccount = registry.getAccountID(account);
registry.saveAccountId(key, fromAccount);
registry.saveKey(account, registry.getKey(key)); // needed for HapiTokenAssociate.defaultSigners()
return key;
}

private HapiSpecOperation generateOpSignedBy(String keyName) {
return hapiTxnOp(keyName)
.payingWith(keyName)
.sigMapPrefixes(uniqueWithFullPrefixesFor(keyName))
.hasPrecheckFrom(permissiblePrechecks)
.hasKnownStatusFrom(permissibleOutcomes)
.noLogging();
}

protected abstract HapiTxnOp<T> hapiTxnOp(String keyName);
}
