diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/RecordListBuilder.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/RecordListBuilder.java
index 03a55348e36c..beb9e23f4fd2 100644
--- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/RecordListBuilder.java
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/RecordListBuilder.java
@@ -253,19 +253,21 @@ private SingleTransactionRecordBuilderImpl doAddChild(
         }
 
         // Make sure we have not created so many that we have run out of slots.
-        final var childCount = childRecordBuilders.size();
-        final var consensusConfig = configuration.getConfigData(ConsensusConfig.class);
+        final int childCount = childRecordBuilders.size();
+        final ConsensusConfig consensusConfig = configuration.getConfigData(ConsensusConfig.class);
         if (childCount >= consensusConfig.handleMaxFollowingRecords()) {
             throw new HandleException(ResponseCodeEnum.MAX_CHILD_RECORDS_EXCEEDED);
         }
 
-        // The consensus timestamp of the first item in the child list is T+1, where T is the time of the user tx
-        final var parentConsensusTimestamp = userTxnRecordBuilder.consensusNow();
-        final var prevConsensusNow = childRecordBuilders.isEmpty()
-                ? userTxnRecordBuilder.consensusNow()
+        // The consensus timestamp of the first item in the child list is T+K (in nanoseconds),
+        // where T is the time of the user tx and K is the maximum number of "preceding" records
+        // defined for the current configuration.
+        final long maxPrecedingRecords = consensusConfig.handleMaxPrecedingRecords();
+        final Instant parentConsensusTimestamp = userTxnRecordBuilder.consensusNow();
+        final Instant prevConsensusNow = childRecordBuilders.isEmpty()
+                ? userTxnRecordBuilder.consensusNow().plusNanos(maxPrecedingRecords)
                 : childRecordBuilders.get(childRecordBuilders.size() - 1).consensusNow();
-        final var consensusNow = prevConsensusNow.plusNanos(1L);
-        // Note we do not repeat exchange rates for child transactions
+        final Instant consensusNow = prevConsensusNow.plusNanos(1L);
         final var recordBuilder = new SingleTransactionRecordBuilderImpl(consensusNow, reversingBehavior, customizer)
                 .parentConsensus(parentConsensusTimestamp);
         if (!customizer.shouldSuppressRecord()) {
diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/record/RecordListBuilderTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/record/RecordListBuilderTest.java
index 46636be70e29..a2453869793f 100644
--- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/record/RecordListBuilderTest.java
+++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/record/RecordListBuilderTest.java
@@ -25,11 +25,13 @@
 import static org.assertj.core.api.Assertions.assertThatThrownBy;
 
 import com.hedera.hapi.node.base.ResponseCodeEnum;
+import com.hedera.hapi.node.base.Timestamp;
 import com.hedera.hapi.node.base.Transaction;
 import com.hedera.hapi.node.base.TransactionID;
 import com.hedera.node.app.AppTestBase;
 import com.hedera.node.app.spi.workflows.HandleException;
 import com.hedera.node.app.state.SingleTransactionRecord;
+import com.hedera.node.config.data.ConsensusConfig;
 import com.hedera.node.config.testfixtures.HederaTestConfigBuilder;
 import com.swirlds.config.api.Configuration;
 import edu.umd.cs.findbugs.annotations.NonNull;
@@ -48,6 +50,8 @@ class RecordListBuilderTest extends AppTestBase {
             .withValue("consensus.message.maxPrecedingRecords", MAX_PRECEDING)
             .withValue("consensus.message.maxFollowingRecords", MAX_CHILDREN)
             .getOrCreateConfig();
+    private static final int EXPECTED_CHILD_NANO_INCREMENT =
+            Math.toIntExact(CONFIGURATION.getConfigData(ConsensusConfig.class).handleMaxPrecedingRecords());
 
     @SuppressWarnings("ConstantConditions")
     @Test
@@ -885,10 +889,11 @@ TransactionRecordAssertions nanosBefore(final int nanos, @NonNull final SingleTr
         }
 
         TransactionRecordAssertions nanosAfter(final int nanos, @NonNull final SingleTransactionRecord otherRecord) {
-            final var otherTimestamp = otherRecord.transactionRecord().consensusTimestampOrThrow();
-            final var expectedTimestamp = otherTimestamp
+            final Timestamp otherTimestamp = otherRecord.transactionRecord().consensusTimestampOrThrow();
+            final int actualOffset = EXPECTED_CHILD_NANO_INCREMENT + nanos;
+            final Timestamp expectedTimestamp = otherTimestamp
                     .copyBuilder()
-                    .nanos(otherTimestamp.nanos() + nanos)
+                    .nanos(otherTimestamp.nanos() + actualOffset)
                     .build();
             assertThat(record.transactionRecord().consensusTimestampOrThrow()).isEqualTo(expectedTimestamp);
             return this;
diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleExecutionSpecs.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleExecutionSpecs.java
index f18640762b98..4066dfd479ce 100644
--- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleExecutionSpecs.java
+++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleExecutionSpecs.java
@@ -73,6 +73,7 @@
 import static com.hedera.services.bdd.suites.freeze.UpgradeSuite.standardUpdateFile;
 import static com.hedera.services.bdd.suites.schedule.ScheduleLongTermExecutionSpecs.withAndWithoutLongTermEnabled;
 import static com.hedera.services.bdd.suites.schedule.ScheduleRecordSpecs.scheduledVersionOf;
+import static com.hedera.services.bdd.suites.schedule.ScheduleUtils.WHITELIST_MINIMUM;
 import static com.hedera.services.bdd.suites.utils.sysfiles.serdes.ThrottleDefsLoader.protoDefsFromResource;
 import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.ACCOUNT_DELETED;
 import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.ACCOUNT_FROZEN_FOR_TOKEN;
@@ -108,6 +109,8 @@
 
 import com.google.protobuf.ByteString;
 import com.hedera.hapi.node.base.HederaFunctionality;
+import com.hedera.node.config.data.ConsensusConfig;
+import com.hedera.services.bdd.junit.HapiTest;
 import com.hedera.services.bdd.junit.HapiTestSuite;
 import com.hedera.services.bdd.spec.HapiSpec;
 import com.hedera.services.bdd.spec.HapiSpecOperation;
@@ -119,11 +122,7 @@
 import com.hederahashgraph.api.proto.java.AccountID;
 import com.hederahashgraph.api.proto.java.TokenType;
 import com.hederahashgraph.api.proto.java.TransactionID;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
+import com.swirlds.test.framework.config.TestConfigBuilder;
 import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
@@ -182,10 +181,13 @@ public class ScheduleExecutionSpecs extends HapiSuite {
             HapiSpecSetup.getDefaultNodeProps().get(SCHEDULING_WHITELIST);
 
     /**
-     * This is ConsensusTimeTracker.MAX_PRECEDING_RECORDS_REMAINING_TXN + 1. It is not guaranteed to be this. If there
-     * are any following records generated by the txn then it could be different.
+     * This is matched to ConsensusTimeTracker.MAX_PRECEDING_RECORDS_REMAINING_TXN + 1.
+     * It is not guaranteed to remain thus. If the configuration changes or there are any
+     * following records generated by the txn before the scheduled transaction then
+     * it could be different.
      */
-    private static final long normalTriggeredTxnTimestampOffset = 4;
+    private final long normalTriggeredTxnTimestampOffset =
+            getTestConfig(ConsensusConfig.class).handleMaxPrecedingRecords() + 1;
 
     private static final String successTxn = "successTxn";
     private static final String signTxn = "signTxn";
@@ -208,55 +210,55 @@ public List<HapiSpec> getSpecsInSuite() {
                 aSuiteSetup(),
                 // Note: Order matters here, e2e tests fail if reordered.
                 // There are odd stateful assumptions throughout these tests.
-                executionWithDefaultPayerWorks(),
-                executionWithCustomPayerWorks(),
-                executionWithCustomPayerWorksWithLastSigBeingCustomPayer(),
-                executionWithCustomPayerWhoSignsAtCreationAsPayerWorks(),
+                executionTriggersOnceTopicHasSatisfiedSubmitKey(),
+                executionTriggersWithWeirdlyRepeatedKey(),
+                executionWithCryptoInsufficientAccountBalanceFails(),
+                executionWithCryptoSenderDeletedFails(),
                 executionWithCustomPayerAndAdminKeyWorks(),
-                executionWithDefaultPayerButNoFundsFails(),
+                executionWithCustomPayerButAccountDeletedFails(),
                 executionWithCustomPayerButNoFundsFails(),
+                executionWithCustomPayerWhoSignsAtCreationAsPayerWorks(),
+                executionWithCustomPayerWorks(),
+                executionWithCustomPayerWorksWithLastSigBeingCustomPayer(),
                 executionWithDefaultPayerButAccountDeletedFails(),
-                executionWithCustomPayerButAccountDeletedFails(),
+                executionWithDefaultPayerButNoFundsFails(),
+                executionWithDefaultPayerWorks(),
                 executionWithInvalidAccountAmountsFails(),
-                executionWithCryptoInsufficientAccountBalanceFails(),
-                executionWithCryptoSenderDeletedFails(),
                 executionWithTokenInsufficientAccountBalanceFails(),
-                executionTriggersWithWeirdlyRepeatedKey(),
-                executionTriggersOnceTopicHasSatisfiedSubmitKey(),
-                scheduledSubmitThatWouldFailWithTopicDeletedCannotBeSigned(),
-                scheduledSubmitFailedWithInvalidChunkNumberStillPaysServiceFeeButHasNoImpact(),
-                scheduledSubmitFailedWithInvalidChunkTxnIdStillPaysServiceFeeButHasNoImpact(),
-                scheduledSubmitThatWouldFailWithInvalidTopicIdCannotBeScheduled(),
-                scheduledSubmitFailedWithMsgSizeTooLargeStillPaysServiceFeeButHasNoImpact(),
-                scheduledXferFailingWithEmptyTokenTransferAccountAmountsPaysServiceFeeButNoImpact(),
-                scheduledXferFailingWithRepeatedTokenIdPaysServiceFeeButNoImpact(),
-                scheduledXferFailingWithNonNetZeroTokenTransferPaysServiceFeeButNoImpact(),
-                scheduledXferFailingWithUnassociatedAccountTransferPaysServiceFeeButNoImpact(),
-                scheduledXferFailingWithNonKycedAccountTransferPaysServiceFeeButNoImpact(),
-                scheduledXferFailingWithFrozenAccountTransferPaysServiceFeeButNoImpact(),
-                scheduledXferFailingWithDeletedTokenPaysServiceFeeButNoImpact(),
-                scheduledXferFailingWithDeletedAccountPaysServiceFeeButNoImpact(),
-                scheduledMintExecutesProperly(),
-                scheduledUniqueMintExecutesProperly(),
-                scheduledUniqueMintFailsWithInvalidBatchSize(),
-                scheduledUniqueMintFailsWithInvalidMetadata(),
-                scheduledMintFailsWithInvalidAmount(),
-                scheduledMintWithInvalidTokenThrowsUnresolvableSigners(),
-                scheduledMintFailsWithInvalidTxBody(),
                 scheduledBurnExecutesProperly(),
-                scheduledUniqueBurnExecutesProperly(),
-                scheduledUniqueBurnFailsWithInvalidBatchSize(),
-                scheduledUniqueBurnFailsWithInvalidNftId(),
+                scheduledBurnFailsWithInvalidTxBody(),
                 scheduledBurnForUniqueFailsWithInvalidAmount(),
                 scheduledBurnForUniqueSucceedsWithExistingAmount(),
-                scheduledBurnFailsWithInvalidTxBody(),
-                scheduledFreezeWorksAsExpected(),
                 scheduledFreezeWithUnauthorizedPayerFails(isLongTermEnabled),
-                scheduledPermissionedFileUpdateWorksAsExpected(),
+                scheduledFreezeWorksAsExpected(),
+                scheduledMintExecutesProperly(),
+                scheduledMintFailsWithInvalidAmount(),
+                scheduledMintFailsWithInvalidTxBody(),
+                scheduledMintWithInvalidTokenThrowsUnresolvableSigners(),
                 scheduledPermissionedFileUpdateUnauthorizedPayerFails(),
-                scheduledSystemDeleteWorksAsExpected(),
+                scheduledPermissionedFileUpdateWorksAsExpected(),
+                scheduledSubmitFailedWithInvalidChunkNumberStillPaysServiceFeeButHasNoImpact(),
+                scheduledSubmitFailedWithInvalidChunkTxnIdStillPaysServiceFeeButHasNoImpact(),
+                scheduledSubmitFailedWithMsgSizeTooLargeStillPaysServiceFeeButHasNoImpact(),
+                scheduledSubmitThatWouldFailWithInvalidTopicIdCannotBeScheduled(),
+                scheduledSubmitThatWouldFailWithTopicDeletedCannotBeSigned(),
                 scheduledSystemDeleteUnauthorizedPayerFails(isLongTermEnabled),
-                // congestionPricingAffectsImmediateScheduleExecution(),
+                scheduledSystemDeleteWorksAsExpected(),
+                scheduledUniqueBurnExecutesProperly(),
+                scheduledUniqueBurnFailsWithInvalidBatchSize(),
+                scheduledUniqueBurnFailsWithInvalidNftId(),
+                scheduledUniqueMintExecutesProperly(),
+                scheduledUniqueMintFailsWithInvalidBatchSize(),
+                scheduledUniqueMintFailsWithInvalidMetadata(),
+                scheduledXferFailingWithDeletedAccountPaysServiceFeeButNoImpact(),
+                scheduledXferFailingWithDeletedTokenPaysServiceFeeButNoImpact(),
+                scheduledXferFailingWithEmptyTokenTransferAccountAmountsPaysServiceFeeButNoImpact(),
+                scheduledXferFailingWithFrozenAccountTransferPaysServiceFeeButNoImpact(),
+                scheduledXferFailingWithNonKycedAccountTransferPaysServiceFeeButNoImpact(),
+                scheduledXferFailingWithNonNetZeroTokenTransferPaysServiceFeeButNoImpact(),
+                scheduledXferFailingWithRepeatedTokenIdPaysServiceFeeButNoImpact(),
+                scheduledXferFailingWithUnassociatedAccountTransferPaysServiceFeeButNoImpact(),
+                // congestionPricingAffectsImmediateScheduleExecution(),
                 zSuiteCleanup()));
@@ -284,9 +286,11 @@ private HapiSpec aSuiteSetup() {
                         .overridingProps(Map.of(SCHEDULING_WHITELIST, whitelistAll)));
     }
 
+    // @todo('9970') Currently this cannot be run for modular service due to key gathering limitations.
    private HapiSpec scheduledBurnFailsWithInvalidTxBody() {
        return defaultHapiSpec("ScheduledBurnFailsWithInvalidTxBody")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(TREASURY),
                        cryptoCreate(SCHEDULE_PAYER),
                        newKeyNamed(SUPPLY_KEY),
@@ -306,9 +310,11 @@ private HapiSpec scheduledBurnFailsWithInvalidTxBody() {
                                .hasPriority(recordWith().status(INVALID_TRANSACTION_BODY)));
    }
 
+    // @todo('9970') Currently this cannot be run for modular service due to key gathering limitations.
    private HapiSpec scheduledMintFailsWithInvalidTxBody() {
        return defaultHapiSpec("ScheduledMintFailsWithInvalidTxBody")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(TREASURY),
                        cryptoCreate(SCHEDULE_PAYER),
                        newKeyNamed(SUPPLY_KEY),
@@ -332,9 +338,10 @@ private HapiSpec scheduledMintFailsWithInvalidTxBody() {
                        getTokenInfo(A_TOKEN).hasTotalSupply(0));
    }
 
+    @HapiTest
    private HapiSpec scheduledMintWithInvalidTokenThrowsUnresolvableSigners() {
        return defaultHapiSpec("ScheduledMintWithInvalidTokenThrowsUnresolvableSigners")
-                .given(cryptoCreate(SCHEDULE_PAYER))
+                .given(overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM), cryptoCreate(SCHEDULE_PAYER))
                .when(scheduleCreate(
                        A_SCHEDULE,
                        mintToken("0.0.123231", List.of(ByteString.copyFromUtf8("m1")))
@@ -344,9 +351,11 @@ private HapiSpec scheduledMintWithInvalidTokenThrowsUnresolvableSigners() {
                .then();
 
+    @HapiTest
    private HapiSpec scheduledUniqueBurnFailsWithInvalidBatchSize() {
        return defaultHapiSpec("ScheduledUniqueBurnFailsWithInvalidBatchSize")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(TREASURY),
                        cryptoCreate(SCHEDULE_PAYER),
                        newKeyNamed(SUPPLY_KEY),
@@ -375,9 +384,11 @@ private HapiSpec scheduledUniqueBurnFailsWithInvalidBatchSize() {
                        getTokenInfo(A_TOKEN).hasTotalSupply(1));
    }
 
+    @HapiTest
    private HapiSpec scheduledUniqueBurnExecutesProperly() {
        return defaultHapiSpec("ScheduledUniqueBurnExecutesProperly")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(TREASURY),
                        cryptoCreate(SCHEDULE_PAYER),
                        newKeyNamed(SUPPLY_KEY),
@@ -449,9 +460,11 @@ private HapiSpec scheduledUniqueBurnExecutesProperly() {
                        getTokenInfo(A_TOKEN).hasTotalSupply(0));
    }
 
+    @HapiTest
    private HapiSpec scheduledUniqueMintFailsWithInvalidMetadata() {
        return defaultHapiSpec("ScheduledUniqueMintFailsWithInvalidMetadata")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate("payer"),
                        cryptoCreate(TREASURY),
                        cryptoCreate(SCHEDULE_PAYER),
@@ -475,9 +488,11 @@ private HapiSpec scheduledUniqueMintFailsWithInvalidMetadata() {
                        getTokenInfo(A_TOKEN).hasTotalSupply(0));
    }
 
+    @HapiTest
    private HapiSpec scheduledUniqueBurnFailsWithInvalidNftId() {
        return defaultHapiSpec("ScheduledUniqueBurnFailsWithInvalidNftId")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(TREASURY),
                        cryptoCreate(SCHEDULE_PAYER),
                        newKeyNamed(SUPPLY_KEY),
@@ -497,9 +512,11 @@ private HapiSpec scheduledUniqueBurnFailsWithInvalidNftId() {
                                .hasPriority(recordWith().status(INVALID_NFT_ID)));
    }
 
+    @HapiTest
    private HapiSpec scheduledBurnForUniqueSucceedsWithExistingAmount() {
        return defaultHapiSpec("scheduledBurnForUniqueSucceedsWithExistingAmount")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(TREASURY),
                        cryptoCreate(SCHEDULE_PAYER),
                        newKeyNamed(SUPPLY_KEY),
@@ -521,9 +538,11 @@ private HapiSpec scheduledBurnForUniqueSucceedsWithExistingAmount() {
                        getTokenInfo(A_TOKEN).hasTotalSupply(0));
    }
 
+    // @todo('9970') Currently this cannot be run for modular service due to key gathering limitations.
    private HapiSpec scheduledBurnForUniqueFailsWithInvalidAmount() {
        return defaultHapiSpec("ScheduledBurnForUniqueFailsWithInvalidAmount")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(TREASURY),
                        cryptoCreate(SCHEDULE_PAYER),
                        newKeyNamed(SUPPLY_KEY),
@@ -555,10 +574,12 @@ private byte[] genRandomBytes(int numBytes) {
        return contents;
    }
 
+    @HapiTest
    private HapiSpec scheduledUniqueMintFailsWithInvalidBatchSize() {
        return defaultHapiSpec("ScheduledUniqueMintFailsWithInvalidBatchSize")
                .given(
                        overriding(TOKENS_NFTS_MAX_BATCH_SIZE_MINT, "5"),
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(TREASURY),
                        cryptoCreate(SCHEDULE_PAYER),
                        newKeyNamed(SUPPLY_KEY),
@@ -591,10 +612,12 @@ private HapiSpec scheduledUniqueMintFailsWithInvalidBatchSize() {
                        overriding(TOKENS_NFTS_MAX_BATCH_SIZE_MINT, defaultMaxBatchSizeMint));
    }
 
+    // @todo('9970') Currently this cannot be run for modular service due to key gathering limitations.
    private HapiSpec scheduledMintFailsWithInvalidAmount() {
        final var zeroAmountTxn = "zeroAmountTxn";
        return defaultHapiSpec("ScheduledMintFailsWithInvalidAmount")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(TREASURY),
                        cryptoCreate(SCHEDULE_PAYER),
                        newKeyNamed(SUPPLY_KEY),
@@ -618,9 +641,11 @@ private HapiSpec scheduledMintFailsWithInvalidAmount() {
                        getTokenInfo(A_TOKEN).hasTotalSupply(101));
    }
 
+    @HapiTest
    private HapiSpec scheduledUniqueMintExecutesProperly() {
        return defaultHapiSpec("ScheduledUniqueMintExecutesProperly")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(TREASURY),
                        cryptoCreate(SCHEDULE_PAYER),
                        newKeyNamed(SUPPLY_KEY),
@@ -695,9 +720,11 @@ private HapiSpec scheduledUniqueMintExecutesProperly() {
                        getTokenInfo(A_TOKEN).hasTotalSupply(2));
    }
 
+    @HapiTest
    private HapiSpec scheduledMintExecutesProperly() {
        return defaultHapiSpec("ScheduledMintExecutesProperly")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(TREASURY),
                        cryptoCreate(SCHEDULE_PAYER),
                        newKeyNamed(SUPPLY_KEY),
@@ -765,9 +792,11 @@ private HapiSpec scheduledMintExecutesProperly() {
                        getTokenInfo(A_TOKEN).hasTotalSupply(111));
    }
 
+    @HapiTest
    private HapiSpec scheduledBurnExecutesProperly() {
        return defaultHapiSpec("ScheduledBurnExecutesProperly")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(TREASURY),
                        cryptoCreate(SCHEDULE_PAYER),
                        newKeyNamed(SUPPLY_KEY),
@@ -836,6 +865,7 @@ private HapiSpec scheduledBurnExecutesProperly() {
                        getTokenInfo(A_TOKEN).hasTotalSupply(91));
    }
 
+    @HapiTest
    private HapiSpec scheduledXferFailingWithDeletedAccountPaysServiceFeeButNoImpact() {
        final String xToken = "XXX";
        final String validSchedule = "withLiveAccount";
@@ -851,6 +881,7 @@ private HapiSpec scheduledXferFailingWithDeletedAccountPaysServiceFeeButNoImpact
 
        return defaultHapiSpec("ScheduledXferFailingWithDeletedTokenPaysServiceFeeButNoImpact")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(schedulePayer),
                        cryptoCreate(xTreasury),
                        cryptoCreate(xCivilian),
@@ -885,6 +916,7 @@ private HapiSpec scheduledXferFailingWithDeletedAccountPaysServiceFeeButNoImpact
                                assertBasicallyIdentical(successFeesObs.get(), failureFeesObs.get(), 1.0)));
    }
 
+    @HapiTest
    private HapiSpec scheduledXferFailingWithDeletedTokenPaysServiceFeeButNoImpact() {
        String xToken = "XXX";
        String validSchedule = "withLiveToken";
@@ -899,6 +931,7 @@ private HapiSpec scheduledXferFailingWithDeletedTokenPaysServiceFeeButNoImpact()
 
        return defaultHapiSpec("ScheduledXferFailingWithDeletedTokenPaysServiceFeeButNoImpact")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        newKeyNamed(ADMIN),
                        cryptoCreate(schedulePayer),
                        cryptoCreate(xTreasury),
@@ -937,6 +970,7 @@ private HapiSpec scheduledXferFailingWithDeletedTokenPaysServiceFeeButNoImpact()
                                assertBasicallyIdentical(successFeesObs.get(), failureFeesObs.get(), 1.0)));
    }
 
+    @HapiTest
    private HapiSpec scheduledXferFailingWithFrozenAccountTransferPaysServiceFeeButNoImpact() {
        String xToken = "XXX";
        String validSchedule = "withUnfrozenAccount";
@@ -951,6 +985,7 @@ private HapiSpec scheduledXferFailingWithFrozenAccountTransferPaysServiceFeeButN
 
        return defaultHapiSpec("ScheduledXferFailingWithFrozenAccountTransferPaysServiceFeeButNoImpact")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        newKeyNamed("freeze"),
                        cryptoCreate(schedulePayer),
                        cryptoCreate(xTreasury),
@@ -991,6 +1026,7 @@ private HapiSpec scheduledXferFailingWithFrozenAccountTransferPaysServiceFeeButN
                                assertBasicallyIdentical(successFeesObs.get(), failureFeesObs.get(), 1.0)));
    }
 
+    @HapiTest
    private HapiSpec scheduledXferFailingWithNonKycedAccountTransferPaysServiceFeeButNoImpact() {
        String xToken = "XXX";
        String validSchedule = "withKycedToken";
@@ -1005,6 +1041,7 @@ private HapiSpec scheduledXferFailingWithNonKycedAccountTransferPaysServiceFeeBu
 
        return defaultHapiSpec("ScheduledXferFailingWithNonKycedAccountTransferPaysServiceFeeButNoImpact")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        newKeyNamed("kyc"),
                        cryptoCreate(schedulePayer),
                        cryptoCreate(xTreasury),
@@ -1044,6 +1081,7 @@ private HapiSpec scheduledXferFailingWithNonKycedAccountTransferPaysServiceFeeBu
                                assertBasicallyIdentical(successFeesObs.get(), failureFeesObs.get(), 1.0)));
    }
 
+    @HapiTest
    private HapiSpec scheduledXferFailingWithUnassociatedAccountTransferPaysServiceFeeButNoImpact() {
        String xToken = "XXX";
        String validSchedule = "withAssociatedToken";
@@ -1059,6 +1097,7 @@ private HapiSpec scheduledXferFailingWithUnassociatedAccountTransferPaysServiceF
 
        return defaultHapiSpec("ScheduledXferFailingWithUnassociatedAccountTransferPaysServiceFeeButNoImpact")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(schedulePayer),
                        cryptoCreate(xTreasury),
                        cryptoCreate(xCivilian),
@@ -1092,6 +1131,7 @@ private HapiSpec scheduledXferFailingWithUnassociatedAccountTransferPaysServiceF
                                assertBasicallyIdentical(successFeesObs.get(), failureFeesObs.get(), 1.0)));
    }
 
+    // @todo('9970') Currently this cannot be run for modular service due to key gathering limitations.
    private HapiSpec scheduledXferFailingWithNonNetZeroTokenTransferPaysServiceFeeButNoImpact() {
        String xToken = "XXX";
        String validSchedule = "withZeroNetTokenChange";
@@ -1106,6 +1146,7 @@ private HapiSpec scheduledXferFailingWithNonNetZeroTokenTransferPaysServiceFeeBu
 
        return defaultHapiSpec("ScheduledXferFailingWithRepeatedTokenIdPaysServiceFeeButNoImpact")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(schedulePayer),
                        cryptoCreate(xTreasury),
                        cryptoCreate(xCivilian),
@@ -1139,6 +1180,7 @@ private HapiSpec scheduledXferFailingWithNonNetZeroTokenTransferPaysServiceFeeBu
                                assertBasicallyIdentical(successFeesObs.get(), failureFeesObs.get(), 1.0)));
    }
 
+    // @todo('9970') Currently this cannot be run for modular service due to key gathering limitations.
    private HapiSpec scheduledXferFailingWithRepeatedTokenIdPaysServiceFeeButNoImpact() {
        String xToken = "XXX";
        String yToken = "YYY";
@@ -1154,6 +1196,7 @@ private HapiSpec scheduledXferFailingWithRepeatedTokenIdPaysServiceFeeButNoImpac
 
        return defaultHapiSpec("ScheduledXferFailingWithRepeatedTokenIdPaysServiceFeeButNoImpact")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(schedulePayer),
                        cryptoCreate(xTreasury),
                        cryptoCreate(yTreasury),
@@ -1195,6 +1238,7 @@ private HapiSpec scheduledXferFailingWithRepeatedTokenIdPaysServiceFeeButNoImpac
                                assertBasicallyIdentical(successFeesObs.get(), failureFeesObs.get(), 1.0)));
    }
 
+    // @todo('9970') Currently this cannot be run for modular service due to key gathering limitations.
    private HapiSpec scheduledXferFailingWithEmptyTokenTransferAccountAmountsPaysServiceFeeButNoImpact() {
        String xToken = "XXX";
        String yToken = "YYY";
@@ -1211,6 +1255,7 @@ private HapiSpec scheduledXferFailingWithEmptyTokenTransferAccountAmountsPaysSer
 
        return defaultHapiSpec("ScheduledXferFailingWithEmptyTokenTransferAccountAmountsPaysServiceFeeButNoImpact")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(schedulePayer),
                        cryptoCreate(xTreasury),
                        cryptoCreate(yTreasury),
@@ -1253,6 +1298,7 @@ private HapiSpec scheduledXferFailingWithEmptyTokenTransferAccountAmountsPaysSer
                                assertBasicallyIdentical(successFeesObs.get(), failureFeesObs.get(), 1.0)));
    }
 
+    @HapiTest
    private HapiSpec scheduledSubmitFailedWithMsgSizeTooLargeStillPaysServiceFeeButHasNoImpact() {
        String immutableTopic = "XXX";
        String validSchedule = "withValidSize";
@@ -1265,7 +1311,10 @@ private HapiSpec scheduledSubmitFailedWithMsgSizeTooLargeStillPaysServiceFeeButH
        var maxValidLen = HapiSpecSetup.getDefaultNodeProps().getInteger("consensus.message.maxBytesAllowed");
 
        return defaultHapiSpec("ScheduledSubmitFailedWithMsgSizeTooLargeStillPaysServiceFeeButHasNoImpact")
-                .given(createTopic(immutableTopic), cryptoCreate(schedulePayer))
+                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
+                        createTopic(immutableTopic),
+                        cryptoCreate(schedulePayer))
                .when(
                        scheduleCreate(
                                        validSchedule,
@@ -1291,6 +1340,7 @@ private HapiSpec scheduledSubmitFailedWithMsgSizeTooLargeStillPaysServiceFeeButH
                                assertBasicallyIdentical(successFeesObs.get(), failureFeesObs.get(), 1.0)));
    }
 
+    @HapiTest
    private HapiSpec scheduledSubmitFailedWithInvalidChunkTxnIdStillPaysServiceFeeButHasNoImpact() {
        String immutableTopic = "XXX";
        String validSchedule = "withValidChunkTxnId";
@@ -1304,7 +1354,10 @@ private HapiSpec scheduledSubmitFailedWithInvalidChunkTxnIdStillPaysServiceFeeBu
        AtomicReference<TransactionID> irrelevantTxnId = new AtomicReference<>();
 
        return defaultHapiSpec("ScheduledSubmitFailedWithInvalidChunkTxnIdStillPaysServiceFeeButHasNoImpact")
-                .given(createTopic(immutableTopic), cryptoCreate(schedulePayer))
+                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
+                        createTopic(immutableTopic),
+                        cryptoCreate(schedulePayer))
                .when(
                        withOpContext((spec, opLog) -> {
                            var subOp = usableTxnIdNamed(successTx).payerId(schedulePayer);
@@ -1340,6 +1393,7 @@ private HapiSpec scheduledSubmitFailedWithInvalidChunkTxnIdStillPaysServiceFeeBu
                        (spec, opLog) -> Assertions.assertEquals(successFeesObs.get(), failureFeesObs.get())));
    }
 
+    @HapiTest
    private HapiSpec scheduledSubmitFailedWithInvalidChunkNumberStillPaysServiceFeeButHasNoImpact() {
        String immutableTopic = "XXX";
        String validSchedule = "withValidChunkNumber";
@@ -1352,7 +1406,10 @@ private HapiSpec scheduledSubmitFailedWithInvalidChunkNumberStillPaysServiceFeeB
        AtomicReference<TransactionID> initialTxnId = new AtomicReference<>();
 
        return defaultHapiSpec("ScheduledSubmitFailedWithInvalidChunkNumberStillPaysServiceFeeButHasNoImpact")
-                .given(createTopic(immutableTopic), cryptoCreate(schedulePayer))
+                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
+                        createTopic(immutableTopic),
+                        cryptoCreate(schedulePayer))
                .when(
                        withOpContext((spec, opLog) -> {
                            var subOp = usableTxnIdNamed(successTx).payerId(schedulePayer);
@@ -1384,13 +1441,17 @@ private HapiSpec scheduledSubmitFailedWithInvalidChunkNumberStillPaysServiceFeeB
                        (spec, opLog) -> Assertions.assertEquals(successFeesObs.get(), failureFeesObs.get())));
    }
 
+    @HapiTest
    private HapiSpec scheduledSubmitThatWouldFailWithInvalidTopicIdCannotBeScheduled() {
        String civilianPayer = PAYER;
        AtomicReference<Map<AccountID, Long>> successFeesObs = new AtomicReference<>();
        AtomicReference<Map<AccountID, Long>> failureFeesObs = new AtomicReference<>();
 
        return defaultHapiSpec("ScheduledSubmitThatWouldFailWithInvalidTopicIdCannotBeScheduled")
-                .given(cryptoCreate(civilianPayer), createTopic("fascinating"))
+                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
+                        cryptoCreate(civilianPayer),
+                        createTopic("fascinating"))
                .when(
                        scheduleCreate("yup", submitMessageTo("fascinating").message(RANDOM_MSG))
                                .payingWith(civilianPayer)
@@ -1418,6 +1479,8 @@ private void assertBasicallyIdentical(
        }
    }
 
+    // @todo('UNRESOLVABLE_SIGNER') Need to work out why this succeeds instead
+    // of failing with UNRESOLVABLE_REQUIRED_SIGNERS
    private HapiSpec scheduledSubmitThatWouldFailWithTopicDeletedCannotBeSigned() {
        String adminKey = ADMIN;
        String mutableTopic = "XXX";
@@ -1427,6 +1490,7 @@ private HapiSpec scheduledSubmitThatWouldFailWithTopicDeletedCannotBeSigned() {
 
        return defaultHapiSpec("ScheduledSubmitThatWouldFailWithTopicDeletedCannotBeSigned")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        newKeyNamed(adminKey),
                        createTopic(mutableTopic).adminKeyName(adminKey),
                        cryptoCreate(schedulePayer),
@@ -1442,6 +1506,7 @@ private HapiSpec scheduledSubmitThatWouldFailWithTopicDeletedCannotBeSigned() {
                                .hasKnownStatus(UNRESOLVABLE_REQUIRED_SIGNERS));
    }
 
+    // @todo('NOT_EXEC') Need to work out why this does not actually execute
    private HapiSpec executionTriggersOnceTopicHasSatisfiedSubmitKey() {
        String adminKey = ADMIN;
        String submitKey = "submit";
@@ -1450,6 +1515,7 @@ private HapiSpec executionTriggersOnceTopicHasSatisfiedSubmitKey() {
 
        return defaultHapiSpec("ExecutionTriggersOnceTopicHasNoSubmitKey")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        newKeyNamed(adminKey),
                        newKeyNamed(submitKey),
                        createTopic(mutableTopic).adminKeyName(adminKey).submitKeyName(submitKey),
@@ -1478,11 +1544,13 @@ private HapiSpec executionTriggersOnceTopicHasSatisfiedSubmitKey() {
                        getTopicInfo(mutableTopic).hasSeqNo(1L));
    }
 
+    // @todo('NOT_EXEC') Need to work out why this does not actually execute
    private HapiSpec executionTriggersWithWeirdlyRepeatedKey() {
        String schedule = "dupKeyXfer";
 
        return defaultHapiSpec("ExecutionTriggersWithWeirdlyRepeatedKey")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(WEIRDLY_POPULAR_KEY),
                        cryptoCreate(SENDER_1).key(WEIRDLY_POPULAR_KEY).balance(1L),
                        cryptoCreate(SENDER_2).key(WEIRDLY_POPULAR_KEY).balance(1L),
@@ -1510,10 +1578,12 @@ private HapiSpec executionTriggersWithWeirdlyRepeatedKey() {
                        getTxnRecord("repeatedSigning").logged());
    }
 
+    // @todo('XFER_LIST') Need to work out why this does not produce the expected transfer list
    private HapiSpec executionWithDefaultPayerWorks() {
        long transferAmount = 1;
        return defaultHapiSpec("ExecutionWithDefaultPayerWorks")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(SENDER),
                        cryptoCreate(RECEIVER),
                        cryptoCreate(PAYING_ACCOUNT),
@@ -1566,12 +1636,14 @@ private HapiSpec executionWithDefaultPayerWorks() {
                }));
    }
 
+    // @todo('WRONG_RESULT') Need to figure out why the ending balance does not match expected
    private HapiSpec executionWithDefaultPayerButNoFundsFails() {
        long balance = 10_000_000L;
        long noBalance = 0L;
        long transferAmount = 1L;
        return defaultHapiSpec("ExecutionWithDefaultPayerButNoFundsFails")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT).balance(balance),
                        cryptoCreate(LUCKY_RECEIVER),
                        cryptoCreate(SENDER).balance(transferAmount),
@@ -1602,11 +1674,13 @@ private HapiSpec executionWithDefaultPayerButNoFundsFails() {
                }));
    }
 
+    // @todo('9974') Need to work out why this generates INVALID_SIGNATURE
    private HapiSpec executionWithCustomPayerWorksWithLastSigBeingCustomPayer() {
        long noBalance = 0L;
        long transferAmount = 1;
        return defaultHapiSpec("ExecutionWithCustomPayerWorksWithLastSigBeingCustomPayer")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT),
                        cryptoCreate(SENDER).balance(transferAmount),
                        cryptoCreate(RECEIVER).balance(noBalance),
@@ -1638,12 +1712,14 @@ private HapiSpec executionWithCustomPayerWorksWithLastSigBeingCustomPayer() {
                        getAccountBalance(RECEIVER).hasTinyBars(transferAmount));
    }
 
+    // @todo('WRONG_RESULT') Need to figure out why the ending balance does not match expected
    private HapiSpec executionWithCustomPayerButNoFundsFails() {
        long balance = 0L;
        long noBalance = 0L;
        long transferAmount = 1;
        return defaultHapiSpec("ExecutionWithCustomPayerButNoFundsFails")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT).balance(balance),
                        cryptoCreate(SENDER).balance(transferAmount),
                        cryptoCreate(RECEIVER).balance(noBalance),
@@ -1669,12 +1745,14 @@ private HapiSpec executionWithCustomPayerButNoFundsFails() {
                }));
    }
 
+    // @todo('WRONG_RESULT') Need to figure out why the ending balance does not match expected
    private HapiSpec executionWithDefaultPayerButAccountDeletedFails() {
        long balance = 10_000_000L;
        long noBalance = 0L;
        long transferAmount = 1L;
        return defaultHapiSpec("ExecutionWithDefaultPayerButAccountDeletedFails")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT).balance(balance),
                        cryptoCreate(LUCKY_RECEIVER),
                        cryptoCreate(SENDER).balance(transferAmount),
@@ -1695,12 +1773,14 @@ private HapiSpec executionWithDefaultPayerButAccountDeletedFails() {
                                .hasPriority(recordWith().status(INSUFFICIENT_PAYER_BALANCE)));
    }
 
+    // @todo('WRONG_RESULT') Need to figure out why the ending balance does not match expected
    private HapiSpec executionWithCustomPayerButAccountDeletedFails() {
        long balance = 10_000_000L;
        long noBalance = 0L;
        long transferAmount = 1;
        return defaultHapiSpec("ExecutionWithCustomPayerButAccountDeletedFails")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT).balance(balance),
                        cryptoCreate(SENDER).balance(transferAmount),
                        cryptoCreate(RECEIVER).balance(noBalance),
@@ -1730,6 +1810,7 @@ private HapiSpec executionWithCustomPayerButAccountDeletedFails() {
                }));
    }
 
+    @HapiTest
    private HapiSpec executionWithCryptoInsufficientAccountBalanceFails() {
        long noBalance = 0L;
        long senderBalance = 100L;
@@ -1737,6 +1818,7 @@ private HapiSpec executionWithCryptoInsufficientAccountBalanceFails() {
        long payerBalance = 1_000_000L;
        return defaultHapiSpec("ExecutionWithCryptoInsufficientAccountBalanceFails")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT).balance(payerBalance),
                        cryptoCreate(SENDER).balance(senderBalance),
                        cryptoCreate(RECEIVER).balance(noBalance),
@@ -1762,6 +1844,7 @@ private HapiSpec executionWithCryptoInsufficientAccountBalanceFails() {
                }));
    }
 
+    // @todo('NOT_EXEC') Need to work out why this does not actually execute
    private HapiSpec executionWithCryptoSenderDeletedFails() {
        long noBalance = 0L;
        long senderBalance = 100L;
@@ -1769,6 +1852,7 @@ private HapiSpec executionWithCryptoSenderDeletedFails() {
        long payerBalance = 1_000_000L;
        return defaultHapiSpec("ExecutionWithCryptoSenderDeletedFails")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT).balance(payerBalance),
                        cryptoCreate(SENDER).balance(senderBalance),
                        cryptoCreate(RECEIVER).balance(noBalance),
@@ -1796,6 +1880,8 @@ private HapiSpec executionWithCryptoSenderDeletedFails() {
                }));
    }
 
+    // ExecutionWithTokenInsufficientAccountBalanceFails
+    @HapiTest
    private HapiSpec executionWithTokenInsufficientAccountBalanceFails() {
        String xToken = "XXX";
        String invalidSchedule = "withInsufficientTokenTransfer";
@@ -1805,6 +1891,7 @@ private HapiSpec executionWithTokenInsufficientAccountBalanceFails() {
        String failedTxn = "bad";
        return defaultHapiSpec("ExecutionWithTokenInsufficientAccountBalanceFails")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        newKeyNamed(ADMIN),
                        cryptoCreate(schedulePayer),
                        cryptoCreate(xTreasury),
@@ -1828,6 +1915,7 @@ private HapiSpec executionWithTokenInsufficientAccountBalanceFails() {
                        getAccountBalance(xTreasury).hasTokenBalance(xToken, 100));
    }
 
+    // @todo('9970') Currently this cannot be run for modular service due to key gathering limitations.
    private HapiSpec executionWithInvalidAccountAmountsFails() {
        long transferAmount = 100;
        long senderBalance = 1000L;
@@ -1835,6 +1923,7 @@ private HapiSpec executionWithInvalidAccountAmountsFails() {
        long noBalance = 0L;
        return defaultHapiSpec("ExecutionWithInvalidAccountAmountsFails")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT).balance(payingAccountBalance),
                        cryptoCreate(SENDER).balance(senderBalance),
                        cryptoCreate(RECEIVER).balance(noBalance),
@@ -1863,10 +1952,12 @@ private HapiSpec executionWithInvalidAccountAmountsFails() {
                }));
    }
 
+    // @todo('XFER_LIST') Need to work out why this does not produce the expected transfer list
    private HapiSpec executionWithCustomPayerWorks() {
        long transferAmount = 1;
        return defaultHapiSpec("ExecutionWithCustomPayerWorks")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT),
                        cryptoCreate(SENDER),
                        cryptoCreate(RECEIVER),
@@ -1927,10 +2018,12 @@ private HapiSpec executionWithCustomPayerWorks() {
                }));
    }
 
+    // @todo('XFER_LIST') Need to work out why this does not produce the expected transfer list
    private HapiSpec executionWithCustomPayerAndAdminKeyWorks() {
        long transferAmount = 1;
        return defaultHapiSpec("ExecutionWithCustomPayerAndAdminKeyWorks")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        newKeyNamed("adminKey"),
                        cryptoCreate(PAYING_ACCOUNT),
                        cryptoCreate(SENDER),
@@ -1993,10 +2086,12 @@ private HapiSpec executionWithCustomPayerAndAdminKeyWorks() {
                }));
    }
 
+    // @todo('XFER_LIST') Need to work out why this does not produce the expected transfer list
    private HapiSpec executionWithCustomPayerWhoSignsAtCreationAsPayerWorks() {
        long transferAmount = 1;
        return defaultHapiSpec("ExecutionWithCustomPayerWhoSignsAtCreationAsPayerWorks")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT),
                        cryptoCreate(SENDER),
                        cryptoCreate(RECEIVER),
@@ -2084,12 +2179,13 @@ public static boolean transferListCheck(
        return amountHasBeenTransferred && payerHasPaid;
    }
 
+    // Currently this cannot be run as HapiTest because it stops the captive nodes.
    private HapiSpec scheduledFreezeWorksAsExpected() {
-
-        final byte[] poeticUpgradeHash = getPoeticUpgradeHash();
+        final byte[] poeticUpgradeHash = ScheduleUtils.getPoeticUpgradeHash();
 
        return defaultHapiSpec("ScheduledFreezeWorksAsExpected")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT),
                        overriding(SCHEDULING_WHITELIST, "Freeze"),
                        fileUpdate(standardUpdateFile)
@@ -2125,14 +2221,14 @@ private HapiSpec scheduledFreezeWorksAsExpected() {
                }));
    }
 
+    // Currently this cannot be run as HapiTest because it stops the captive nodes.
    private HapiSpec scheduledFreezeWithUnauthorizedPayerFails(boolean isLongTermEnabled) {
-
-        final byte[] poeticUpgradeHash = getPoeticUpgradeHash();
+        final byte[] poeticUpgradeHash = ScheduleUtils.getPoeticUpgradeHash();
 
        if (isLongTermEnabled) {
-
            return defaultHapiSpec("ScheduledFreezeWithUnauthorizedPayerFails")
                    .given(
+                            overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                            cryptoCreate(PAYING_ACCOUNT),
                            cryptoCreate(PAYING_ACCOUNT_2),
                            overriding(SCHEDULING_WHITELIST, "Freeze"),
@@ -2157,9 +2253,9 @@ private HapiSpec scheduledFreezeWithUnauthorizedPayerFails(boolean isLongTermEna
                                    .hasPrecheck(BUSY),
                            overriding(SCHEDULING_WHITELIST, defaultWhitelist));
        }
-
        return defaultHapiSpec("ScheduledFreezeWithUnauthorizedPayerFails")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT),
                        cryptoCreate(PAYING_ACCOUNT_2),
                        overriding(SCHEDULING_WHITELIST, "Freeze"),
@@ -2196,11 +2292,12 @@ private HapiSpec scheduledFreezeWithUnauthorizedPayerFails(boolean isLongTermEna
                }));
    }
 
+    // @todo('9973') Need to work out why this does not actually execute
    private HapiSpec scheduledPermissionedFileUpdateWorksAsExpected() {
        return defaultHapiSpec("ScheduledPermissionedFileUpdateWorksAsExpected")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT),
-                        overriding(SCHEDULING_WHITELIST, "FileUpdate"),
                        scheduleCreate(
                                        A_SCHEDULE,
                                        fileUpdate(standardUpdateFile).contents("fooo!"))
@@ -2227,13 +2324,14 @@ private HapiSpec scheduledPermissionedFileUpdateWorksAsExpected() {
                }));
    }
 
+    // @todo('9973') Work out permissioned file update issues
    private HapiSpec scheduledPermissionedFileUpdateUnauthorizedPayerFails() {
        return defaultHapiSpec("ScheduledPermissionedFileUpdateUnauthorizedPayerFails")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT),
                        cryptoCreate(PAYING_ACCOUNT_2),
-                        overriding(SCHEDULING_WHITELIST, "FileUpdate"),
                        scheduleCreate(
                                        A_SCHEDULE,
                                        fileUpdate(standardUpdateFile).contents("fooo!"))
@@ -2260,13 +2358,14 @@ private HapiSpec scheduledPermissionedFileUpdateUnauthorizedPayerFails() {
                }));
    }
 
+    // @todo('9973') Work out permissioned file update issues
    private HapiSpec scheduledSystemDeleteWorksAsExpected() {
        return defaultHapiSpec("ScheduledSystemDeleteWorksAsExpected")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT),
                        fileCreate("misc").lifetime(THREE_MONTHS_IN_SECONDS).contents(ORIG_FILE),
-                        overriding(SCHEDULING_WHITELIST, "SystemDelete"),
                        scheduleCreate(A_SCHEDULE, systemFileDelete("misc").updatingExpiry(1L))
                                .withEntityMemo(randomUppercase(100))
                                .designatingPayer(SYSTEM_DELETE_ADMIN)
@@ -2292,16 +2391,17 @@ private HapiSpec scheduledSystemDeleteWorksAsExpected() {
                }));
    }
 
+    @HapiTest
    private HapiSpec scheduledSystemDeleteUnauthorizedPayerFails(boolean isLongTermEnabled) {
        if (isLongTermEnabled) {
            return defaultHapiSpec("ScheduledSystemDeleteUnauthorizedPayerFails")
                    .given(
+                            overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                            cryptoCreate(PAYING_ACCOUNT),
                            cryptoCreate(PAYING_ACCOUNT_2),
-                            fileCreate("misc").lifetime(THREE_MONTHS_IN_SECONDS).contents(ORIG_FILE),
-                            overriding(SCHEDULING_WHITELIST, "SystemDelete"))
+                            fileCreate("misc").lifetime(THREE_MONTHS_IN_SECONDS).contents(ORIG_FILE))
                    .when()
                    .then(
                            scheduleCreate(A_SCHEDULE, systemFileDelete("misc").updatingExpiry(1L))
@@ -2319,6 +2419,7 @@ private HapiSpec scheduledSystemDeleteUnauthorizedPayerFails(boolean isLongTermE
 
        return defaultHapiSpec("ScheduledSystemDeleteUnauthorizedPayerFails")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(PAYING_ACCOUNT),
                        cryptoCreate(PAYING_ACCOUNT_2),
                        fileCreate("misc").lifetime(THREE_MONTHS_IN_SECONDS).contents(ORIG_FILE),
@@ -2348,6 +2449,8 @@ private HapiSpec scheduledSystemDeleteUnauthorizedPayerFails(boolean isLongTermE
                }));
    }
 
+    // @todo('') Work out why we get `PLATFORM_TRANSACTION_NOT_CREATED` instead of `OK` for UncheckedSubmit...
+    // This appears to come from `blockingOrder` call in `when` clause
    private HapiSpec congestionPricingAffectsImmediateScheduleExecution() {
        var artificialLimits = protoDefsFromResource("testSystemFiles/artificial-limits-congestion.json");
        var defaultThrottles = protoDefsFromResource("testSystemFiles/throttles-dev.json");
@@ -2360,8 +2463,8 @@ private HapiSpec congestionPricingAffectsImmediateScheduleExecution() {
 
        return defaultHapiSpec("CongestionPricingAffectsImmediateScheduleExecution")
                .given(
+                        overriding(SCHEDULING_WHITELIST, WHITELIST_MINIMUM),
                        cryptoCreate(ACCOUNT).payingWith(GENESIS).balance(ONE_MILLION_HBARS),
-                        overriding(SCHEDULING_WHITELIST, "ContractCall"),
                        uploadInitCode(contract),
                        contractCreate(contract),
                        scheduleCreate(
@@ -2450,15 +2553,8 @@ private HapiSpec congestionPricingAffectsImmediateScheduleExecution() {
                }));
    }
 
-    public static byte[] getPoeticUpgradeHash() {
-        final byte[] poeticUpgradeHash;
-        try {
-            final var sha384 = MessageDigest.getInstance("SHA-384");
-            final var poeticUpgrade = Files.readAllBytes(Paths.get(poeticUpgradeLoc));
-            poeticUpgradeHash = sha384.digest(poeticUpgrade);
-        } catch (NoSuchAlgorithmException | IOException e) {
-            throw new IllegalStateException("scheduledFreezeWorksAsExpected environment is unsuitable", e);
-        }
-        return poeticUpgradeHash;
+    private <T extends Record> T getTestConfig(Class<T> configClass) {
+        final TestConfigBuilder builder = new TestConfigBuilder(configClass);
+        return builder.getOrCreateConfig().getConfigData(configClass);
    }
 }
diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleLongTermExecutionSpecs.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleLongTermExecutionSpecs.java
index 524b3a498b07..5899f32c87d0 100644
--- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleLongTermExecutionSpecs.java
+++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleLongTermExecutionSpecs.java
@@ -48,8 +48,8 @@
 import static com.hedera.services.bdd.suites.freeze.UpgradeSuite.poeticUpgradeLoc;
 import static com.hedera.services.bdd.suites.freeze.UpgradeSuite.standardUpdateFile;
 import static com.hedera.services.bdd.suites.schedule.ScheduleExecutionSpecs.ORIG_FILE;
-import static com.hedera.services.bdd.suites.schedule.ScheduleExecutionSpecs.getPoeticUpgradeHash;
 import static com.hedera.services.bdd.suites.schedule.ScheduleExecutionSpecs.transferListCheck;
+import static com.hedera.services.bdd.suites.schedule.ScheduleUtils.getPoeticUpgradeHash;
 import static com.hedera.services.bdd.suites.utils.sysfiles.serdes.ThrottleDefsLoader.protoDefsFromResource;
 import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.ACCOUNT_DELETED;
 import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.AUTHORIZATION_FAILED;
diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleRecordSpecs.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleRecordSpecs.java
index e782b1a07efa..8954c31606db 100644
--- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleRecordSpecs.java
+++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleRecordSpecs.java
@@ -197,6 +197,7 @@ public HapiSpec canScheduleChunkedMessages() {
        String ofGeneralInterest = "Scotch";
        AtomicReference<TransactionID> initialTxnId = new AtomicReference<>();
 
+        // validation here is checking fees and staking, not message creation on the topic...
        return defaultHapiSpec("CanScheduleChunkedMessages")
                .given(
                        overridingAllOf(Map.of(
diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleUtils.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleUtils.java
index 94426ed2c086..f121d10fa819 100644
--- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleUtils.java
+++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/schedule/ScheduleUtils.java
@@ -16,10 +16,17 @@
 
 package com.hedera.services.bdd.suites.schedule;
 
+import static com.hedera.services.bdd.suites.freeze.UpgradeSuite.poeticUpgradeLoc;
+
 import com.hedera.hapi.node.base.HederaFunctionality;
 import com.hederahashgraph.api.proto.java.SchedulableTransactionBody;
 import com.hederahashgraph.api.proto.java.SchedulableTransactionBody.Builder;
 import com.hederahashgraph.api.proto.java.TransactionBody;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
 import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
@@ -92,6 +99,7 @@ public final class ScheduleUtils {
 
     private ScheduleUtils() {}
 
+    // public because this is used in HapiScheduleCreate
     public static SchedulableTransactionBody fromOrdinary(TransactionBody txn) {
         Builder scheduleBuilder = SchedulableTransactionBody.newBuilder();
         scheduleBuilder.setTransactionFee(txn.getTransactionFee());
@@ -167,6 +175,18 @@ public static SchedulableTransactionBody fromOrdinary(TransactionBody txn) {
         return scheduleBuilder.build();
     }
 
+    static byte[] getPoeticUpgradeHash() {
+        final byte[] poeticUpgradeHash;
+        try {
+            final var sha384 = MessageDigest.getInstance("SHA-384");
+            final var poeticUpgrade = Files.readAllBytes(Paths.get(poeticUpgradeLoc));
+            poeticUpgradeHash = sha384.digest(poeticUpgrade);
+        } catch (NoSuchAlgorithmException | IOException e) {
+            throw new IllegalStateException("scheduledFreezeWorksAsExpected environment is unsuitable", e);
+        }
+        return poeticUpgradeHash;
+    }
+
     private static String getWhitelistAll() {
         final List<String> whitelistNames = new LinkedList<>();
         for (final HederaFunctionality enumValue : HederaFunctionality.values()) {
diff --git a/hedera-node/test-clients/src/main/java/module-info.java b/hedera-node/test-clients/src/main/java/module-info.java
index f771cc3cf0f2..5803f970819c 100644
--- a/hedera-node/test-clients/src/main/java/module-info.java
+++ b/hedera-node/test-clients/src/main/java/module-info.java
@@ -20,6 +20,7 @@
     requires transitive org.yaml.snakeyaml;
     requires transitive tuweni.bytes;
     requires com.hedera.node.app.service.evm;
+    requires com.hedera.node.config;
     requires com.fasterxml.jackson.core;
     requires com.fasterxml.jackson.databind;
     requires com.github.docker.java.api;
@@ -28,6 +29,7 @@
     requires com.swirlds.config.api;
     requires com.swirlds.merkledb;
     requires com.swirlds.platform.core;
+    requires com.swirlds.test.framework;
     requires grpc.netty;
     requires io.netty.handler;
     requires java.net.http;