diff --git a/.github/workflows/node-zxc-build-release-artifact.yaml b/.github/workflows/node-zxc-build-release-artifact.yaml index aabdf1cca304..3e814aa461fa 100644 --- a/.github/workflows/node-zxc-build-release-artifact.yaml +++ b/.github/workflows/node-zxc-build-release-artifact.yaml @@ -157,7 +157,7 @@ jobs: fi - name: Checkout Code - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Setup Java uses: actions/setup-java@0ab4596768b603586c0de567f2430c30f5b0d2b0 # v3.13.0 @@ -243,7 +243,7 @@ jobs: echo "::endgroup::" - name: Checkout Code - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Authenticate to Google Cloud uses: google-github-actions/auth@35b0e87d162680511bf346c299f71c9c5c379033 # v1.1.1 @@ -389,7 +389,7 @@ jobs: - build-artifact steps: - name: Checkout Code - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Authenticate to Google Cloud id: google-auth @@ -517,7 +517,7 @@ jobs: - validate steps: - name: Checkout Code - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Install GnuPG Tools if: ${{ inputs.dry-run-enabled != true }} @@ -609,7 +609,7 @@ jobs: if: ${{ inputs.sdk-release-profile != 'none' }} steps: - name: Checkout Code - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Install GnuPG Tools if: ${{ inputs.dry-run-enabled != true }} diff --git a/.github/workflows/node-zxc-compile-application-code.yaml b/.github/workflows/node-zxc-compile-application-code.yaml index 3f3d1145f835..eef54c23c0d9 100644 --- a/.github/workflows/node-zxc-compile-application-code.yaml +++ b/.github/workflows/node-zxc-compile-application-code.yaml @@ -117,7 +117,7 @@ jobs: runs-on: [self-hosted, Linux, large, ephemeral] steps: - name: Checkout Code - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Expand Shallow Clone for SonarQube and Spotless if: ${{ (inputs.enable-sonar-analysis || inputs.enable-unit-tests || inputs.enable-spotless-check) && !cancelled() }} diff --git a/.github/workflows/node-zxcron-release-branching.yaml b/.github/workflows/node-zxcron-release-branching.yaml index 92dc2af10ebe..c4a95905e98a 100644 --- a/.github/workflows/node-zxcron-release-branching.yaml +++ b/.github/workflows/node-zxcron-release-branching.yaml @@ -41,7 +41,7 @@ jobs: triggered: ${{ steps.evaluate.outputs.triggered }} steps: - name: Checkout Code - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Read Trigger Time id: time @@ -89,7 +89,7 @@ jobs: if: ${{ needs.check-trigger.outputs.triggered == 'true' && !cancelled() }} steps: - name: Checkout Code - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Branch Creation Check id: branch-creation @@ -105,7 +105,7 @@ jobs: if: ${{ needs.check-branch.outputs.branch-create == 'true' && !cancelled() }} steps: - name: Checkout 
Code - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: fetch-depth: 0 token: ${{ secrets.GH_ACCESS_TOKEN }} @@ -194,7 +194,7 @@ jobs: if: ${{ needs.check-branch.outputs.tag-create == 'true' && needs.create-branch.result == 'success' && !cancelled() }} steps: - name: Checkout Code - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: ref: ${{ needs.check-branch.outputs.branch-name }} fetch-depth: 0 diff --git a/.github/workflows/node-zxcron-release-fsts-regression.yaml b/.github/workflows/node-zxcron-release-fsts-regression.yaml index 447aeae09137..b227c17865b7 100644 --- a/.github/workflows/node-zxcron-release-fsts-regression.yaml +++ b/.github/workflows/node-zxcron-release-fsts-regression.yaml @@ -33,7 +33,7 @@ jobs: runs-on: [self-hosted, Linux, small, scheduler, ephemeral] steps: - name: Checkout Code - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: fetch-depth: 0 diff --git a/.github/workflows/node-zxf-snyk-monitor.yaml b/.github/workflows/node-zxf-snyk-monitor.yaml index 86703beadb95..cc40c02a3d57 100644 --- a/.github/workflows/node-zxf-snyk-monitor.yaml +++ b/.github/workflows/node-zxf-snyk-monitor.yaml @@ -32,7 +32,7 @@ jobs: runs-on: [self-hosted, Linux, medium, ephemeral] steps: - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Setup Java uses: actions/setup-java@0ab4596768b603586c0de567f2430c30f5b0d2b0 # v3.13.0 diff --git a/.github/workflows/platform-zxcron-release-jrs-regression.yaml b/.github/workflows/platform-zxcron-release-jrs-regression.yaml index 9fe6057c3f6f..aa6d3f41f69d 100644 --- a/.github/workflows/platform-zxcron-release-jrs-regression.yaml +++ b/.github/workflows/platform-zxcron-release-jrs-regression.yaml @@ -33,7 +33,7 @@ jobs: runs-on: [self-hosted, Linux, small, scheduler, ephemeral] steps: - name: Checkout Code - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: fetch-depth: 0 diff --git a/.github/workflows/zxc-jrs-regression.yaml b/.github/workflows/zxc-jrs-regression.yaml index 9379496b9f47..5f80fa78eac5 100644 --- a/.github/workflows/zxc-jrs-regression.yaml +++ b/.github/workflows/zxc-jrs-regression.yaml @@ -165,7 +165,7 @@ jobs: runs-on: [self-hosted, Linux, large, ephemeral] steps: - name: Checkout Platform Code - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: ref: ${{ inputs.ref || inputs.branch-name || '' }} fetch-depth: 0 @@ -182,7 +182,7 @@ jobs: echo "branch-name=${BRANCH_NAME}" >> "${GITHUB_OUTPUT}" - name: Checkout Regression Code - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: path: platform-sdk/regression repository: swirlds/swirlds-platform-regression @@ -256,7 +256,7 @@ jobs: fi - name: Install SSH key - uses: shimataro/ssh-key-action@685d0f20da72e4b53cc81d373a2ed0a867770e46 # v2.5.1 + uses: shimataro/ssh-key-action@38b53cb2f445ea2e0eb8872407e366677c41dbc6 # v2.6.1 with: name: jrs-ssh-keyfile key: 
${{ secrets.jrs-ssh-key-file }} diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/MigrationContext.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/MigrationContext.java index c9b2f4337cc0..bb8e97087f88 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/MigrationContext.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/MigrationContext.java @@ -77,4 +77,10 @@ public interface MigrationContext { @NonNull HandleThrottleParser handleThrottling(); + + /** + * Consumes and returns the next entity number. For use by migrations that need to create entities. + * @return the next entity number + */ + long newEntityNum(); } diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/HandleContext.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/HandleContext.java index dc97c3bc7c8e..1c26cbabe14c 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/HandleContext.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/HandleContext.java @@ -430,6 +430,37 @@ T dispatchReversiblePrecedingTransaction( @NonNull Predicate verifier, AccountID syntheticPayer); + /** + * Dispatches preceding transaction that can be removed. + * + *
<p>
A removable preceding transaction depends on the current transaction. That means if the user transaction + * fails, a removable preceding transaction is automatically removed and not exported. The state changes introduced by a + * removable preceding transaction are automatically committed together with the parent transaction. + * + *
<p>
This method can only be called by a {@link TransactionCategory#USER}-transaction and only as long as no state + * changes have been introduced by the user transaction (either by storing state or by calling a child + * transaction). + * + *
<p>
The provided {@link Predicate} callback will be called to verify simple keys when the child transaction calls + * any of the {@code verificationFor} methods. + * + * @param txBody the {@link TransactionBody} of the transaction to dispatch + * @param recordBuilderClass the record builder class of the transaction + * @param verifier a {@link Predicate} that will be used to validate primitive keys + * @param syntheticPayer the payer of the transaction + * @return the record builder of the transaction + * @throws NullPointerException if {@code txBody} is {@code null} + * @throws IllegalArgumentException if the transaction is not a {@link TransactionCategory#USER}-transaction or if + * the record builder type is unknown to the app + * @throws IllegalStateException if the current transaction has already introduced state changes + */ + @NonNull + T dispatchRemovablePrecedingTransaction( + @NonNull TransactionBody txBody, + @NonNull Class recordBuilderClass, + @NonNull Predicate verifier, + AccountID syntheticPayer); + /** * Dispatches a reversible preceding transaction that already has an ID. * @@ -603,7 +634,7 @@ default T dispatchRemovableChildTransaction( SavepointStack savepointStack(); /** - * Revert all child records in {@link RecordListBuilder}. + * Revert all child records in RecordListBuilder. */ void revertChildRecords(); diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/record/GenesisRecordsBuilder.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/record/GenesisRecordsBuilder.java index 8c4faa0154e7..95f6622fd928 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/record/GenesisRecordsBuilder.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/workflows/record/GenesisRecordsBuilder.java @@ -45,4 +45,9 @@ public interface GenesisRecordsBuilder { * Tracks the treasury clones created during node startup */ void treasuryClones(@NonNull final Map accounts); + + /** + * Tracks the blocklist accounts created during node startup + */ + void blocklistAccounts(@NonNull final Map accounts); } diff --git a/hedera-node/hedera-app-spi/src/testFixtures/java/com/hedera/node/app/spi/fixtures/state/NoOpGenesisRecordsBuilder.java b/hedera-node/hedera-app-spi/src/testFixtures/java/com/hedera/node/app/spi/fixtures/state/NoOpGenesisRecordsBuilder.java index 3a5af943dd9d..97988d2949d7 100644 --- a/hedera-node/hedera-app-spi/src/testFixtures/java/com/hedera/node/app/spi/fixtures/state/NoOpGenesisRecordsBuilder.java +++ b/hedera-node/hedera-app-spi/src/testFixtures/java/com/hedera/node/app/spi/fixtures/state/NoOpGenesisRecordsBuilder.java @@ -58,4 +58,9 @@ public void miscAccounts(@NonNull final Map accounts) { // Intentional no-op } + + @Override + public void blocklistAccounts(@NonNull Map accounts) { + // Intentional no-op + } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java index ed0eceb9ad7b..afb97728247e 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java @@ -37,6 +37,7 @@ import com.hedera.node.app.fees.congestion.EntityUtilizationMultiplier; import com.hedera.node.app.fees.congestion.ThrottleMultiplier; import com.hedera.node.app.ids.EntityIdService; +import com.hedera.node.app.ids.WritableEntityIdStore; import com.hedera.node.app.info.CurrentPlatformStatusImpl; 
import com.hedera.node.app.info.NetworkInfoImpl; import com.hedera.node.app.info.SelfNodeInfoImpl; @@ -97,6 +98,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Locale; +import java.util.Objects; import java.util.Set; import java.util.function.IntSupplier; import org.apache.logging.log4j.LogManager; @@ -240,9 +242,11 @@ public Hedera(@NonNull final ConstructableRegistry constructableRegistry) { // Create all the service implementations logger.info("Registering services"); + // FUTURE: Use the service loader framework to load these services! this.servicesRegistry = new ServicesRegistryImpl(constructableRegistry, genesisRecordsBuilder); Set.of( + new EntityIdService(), new ConsensusServiceImpl(), CONTRACT_SERVICE, new FileServiceImpl(), @@ -253,7 +257,6 @@ public Hedera(@NonNull final ConstructableRegistry constructableRegistry) { new UtilServiceImpl(), new RecordCacheService(), new BlockRecordService(), - new EntityIdService(), new FeeService(), new CongestionThrottleService()) .forEach(servicesRegistry::register); @@ -418,20 +421,54 @@ private void onMigrate( final var nodeAddress = platform.getAddressBook().getAddress(selfId); final var selfNodeInfo = SelfNodeInfoImpl.of(nodeAddress, version); final var networkInfo = new NetworkInfoImpl(selfNodeInfo, platform, bootstrapConfigProvider); - for (final var registration : servicesRegistry.registrations()) { - // FUTURE We should have metrics here to keep track of how long it takes to migrate each service - final var service = registration.service(); - final var serviceName = service.getServiceName(); - logger.info("Migrating Service {}", serviceName); - final var registry = (MerkleSchemaRegistry) registration.registry(); - registry.migrate( - state, - previousVersion, - currentVersion, - configProvider.getConfiguration(), - networkInfo, - backendThrottle); - } + + logger.info("Migrating Entity ID Service as pre-requisite for other services"); + final var entityIdRegistration = servicesRegistry.registrations().stream() + .filter(service -> EntityIdService.NAME.equals(service.service().getServiceName())) + .findFirst() + .orElseThrow(); + final var entityIdRegistry = (MerkleSchemaRegistry) entityIdRegistration.registry(); + entityIdRegistry.migrate( + state, + previousVersion, + currentVersion, + configProvider.getConfiguration(), + networkInfo, + backendThrottle, + // We call with null here because we're migrating the entity ID service itself + null); + // Now that the Entity ID Service is migrated, migrate the remaining services + servicesRegistry.registrations().stream() + .filter(r -> !Objects.equals(entityIdRegistration, r)) + .forEach(registration -> { + // FUTURE We should have metrics here to keep track of how long it takes to migrate each service + final var service = registration.service(); + final var serviceName = service.getServiceName(); + logger.info("Migrating Service {}", serviceName); + final var registry = (MerkleSchemaRegistry) registration.registry(); + + // The token service has a dependency on the entity ID service during genesis migrations, so we + // CAREFULLY create a different WritableStates specific to the entity ID service. The different + // WritableStates instances won't be able to see the changes made by each other, but there shouldn't + // be any conflicting changes. We'll inject this into the MigrationContext below to enable + // generation of entity IDs. 
+ final var entityIdWritableStates = state.createWritableStates(EntityIdService.NAME); + final var entityIdStore = new WritableEntityIdStore(entityIdWritableStates); + + registry.migrate( + state, + previousVersion, + currentVersion, + configProvider.getConfiguration(), + networkInfo, + backendThrottle, + requireNonNull(entityIdStore)); + // Now commit any changes that were made to the entity ID state (since other service entities could + // depend on newly-generated entity IDs) + if (entityIdWritableStates instanceof MerkleHederaState.MerkleWritableStates mws) { + mws.commit(); + } + }); logger.info("Migration complete"); } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/ids/EntityIdService.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/ids/EntityIdService.java index 74d0a7975b31..ee79b094a0d7 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/ids/EntityIdService.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/ids/EntityIdService.java @@ -34,8 +34,8 @@ @SuppressWarnings("rawtypes") public class EntityIdService implements Service { public static final String NAME = "EntityIdService"; + public static final String ENTITY_ID_STATE_KEY = "ENTITY_ID"; private static final SemanticVersion GENESIS_VERSION = SemanticVersion.DEFAULT; - static final String ENTITY_ID_STATE_KEY = "ENTITY_ID"; /** {@inheritDoc} */ @NonNull diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/MerkleSchemaRegistry.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/MerkleSchemaRegistry.java index cf2c81b2ccd5..21c16674d8a5 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/MerkleSchemaRegistry.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/merkle/MerkleSchemaRegistry.java @@ -20,6 +20,7 @@ import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.SemanticVersion; +import com.hedera.node.app.ids.WritableEntityIdStore; import com.hedera.node.app.spi.HapiUtils; import com.hedera.node.app.spi.Service; import com.hedera.node.app.spi.info.NetworkInfo; @@ -154,7 +155,8 @@ public void migrate( @NonNull final SemanticVersion currentVersion, @NonNull final Configuration config, @NonNull final NetworkInfo networkInfo, - @NonNull final HandleThrottleParser handleThrottling) { + @NonNull final HandleThrottleParser handleThrottling, + @Nullable final WritableEntityIdStore entityIdStore) { requireNonNull(hederaState); requireNonNull(currentVersion); requireNonNull(config); @@ -165,6 +167,7 @@ public void migrate( // of those schemas, create the new states and remove the old states and migrate the data. final var schemasToApply = computeApplicableSchemas(previousVersion, currentVersion); final var updateInsteadOfMigrate = isSameVersion(previousVersion, currentVersion); + for (final var schema : schemasToApply) { // Now we can migrate the schema and then commit all the changes // We just have one merkle tree -- the just-loaded working tree -- to work from. @@ -221,13 +224,25 @@ public void migrate( remainingStates.removeAll(statesToRemove); final var newStates = new FilteredWritableStates(writeableStates, remainingStates); + // For any changes to state that depend on other services outside the current service, we need a reference + // to the overall state that we can pass into the context. 
This reference to overall state will be strictly + // controlled via the MigrationContext API so that only changes explicitly specified in the interface can be + // made (instead of allowing any arbitrary change to overall state). As above, we won't commit anything + // until after this service's migration final var migrationContext = new MigrationContextImpl( - previousStates, newStates, config, networkInfo, genesisRecordsBuilder, handleThrottling); + previousStates, + newStates, + config, + networkInfo, + genesisRecordsBuilder, + handleThrottling, + entityIdStore); if (updateInsteadOfMigrate) { schema.restart(migrationContext); } else { schema.migrate(migrationContext); } + // Now commit all the service-specific changes made during this service's update or migration if (writeableStates instanceof MerkleHederaState.MerkleWritableStates mws) { mws.commit(); } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/recordcache/RecordCacheImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/recordcache/RecordCacheImpl.java index 53078e27176b..8698bfc7adde 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/recordcache/RecordCacheImpl.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/recordcache/RecordCacheImpl.java @@ -225,7 +225,10 @@ private void addToInMemoryCache( // And all transactions, regardless of the type, are added to the payer-reverse-index, so that queries of // the payer account ID will return all transactions they paid for. final var txId = transactionRecord.transactionIDOrThrow(); - final var isChildTx = transactionRecord.hasParentConsensusTimestamp(); + // For the preceding child records parentConsensusTimestamp is not set, but the nonce will be greater than 1 + // For the following child records parentConsensusTimestamp is also set. So to differentiate child records + // from user records, we check if the nonce is greater than 0. + final var isChildTx = transactionRecord.hasParentConsensusTimestamp() || txId.nonce() > 0; final var userTxId = isChildTx ? txId.copyBuilder().nonce(0).build() : txId; // Get or create the history for this transaction ID. 
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleContextImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleContextImpl.java index 3156fa84b6a0..2645a004780c 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleContextImpl.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleContextImpl.java @@ -19,6 +19,7 @@ import static com.hedera.node.app.spi.HapiUtils.functionOf; import static com.hedera.node.app.spi.workflows.HandleContext.TransactionCategory.CHILD; import static com.hedera.node.app.spi.workflows.HandleContext.TransactionCategory.PRECEDING; +import static com.hedera.node.app.workflows.handle.HandleContextImpl.PrecedingTransactionCategory.LIMITED_CHILD_RECORDS; import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.AccountID; @@ -457,7 +458,7 @@ public T dispatchPrecedingTransaction( @NonNull final Predicate callback, @NonNull final AccountID syntheticPayerId) { final Supplier recordBuilderFactory = - () -> recordListBuilder.addPreceding(configuration()); + () -> recordListBuilder.addPreceding(configuration(), LIMITED_CHILD_RECORDS); final var result = doDispatchPrecedingTransaction( syntheticPayerId, txBody, recordBuilderFactory, recordBuilderClass, callback); @@ -481,6 +482,19 @@ public T dispatchReversiblePrecedingTransaction( syntheticPayerId, txBody, recordBuilderFactory, recordBuilderClass, callback); } + @Override + @NonNull + public T dispatchRemovablePrecedingTransaction( + @NonNull final TransactionBody txBody, + @NonNull final Class recordBuilderClass, + @NonNull final Predicate callback, + @NonNull final AccountID syntheticPayerId) { + final Supplier recordBuilderFactory = + () -> recordListBuilder.addRemovablePreceding(configuration()); + return doDispatchPrecedingTransaction( + syntheticPayerId, txBody, recordBuilderFactory, recordBuilderClass, callback); + } + @NonNull public T doDispatchPrecedingTransaction( @NonNull final AccountID syntheticPayer, @@ -495,14 +509,19 @@ public T doDispatchPrecedingTransaction( if (category != TransactionCategory.USER) { throw new IllegalArgumentException("Only user-transactions can dispatch preceding transactions"); } + if (stack.depth() > 1) { throw new IllegalStateException( "Cannot dispatch a preceding transaction when a savepoint has been created"); } - if (current().isModified()) { - throw new IllegalStateException("Cannot dispatch a preceding transaction when the state has been modified"); - } + // This condition fails, because for auto-account creation we charge fees, before dispatching the transaction, + // and the state will be modified. 
+ + // if (current().isModified()) { + // throw new IllegalStateException("Cannot dispatch a preceding transaction when the state + // has been modified"); + // } // run the transaction final var precedingRecordBuilder = recordBuilderFactory.get(); @@ -643,7 +662,7 @@ private void dispatchSyntheticTxn( childStack.commitFullStack(); } catch (HandleException e) { childRecordBuilder.status(e.getStatus()); - recordListBuilder.revertChildrenOf(childRecordBuilder); + recordListBuilder.revertChildrenOf(recordBuilder); } } @@ -657,7 +676,7 @@ public T addChildRecordBuilder(@NonNull final Class recordBuilderClass) { @Override @NonNull public T addPrecedingChildRecordBuilder(@NonNull final Class recordBuilderClass) { - final var result = recordListBuilder.addPreceding(configuration()); + final var result = recordListBuilder.addPreceding(configuration(), LIMITED_CHILD_RECORDS); return castRecordBuilder(result, recordBuilderClass); } @@ -678,4 +697,9 @@ public SavepointStack savepointStack() { public void revertChildRecords() { recordListBuilder.revertChildrenOf(recordBuilder); } + + public enum PrecedingTransactionCategory { + UNLIMITED_CHILD_RECORDS, + LIMITED_CHILD_RECORDS + } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleWorkflow.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleWorkflow.java index a2c698d10c6d..07b44b88b310 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleWorkflow.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/HandleWorkflow.java @@ -20,9 +20,11 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.DUPLICATE_TRANSACTION; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_PAYER_SIGNATURE; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_SIGNATURE; +import static com.hedera.hapi.node.base.ResponseCodeEnum.MAX_CHILD_RECORDS_EXCEEDED; import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; import static com.hedera.hapi.node.base.ResponseCodeEnum.SUCCESS; import static com.hedera.node.app.spi.HapiUtils.isHollow; +import static com.hedera.node.app.spi.key.KeyUtils.IMMUTABILITY_SENTINEL_KEY; import static com.hedera.node.app.state.HederaRecordCache.DuplicateCheckResult.NO_DUPLICATE; import static com.hedera.node.app.state.HederaRecordCache.DuplicateCheckResult.SAME_NODE; import static com.hedera.node.app.state.logging.TransactionStateLogger.logStartEvent; @@ -44,6 +46,8 @@ import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.hapi.node.base.SignatureMap; import com.hedera.hapi.node.base.Transaction; +import com.hedera.hapi.node.state.token.Account; +import com.hedera.hapi.node.token.CryptoUpdateTransactionBody; import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.fees.ExchangeRateManager; import com.hedera.node.app.fees.FeeAccumulatorImpl; @@ -66,11 +70,13 @@ import com.hedera.node.app.spi.fees.Fees; import com.hedera.node.app.spi.info.NetworkInfo; import com.hedera.node.app.spi.info.NodeInfo; +import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleContext.TransactionCategory; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.InsufficientNonFeeDebitsException; import com.hedera.node.app.spi.workflows.InsufficientServiceFeeException; import com.hedera.node.app.spi.workflows.PreCheckException; +import 
com.hedera.node.app.spi.workflows.record.SingleTransactionRecordBuilder; import com.hedera.node.app.state.HederaRecordCache; import com.hedera.node.app.state.HederaState; import com.hedera.node.app.throttle.NetworkUtilizationManager; @@ -88,6 +94,7 @@ import com.hedera.node.app.workflows.prehandle.PreHandleWorkflow; import com.hedera.node.config.ConfigProvider; import com.hedera.node.config.VersionedConfiguration; +import com.hedera.node.config.data.ConsensusConfig; import com.hedera.node.config.data.ContractsConfig; import com.hedera.node.config.data.HederaConfig; import com.hedera.pbj.runtime.io.buffer.Bytes; @@ -102,6 +109,8 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Map; +import java.util.Objects; +import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import javax.inject.Inject; import org.apache.logging.log4j.LogManager; @@ -385,6 +394,7 @@ private void handleUserTransaction( networkUtilizationManager.trackFeePayments(payer, consensusNow, stack); } recordBuilder.status(validationResult.responseCodeEnum()); + try { if (validationResult.status() == NODE_DUE_DILIGENCE_FAILURE) { feeAccumulator.chargeNetworkFee(creator.accountId(), fees.networkFee()); @@ -408,12 +418,17 @@ private void handleUserTransaction( } } else { - networkUtilizationManager.trackTxn(transactionInfo, consensusNow, stack); - if (!authorizer.hasWaivedFees(payer, transactionInfo.functionality(), txBody)) { - // privileged transactions are not charged fees - feeAccumulator.chargeFees(payer, creator.accountId(), fees); - } try { + // Any hollow accounts that must sign to have all needed signatures, need to be finalized + // as a result of transaction being handled. + finalizeHollowAccounts(context, configuration, preHandleResult.hollowAccounts(), verifier); + + networkUtilizationManager.trackTxn(transactionInfo, consensusNow, stack); + if (!authorizer.hasWaivedFees(payer, transactionInfo.functionality(), txBody)) { + // privileged transactions are not charged fees + feeAccumulator.chargeFees(payer, creator.accountId(), fees); + } + if (networkUtilizationManager.wasLastTxnGasThrottled()) { // Don't charge the payer the service fee component, because the user-submitted transaction // was fully valid but network capacity was unavailable to satisfy it @@ -477,6 +492,53 @@ private void handleUserTransaction( blockRecordManager.endUserTransaction(recordListResult.records().stream(), state); } + /** + * Updates key on the hollow accounts that need to be finalized. This is done by dispatching a preceding + * synthetic update transaction. The ksy is derived from the signature expansion, by looking up the ECDSA key + * for the alias. 
+ * + * @param context the handle context + * @param configuration the configuration + * @param accounts the set of hollow accounts that need to be finalized + * @param verifier the key verifier + */ + private void finalizeHollowAccounts( + @NonNull final HandleContext context, + @NonNull final Configuration configuration, + @NonNull final Set accounts, + @NonNull final DefaultKeyVerifier verifier) { + final var consensusConfig = configuration.getConfigData(ConsensusConfig.class); + final var precedingHollowAccountRecords = accounts.size(); + final var maxRecords = consensusConfig.handleMaxPrecedingRecords(); + // If the hollow accounts that need to be finalized is greater than the max preceding + // records allowed throw an exception + if (precedingHollowAccountRecords >= maxRecords) { + throw new HandleException(MAX_CHILD_RECORDS_EXCEEDED); + } else { + for (final var hollowAccount : accounts) { + // get the verified key for this hollow account + final var verification = Objects.requireNonNull( + verifier.verificationFor(hollowAccount.alias()), + "Required hollow account verified signature did not exist"); + if (verification.key() != null) { + if (!IMMUTABILITY_SENTINEL_KEY.equals(hollowAccount.keyOrThrow())) { + logger.error("Hollow account {} has a key other than the sentinel key", hollowAccount); + return; + } + // dispatch synthetic update transaction for updating key on this hollow account + final var syntheticUpdateTxn = TransactionBody.newBuilder() + .cryptoUpdateAccount(CryptoUpdateTransactionBody.newBuilder() + .accountIDToUpdate(hollowAccount.accountId()) + .key(verification.key()) + .build()) + .build(); + context.dispatchPrecedingTransaction( + syntheticUpdateTxn, SingleTransactionRecordBuilder.class, k -> true, context.payer()); + } + } + } + } + @NonNull private FeeAccumulator createFeeAccumulator( @NonNull final SavepointStackImpl stack, @@ -673,8 +735,8 @@ private PreHandleResult addMissingSignatures( // re-expand keys only if any of the keys have changed final var previousResults = previousResult.verificationResults(); - final var currentRequiredPayerKeys = context.requiredNonPayerKeys(); - final var currentOptionalPayerKeys = context.optionalNonPayerKeys(); + final var currentRequiredNonPayerKeys = context.requiredNonPayerKeys(); + final var currentOptionalNonPayerKeys = context.optionalNonPayerKeys(); final var anyKeyChanged = haveKeyChanges(previousResults, context); // If none of the keys changed then non need to re-expand all signatures. 
if (!anyKeyChanged) { @@ -691,9 +753,11 @@ private PreHandleResult addMissingSignatures( signatureExpander.expand(sigPairs, expanded); if (payerKey != null && !isHollow(payer)) { signatureExpander.expand(payerKey, sigPairs, expanded); + } else if (isHollow(payer)) { + context.requireSignatureForHollowAccount(payer); } - signatureExpander.expand(currentRequiredPayerKeys, sigPairs, expanded); - signatureExpander.expand(currentOptionalPayerKeys, sigPairs, expanded); + signatureExpander.expand(currentRequiredNonPayerKeys, sigPairs, expanded); + signatureExpander.expand(currentOptionalNonPayerKeys, sigPairs, expanded); // remove all keys that were already verified for (final var it = expanded.iterator(); it.hasNext(); ) { diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/TokenContextImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/TokenContextImpl.java index d30a52ccc118..ef36ab6e8e8a 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/TokenContextImpl.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/TokenContextImpl.java @@ -16,6 +16,8 @@ package com.hedera.node.app.workflows.handle; +import static com.hedera.node.app.workflows.handle.HandleContextImpl.PrecedingTransactionCategory.LIMITED_CHILD_RECORDS; +import static com.hedera.node.app.workflows.handle.HandleContextImpl.PrecedingTransactionCategory.UNLIMITED_CHILD_RECORDS; import static java.util.Objects.requireNonNull; import com.hedera.node.app.service.token.TokenService; @@ -92,7 +94,14 @@ public void forEachChildRecord(@NonNull Class recordBuilderClass, @NonNul @NonNull @Override public T addPrecedingChildRecordBuilder(@NonNull Class recordBuilderClass) { - final var result = recordListBuilder.addPreceding(configuration()); + final var result = recordListBuilder.addPreceding(configuration(), LIMITED_CHILD_RECORDS); + return castRecordBuilder(result, recordBuilderClass); + } + + @NonNull + @Override + public T addUncheckedPrecedingChildRecordBuilder(@NonNull Class recordBuilderClass) { + final var result = recordListBuilder.addPreceding(configuration(), UNLIMITED_CHILD_RECORDS); return castRecordBuilder(result, recordBuilderClass); } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/GenesisRecordsConsensusHook.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/GenesisRecordsConsensusHook.java index 1bbde8731847..c51252fbb3f6 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/GenesisRecordsConsensusHook.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/GenesisRecordsConsensusHook.java @@ -55,6 +55,7 @@ public class GenesisRecordsConsensusHook implements GenesisRecordsBuilder, Conse private Map stakingAccounts = new HashMap<>(); private Map miscAccounts = new HashMap<>(); private Map treasuryClones = new HashMap<>(); + private Map blocklistAccounts = new HashMap<>(); private Instant consensusTimeOfLastHandledTxn = null; @@ -94,6 +95,11 @@ public void process(@NonNull final TokenContext context) { createAccountRecordBuilders(treasuryClones, context, TREASURY_CLONE_MEMO); treasuryClones = Collections.emptyMap(); } + + if (!blocklistAccounts.isEmpty()) { + createAccountRecordBuilders(blocklistAccounts, context, null); + blocklistAccounts = Collections.emptyMap(); + } } @Override @@ -116,6 +122,11 @@ public void treasuryClones(@NonNull 
final Map accounts) { + blocklistAccounts.putAll(requireNonNull(accounts)); + } + @VisibleForTesting void setLastConsensusTime(@Nullable final Instant lastConsensusTime) { consensusTimeOfLastHandledTxn = lastConsensusTime; @@ -137,7 +148,10 @@ private void createAccountRecordBuilders( .sorted(Comparator.comparingLong(acct -> acct.accountId().accountNum())) .toList(); for (final Account key : orderedAccts) { - final var recordBuilder = context.addPrecedingChildRecordBuilder(GenesisAccountRecordBuilder.class); + // we create preceding records on genesis for each system account created. + // This is an exception and should not fail with MAX_CHILD_RECORDS_EXCEEDED + final var recordBuilder = + context.addUncheckedPrecedingChildRecordBuilder(GenesisAccountRecordBuilder.class); final var accountId = requireNonNull(key.accountId()); recordBuilder.accountID(accountId); if (recordMemo != null) { diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/MigrationContextImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/MigrationContextImpl.java index 8db3b2d08659..a3d2200659fb 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/MigrationContextImpl.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/MigrationContextImpl.java @@ -18,6 +18,7 @@ import static java.util.Objects.requireNonNull; +import com.hedera.node.app.ids.WritableEntityIdStore; import com.hedera.node.app.spi.info.NetworkInfo; import com.hedera.node.app.spi.state.MigrationContext; import com.hedera.node.app.spi.state.ReadableStates; @@ -26,6 +27,7 @@ import com.hedera.node.app.spi.workflows.record.GenesisRecordsBuilder; import com.swirlds.config.api.Configuration; import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; /** * An implementation of {@link MigrationContext}. @@ -35,6 +37,9 @@ * @param configuration The configuration to use * @param genesisRecordsBuilder The instance responsible for genesis records * @param handleThrottling The instance responsible for handle throttling + * @param writableEntityIdStore The instance responsible for generating new entity IDs (ONLY during + * migrations). 
Note that this is nullable only because it cannot exist + * when the entity ID service itself is being migrated */ public record MigrationContextImpl( @NonNull ReadableStates previousStates, @@ -42,7 +47,8 @@ public record MigrationContextImpl( @NonNull Configuration configuration, @NonNull NetworkInfo networkInfo, @NonNull GenesisRecordsBuilder genesisRecordsBuilder, - @NonNull HandleThrottleParser handleThrottling) + @NonNull HandleThrottleParser handleThrottling, + @Nullable WritableEntityIdStore writableEntityIdStore) implements MigrationContext { public MigrationContextImpl { @@ -53,4 +59,10 @@ public record MigrationContextImpl( requireNonNull(genesisRecordsBuilder); requireNonNull(handleThrottling); } + + @Override + public long newEntityNum() { + return requireNonNull(writableEntityIdStore, "Entity ID store needs to exist first") + .incrementAndGet(); + } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/RecordListBuilder.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/RecordListBuilder.java index a4c3043b7a7b..f1ed9e7104bf 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/RecordListBuilder.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/record/RecordListBuilder.java @@ -16,12 +16,15 @@ package com.hedera.node.app.workflows.handle.record; +import static com.hedera.node.app.workflows.handle.HandleContextImpl.PrecedingTransactionCategory.LIMITED_CHILD_RECORDS; +import static com.hedera.node.app.workflows.handle.HandleContextImpl.PrecedingTransactionCategory.UNLIMITED_CHILD_RECORDS; import static java.util.Collections.unmodifiableList; import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.state.SingleTransactionRecord; +import com.hedera.node.app.workflows.handle.HandleContextImpl; import com.hedera.node.app.workflows.handle.record.SingleTransactionRecordBuilderImpl.ReversingBehavior; import com.hedera.node.config.data.ConsensusConfig; import com.swirlds.config.api.Configuration; @@ -30,6 +33,7 @@ import java.util.ArrayList; import java.util.EnumSet; import java.util.List; +import java.util.Objects; /** * This class manages all record builders that are used while a single user transaction is running. 
@@ -127,18 +131,27 @@ public List childRecordBuilders() { * @throws NullPointerException if {@code consensusConfig} is {@code null} * @throws HandleException if no more preceding slots are available */ - public SingleTransactionRecordBuilderImpl addPreceding(@NonNull final Configuration configuration) { + public SingleTransactionRecordBuilderImpl addPreceding( + @NonNull final Configuration configuration, + final HandleContextImpl.PrecedingTransactionCategory precedingTxnCategory) { requireNonNull(configuration, CONFIGURATION_MUST_NOT_BE_NULL); - return doAddPreceding(configuration, ReversingBehavior.IRREVERSIBLE); + return doAddPreceding(configuration, ReversingBehavior.IRREVERSIBLE, precedingTxnCategory); } public SingleTransactionRecordBuilderImpl addReversiblePreceding(@NonNull final Configuration configuration) { requireNonNull(configuration, CONFIGURATION_MUST_NOT_BE_NULL); - return doAddPreceding(configuration, ReversingBehavior.REVERSIBLE); + return doAddPreceding(configuration, ReversingBehavior.REVERSIBLE, LIMITED_CHILD_RECORDS); + } + + public SingleTransactionRecordBuilderImpl addRemovablePreceding(@NonNull final Configuration configuration) { + requireNonNull(configuration, CONFIGURATION_MUST_NOT_BE_NULL); + return doAddPreceding(configuration, ReversingBehavior.REMOVABLE, LIMITED_CHILD_RECORDS); } public SingleTransactionRecordBuilderImpl doAddPreceding( - @NonNull final Configuration configuration, @NonNull final ReversingBehavior reversingBehavior) { + @NonNull final Configuration configuration, + @NonNull final ReversingBehavior reversingBehavior, + @NonNull final HandleContextImpl.PrecedingTransactionCategory precedingTxnCategory) { // Lazily create. FUTURE: We should reuse the RecordListBuilder between handle calls, and we should // reuse these lists. Then we can omit this lazy create entirely and produce less garbage overall. if (precedingTxnRecordBuilders == null) { @@ -151,7 +164,10 @@ public SingleTransactionRecordBuilderImpl doAddPreceding( final var consensusConfig = configuration.getConfigData(ConsensusConfig.class); final var precedingCount = precedingTxnRecordBuilders.size(); final var maxRecords = consensusConfig.handleMaxPrecedingRecords(); - if (precedingCount >= maxRecords) { + // On genesis start we create almost 700 preceding child records for creating system accounts. + // Also, we should not be failing for stake update transaction records that happen every midnight. + // In these two cases need to allow for this, but we don't want to allow for this on every handle call. + if (precedingTxnRecordBuilders.size() >= maxRecords && (precedingTxnCategory != UNLIMITED_CHILD_RECORDS)) { // We do not have a MAX_PRECEDING_RECORDS_EXCEEDED error, so use this. throw new HandleException(ResponseCodeEnum.MAX_CHILD_RECORDS_EXCEEDED); } @@ -249,6 +265,9 @@ public void revertChildrenOf(@NonNull final SingleTransactionRecordBuilderImpl r if (childRecordBuilders == null) { childRecordBuilders = new ArrayList<>(); } + if (precedingTxnRecordBuilders == null) { + precedingTxnRecordBuilders = new ArrayList<>(); + } // Find the index into the list of records from which to revert. If the record builder is the user transaction, // then we start at index 0, which is the first child transaction after the user transaction. 
If the record @@ -259,13 +278,17 @@ public void revertChildrenOf(@NonNull final SingleTransactionRecordBuilderImpl r index = 0; // The user transaction fails and therefore we also have to revert preceding transactions - if (precedingTxnRecordBuilders != null) { - for (final var preceding : precedingTxnRecordBuilders) { + if (!precedingTxnRecordBuilders.isEmpty()) { + for (int i = 0; i < precedingTxnRecordBuilders.size(); i++) { + final var preceding = precedingTxnRecordBuilders.get(i); if (preceding.reversingBehavior() == ReversingBehavior.REVERSIBLE && SUCCESSES.contains(preceding.status())) { preceding.status(ResponseCodeEnum.REVERTED_SUCCESS); + } else if (preceding.reversingBehavior() == ReversingBehavior.REMOVABLE) { + precedingTxnRecordBuilders.set(i, null); } } + precedingTxnRecordBuilders.removeIf(Objects::isNull); } } else { // Traverse from end to start, since we are most likely going to be reverting the most recent child, diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleContextImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleContextImpl.java index 5a6bb1976d79..87004b40271a 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleContextImpl.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleContextImpl.java @@ -16,7 +16,7 @@ package com.hedera.node.app.workflows.prehandle; -import static com.hedera.node.app.service.token.impl.util.TokenHandlerHelper.verifyIsNotImmutableAccount; +import static com.hedera.node.app.service.token.impl.util.TokenHandlerHelper.verifyNotEmptyKey; import static com.hedera.node.app.spi.HapiUtils.EMPTY_KEY_LIST; import static com.hedera.node.app.spi.HapiUtils.isHollow; import static com.hedera.node.app.spi.key.KeyUtils.isValid; @@ -37,6 +37,7 @@ import com.hedera.node.app.spi.workflows.TransactionKeys; import com.hedera.node.app.workflows.dispatcher.ReadableStoreFactory; import com.hedera.node.app.workflows.dispatcher.TransactionDispatcher; +import com.hedera.node.config.data.AccountsConfig; import com.hedera.pbj.runtime.io.buffer.Bytes; import com.swirlds.config.api.Configuration; import edu.umd.cs.findbugs.annotations.NonNull; @@ -200,7 +201,7 @@ public Set optionalNonPayerKeys() { @Override public PreHandleContext optionalKey(@NonNull final Key key) throws PreCheckException { // Verify this key isn't for an immutable account - verifyIsNotImmutableAccount(key, ResponseCodeEnum.INVALID_ACCOUNT_ID); + verifyNotEmptyKey(key, ResponseCodeEnum.INVALID_ACCOUNT_ID); if (!key.equals(payerKey) && isValid(key)) { optionalNonPayerKeys.add(key); @@ -270,8 +271,7 @@ public PreHandleContext requireKeyOrThrow(@Nullable final Key key, @NonNull fina } // Verify this key isn't for an immutable account - verifyIsNotImmutableAccount(key, responseCode); - + verifyNotEmptyKey(key, responseCode); return requireKey(key); } @@ -290,7 +290,14 @@ public PreHandleContext requireKeyOrThrow( if (account == null) { throw new PreCheckException(responseCode); } - + // If it is hollow account, and we require this to sign, we need to finalize the account + // with the corresponding ECDSA key in handle + if (isHollow(account)) { + requiredHollowAccounts.add(account); + return this; + } + // Verify this key isn't for an immutable account + verifyNotStakingAccounts(account.accountIdOrThrow(), responseCode); final var key = account.key(); if (!isValid(key)) { // Or if it is a Contract Key? 
Or if it is an empty key? // Or a KeyList with no @@ -298,9 +305,6 @@ public PreHandleContext requireKeyOrThrow( throw new PreCheckException(responseCode); } - // Verify this key isn't for an immutable account - verifyIsNotImmutableAccount(key, responseCode); - return requireKey(key); } @@ -318,17 +322,20 @@ public PreHandleContext requireKeyOrThrow( if (account == null) { throw new PreCheckException(responseCode); } - + // If it is hollow account, and we require this to sign, we need to finalize the account + // with the corresponding ECDSA key in handle + if (isHollow(account)) { + requiredHollowAccounts.add(account); + return this; + } + // Verify this key isn't for an immutable account + verifyNotStakingAccounts(account.accountIdOrThrow(), responseCode); final var key = account.key(); if (!isValid(key)) { // Or if it is a Contract Key? Or if it is an empty key? // Or a KeyList with no // keys? Or KeyList with Contract keys only? throw new PreCheckException(responseCode); } - - // Verify this key isn't for an immutable account - verifyIsNotImmutableAccount(key, responseCode); - return requireKey(key); } @@ -353,7 +360,14 @@ public PreHandleContext requireKeyIfReceiverSigRequired( if (!account.receiverSigRequired()) { return this; } - + // If it is hollow account, and we require this to sign, we need to finalize the account + // with the corresponding ECDSA key in handle + if (isHollow(account)) { + requiredHollowAccounts.add(account); + return this; + } + // Verify this key isn't for an immutable account + verifyNotStakingAccounts(account.accountIdOrThrow(), responseCode); // We will require the key. If the key isn't present, then we will throw the given response code. final var key = account.key(); if (key == null @@ -362,10 +376,6 @@ public PreHandleContext requireKeyIfReceiverSigRequired( // keys? Or KeyList with Contract keys only? throw new PreCheckException(responseCode); } - - // Verify this key isn't for an immutable account - verifyIsNotImmutableAccount(key, responseCode); - return requireKey(key); } @@ -390,7 +400,14 @@ public PreHandleContext requireKeyIfReceiverSigRequired( if (!account.receiverSigRequired()) { return this; } - + // If it is hollow account, and we require this to sign, we need to finalize the account + // with the corresponding ECDSA key in handle + if (isHollow(account)) { + requiredHollowAccounts.add(account); + return this; + } + // Verify this key isn't for an immutable account + verifyNotStakingAccounts(account.accountIdOrThrow(), responseCode); // We will require the key. If the key isn't present, then we will throw the given response code. final var key = account.key(); if (!isValid(key)) { // Or if it is a Contract Key? Or if it is an empty key? @@ -398,10 +415,6 @@ public PreHandleContext requireKeyIfReceiverSigRequired( // keys? Or KeyList with Contract keys only? throw new PreCheckException(responseCode); } - - // Verify this key isn't for an immutable account - verifyIsNotImmutableAccount(key, responseCode); - return requireKey(key); } @@ -469,4 +482,21 @@ public String toString() { + innerContext + ", storeFactory=" + storeFactory + '}'; } + + /** + * Checks that an account does not represent one of the staking accounts + * Throws a {@link PreCheckException} with the designated response code otherwise. 
+ * @param accountID the accountID to check + * @param responseCode the response code to throw + * @throws PreCheckException if the account is considered immutable + */ + private void verifyNotStakingAccounts( + @Nullable final AccountID accountID, @NonNull final ResponseCodeEnum responseCode) + throws PreCheckException { + final var accountNum = accountID != null ? accountID.accountNum() : 0; + final var accountsConfig = configuration.getConfigData(AccountsConfig.class); + if (accountNum == accountsConfig.stakingRewardAccount() || accountNum == accountsConfig.nodeRewardAccount()) { + throw new PreCheckException(responseCode); + } + } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleResult.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleResult.java index 03e78485bca8..31629505c893 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleResult.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleResult.java @@ -46,6 +46,7 @@ * @param txInfo Information about the transaction that is being handled. If the transaction was not parseable, then * this will be null, and an appropriate error status will be set. * @param requiredKeys The set of cryptographic keys that are required to be present. + * @param hollowAccounts The set of required hollow accounts to be finalized * @param verificationResults A map of {@link Future} yielding the * {@link SignatureVerificationFuture} for a given cryptographic key. Ony cryptographic keys * are used as the key of this map. diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleWorkflowImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleWorkflowImpl.java index 7a583e232e73..639329df2e8d 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleWorkflowImpl.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/prehandle/PreHandleWorkflowImpl.java @@ -205,19 +205,7 @@ private PreHandleResult expandAndVerifySignatures( final var expanded = new HashSet(); signatureExpander.expand(originals, expanded); - // 1. Expand the Payer signature - final Key payerKey; - if (!isHollow(payerAccount)) { - // If the account IS a hollow account, then we will discover all such possible signatures when expanding - // all "full prefix" keys above, so we already have it covered. We only need to do this if the payer is - // NOT a hollow account (which is the common case). - payerKey = payerAccount.keyOrThrow(); - signatureExpander.expand(payerKey, originals, expanded); - } else { - payerKey = null; - } - - // 2a. Create the PreHandleContext. This will get reused across several calls to the transaction handlers + // 1a. Create the PreHandleContext. This will get reused across several calls to the transaction handlers final PreHandleContext context; final VersionedConfiguration configuration = configProvider.getConfiguration(); try { @@ -234,6 +222,21 @@ private PreHandleResult expandAndVerifySignatures( "Payer account disappeared between preHandle and preHandleContext creation!", preCheck); } + // 2. Expand the Payer signature + final Key payerKey; + if (!isHollow(payerAccount)) { + // If the account IS a hollow account, then we will discover all such possible signatures when expanding + // all "full prefix" keys above, so we already have it covered. 
We only need to do this if the payer is + // NOT a hollow account (which is the common case). + payerKey = payerAccount.keyOrThrow(); + signatureExpander.expand(payerKey, originals, expanded); + } else { + payerKey = null; + // If the account is hollow and since it is the payer that needs to sign the transaction, we need to + // add to the list of requiredHollowAccounts so that we can finalize the hollow accounts in handle workflow + context.requireSignatureForHollowAccount(payerAccount); + } + // 2b. Call Pre-Transaction Handlers try { // First, perform semantic checks on the transaction diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/merkle/MerkleSchemaRegistryTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/merkle/MerkleSchemaRegistryTest.java index ef413e6f2c7d..e3336a3018d0 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/merkle/MerkleSchemaRegistryTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/merkle/MerkleSchemaRegistryTest.java @@ -23,6 +23,7 @@ import static org.mockito.Mockito.mock; import com.hedera.hapi.node.base.SemanticVersion; +import com.hedera.node.app.ids.WritableEntityIdStore; import com.hedera.node.app.spi.fixtures.state.NoOpGenesisRecordsBuilder; import com.hedera.node.app.spi.fixtures.state.TestSchema; import com.hedera.node.app.spi.info.NetworkInfo; @@ -167,7 +168,8 @@ void migrateFromV9ToV10() { version(10, 0, 0), config, networkInfo, - handleThrottling); + handleThrottling, + mock(WritableEntityIdStore.class)); } } @@ -193,7 +195,13 @@ void setUp() { void nullMerkleThrows() { //noinspection ConstantConditions assertThatThrownBy(() -> schemaRegistry.migrate( - null, versions[0], versions[1], config, networkInfo, handleThrottling)) + null, + versions[0], + versions[1], + config, + networkInfo, + handleThrottling, + mock(WritableEntityIdStore.class))) .isInstanceOf(NullPointerException.class); } @@ -202,7 +210,13 @@ void nullMerkleThrows() { void nullCurrentVersionThrows() { //noinspection ConstantConditions assertThatThrownBy(() -> schemaRegistry.migrate( - merkleTree, versions[0], null, config, networkInfo, handleThrottling)) + merkleTree, + versions[0], + null, + config, + networkInfo, + handleThrottling, + mock(WritableEntityIdStore.class))) .isInstanceOf(NullPointerException.class); } @@ -211,7 +225,13 @@ void nullCurrentVersionThrows() { void nullConfigVersionThrows() { //noinspection ConstantConditions assertThatThrownBy(() -> schemaRegistry.migrate( - merkleTree, versions[0], versions[1], null, networkInfo, handleThrottling)) + merkleTree, + versions[0], + versions[1], + null, + networkInfo, + handleThrottling, + mock(WritableEntityIdStore.class))) .isInstanceOf(NullPointerException.class); } @@ -220,7 +240,13 @@ void nullConfigVersionThrows() { void nullNetworkInfoThrows() { //noinspection ConstantConditions assertThatThrownBy(() -> schemaRegistry.migrate( - merkleTree, versions[0], versions[1], config, null, handleThrottling)) + merkleTree, + versions[0], + versions[1], + config, + null, + handleThrottling, + mock(WritableEntityIdStore.class))) .isInstanceOf(NullPointerException.class); } @@ -229,7 +255,13 @@ void nullNetworkInfoThrows() { void currentVersionLessThanPreviousVersionThrows() { //noinspection ConstantConditions assertThatThrownBy(() -> schemaRegistry.migrate( - merkleTree, versions[5], versions[4], config, networkInfo, handleThrottling)) + merkleTree, + versions[5], + versions[4], + config, + networkInfo, + handleThrottling, + 
mock(WritableEntityIdStore.class))) .isInstanceOf(IllegalArgumentException.class); } @@ -241,7 +273,14 @@ void migrateIsSkippedIfVersionsAreTheSame() { // When it is registered twice and migrate is called schemaRegistry.register(schema); - schemaRegistry.migrate(merkleTree, versions[1], versions[1], config, networkInfo, handleThrottling); + schemaRegistry.migrate( + merkleTree, + versions[1], + versions[1], + config, + networkInfo, + handleThrottling, + mock(WritableEntityIdStore.class)); // Then nothing happens Mockito.verify(schema, Mockito.times(0)).migrate(Mockito.any()); @@ -281,7 +320,13 @@ void migrate(int firstVersion, int lastVersion) { // When we migrate schemaRegistry.migrate( - merkleTree, versions[firstVersion], versions[lastVersion], config, networkInfo, handleThrottling); + merkleTree, + versions[firstVersion], + versions[lastVersion], + config, + networkInfo, + handleThrottling, + mock(WritableEntityIdStore.class)); // Then each schema less than or equal to firstVersion are not called for (int i = 1; i <= firstVersion; i++) { @@ -319,12 +364,20 @@ void migrateWhenSchemasSkipVersions() { final var schemaV1 = new TestSchema(versions[1], () -> called.add(versions[1])); final var schemaV4 = new TestSchema(versions[4], () -> called.add(versions[4])); final var schemaV6 = new TestSchema(versions[6], () -> called.add(versions[6])); + schemaRegistry.register(schemaV1); schemaRegistry.register(schemaV4); schemaRegistry.register(schemaV6); // When we migrate from v0 to v7 - schemaRegistry.migrate(merkleTree, null, versions[7], config, networkInfo, handleThrottling); + schemaRegistry.migrate( + merkleTree, + null, + versions[7], + config, + networkInfo, + handleThrottling, + mock(WritableEntityIdStore.class)); // Then each of v1, v4, and v6 are called assertThat(called).hasSize(3); @@ -480,7 +533,14 @@ void genesis() { // When we migrate schemaRegistry.register(schemaV1); - schemaRegistry.migrate(merkleTree, versions[0], versions[1], config, networkInfo, handleThrottling); + schemaRegistry.migrate( + merkleTree, + versions[0], + versions[1], + config, + networkInfo, + handleThrottling, + mock(WritableEntityIdStore.class)); // Then we see that the values for A, B, and C are available final var readableStates = merkleTree.createReadableStates(FIRST_SERVICE); @@ -499,7 +559,14 @@ void upgradeAndAddAState() { // When we migrate schemaRegistry.register(schemaV1); schemaRegistry.register(schemaV2); - schemaRegistry.migrate(merkleTree, versions[0], versions[2], config, networkInfo, handleThrottling); + schemaRegistry.migrate( + merkleTree, + versions[0], + versions[2], + config, + networkInfo, + handleThrottling, + mock(WritableEntityIdStore.class)); // We should see the v2 state (the delta from v2 after applied atop v1) final var readableStates = merkleTree.createReadableStates(FIRST_SERVICE); @@ -529,7 +596,14 @@ void upgradeWithARemoveStep() { schemaRegistry.register(schemaV1); schemaRegistry.register(schemaV2); schemaRegistry.register(schemaV3); - schemaRegistry.migrate(merkleTree, versions[0], versions[3], config, networkInfo, handleThrottling); + schemaRegistry.migrate( + merkleTree, + versions[0], + versions[3], + config, + networkInfo, + handleThrottling, + mock(WritableEntityIdStore.class)); // We should see the v3 state (the delta from v3 after applied atop v2 and v1) final var readableStates = merkleTree.createReadableStates(FIRST_SERVICE); @@ -565,7 +639,13 @@ void badSchema() { // We should see that the migration failed assertThatThrownBy(() -> schemaRegistry.migrate( - 
merkleTree, versions[0], versions[2], config, networkInfo, handleThrottling)) + merkleTree, + versions[0], + versions[2], + config, + networkInfo, + handleThrottling, + mock(WritableEntityIdStore.class))) .isInstanceOf(RuntimeException.class) .hasMessage("Bad"); diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/merkle/SerializationTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/merkle/SerializationTest.java index 5845fd73338e..b4d4aeaeff74 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/merkle/SerializationTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/merkle/SerializationTest.java @@ -20,6 +20,7 @@ import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.mock; +import com.hedera.node.app.ids.WritableEntityIdStore; import com.hedera.node.app.spi.fixtures.state.TestSchema; import com.hedera.node.app.spi.info.NetworkInfo; import com.hedera.node.app.spi.state.MigrationContext; @@ -128,7 +129,8 @@ void simpleReadAndWrite() throws IOException, ConstructableRegistryException { new MerkleSchemaRegistry(registry, FIRST_SERVICE, mock(GenesisRecordsBuilder.class)); final var schemaV1 = createV1Schema(); originalRegistry.register(schemaV1); - originalRegistry.migrate(originalTree, null, v1, config, networkInfo, handleThrottling); + originalRegistry.migrate( + originalTree, null, v1, config, networkInfo, handleThrottling, mock(WritableEntityIdStore.class)); // When we serialize it to bytes and deserialize it back into a tree originalTree.copy(); // make a fast copy because we can only write to disk an immutable copy @@ -140,8 +142,14 @@ void simpleReadAndWrite() throws IOException, ConstructableRegistryException { // Register the MerkleHederaState so, when found in serialized bytes, it will register with // our migration callback, etc. 
(normally done by the Hedera main method) final Supplier constructor = () -> new MerkleHederaState( - (tree, state) -> - newRegistry.migrate((MerkleHederaState) state, v1, v1, config, networkInfo, handleThrottling), + (tree, state) -> newRegistry.migrate( + (MerkleHederaState) state, + v1, + v1, + config, + networkInfo, + handleThrottling, + mock(WritableEntityIdStore.class)), (event, meta, provider) -> {}, (state, platform, dualState, trigger, version) -> {}); final var pair = new ClassConstructorPair(MerkleHederaState.class, constructor); @@ -149,7 +157,13 @@ void simpleReadAndWrite() throws IOException, ConstructableRegistryException { final MerkleHederaState loadedTree = parseTree(serializedBytes, dir); newRegistry.migrate( - loadedTree, schemaV1.getVersion(), schemaV1.getVersion(), config, networkInfo, handleThrottling); + loadedTree, + schemaV1.getVersion(), + schemaV1.getVersion(), + config, + networkInfo, + handleThrottling, + mock(WritableEntityIdStore.class)); loadedTree.migrate(1); // Then, we should be able to see all our original states again diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/recordcache/RecordCacheImplTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/recordcache/RecordCacheImplTest.java index 4ce7334ef876..71919bef4760 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/recordcache/RecordCacheImplTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/state/recordcache/RecordCacheImplTest.java @@ -199,12 +199,9 @@ void reloadsIntoCacheOnConstruction() { assertThat(getReceipt(cache, txId1)) .isEqualTo(entries.get(1).transactionRecordOrThrow().receipt()); - assertThat(getRecords(cache, pTxId1)).containsExactly(entries.get(0).transactionRecord()); - assertThat(getReceipts(cache, pTxId1)) - .containsExactly(entries.get(0).transactionRecordOrThrow().receipt()); - assertThat(getRecords(cache, txId1)) .containsExactly( + entries.get(0).transactionRecord(), entries.get(1).transactionRecord(), entries.get(2).transactionRecord(), entries.get(5).transactionRecord()); @@ -216,6 +213,7 @@ void reloadsIntoCacheOnConstruction() { entries.get(5).transactionRecord()); assertThat(getReceipts(cache, txId1)) .containsExactly( + entries.get(0).transactionRecordOrThrow().receipt(), entries.get(1).transactionRecordOrThrow().receipt(), entries.get(2).transactionRecordOrThrow().receipt(), entries.get(5).transactionRecordOrThrow().receipt()); diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleContextImplTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleContextImplTest.java index 7f0297140f40..8a4f381a2586 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleContextImplTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/HandleContextImplTest.java @@ -21,18 +21,22 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.SUCCESS; import static com.hedera.node.app.spi.HapiUtils.functionOf; import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static com.hedera.node.app.workflows.handle.HandleContextImpl.PrecedingTransactionCategory.LIMITED_CHILD_RECORDS; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatNoException; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static 
org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertSame; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.BDDMockito.given; import static org.mockito.Mock.Strictness.LENIENT; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -764,7 +768,8 @@ void setup() { .dispatchHandle(any()); when(childRecordBuilder.status()).thenReturn(ResponseCodeEnum.OK); - when(recordListBuilder.addPreceding(any())).thenReturn(childRecordBuilder); + when(recordListBuilder.addPreceding(any(), eq(LIMITED_CHILD_RECORDS))) + .thenReturn(childRecordBuilder); when(recordListBuilder.addReversiblePreceding(any())).thenReturn(childRecordBuilder); when(recordListBuilder.addChild(any())).thenReturn(childRecordBuilder); when(recordListBuilder.addRemovableChild(any())).thenReturn(childRecordBuilder); @@ -959,7 +964,7 @@ void testDispatchPrecedingWithNonUserTxnFails(TransactionCategory category) { VERIFIER_CALLBACK, AccountID.DEFAULT)) .isInstanceOf(IllegalArgumentException.class); - verify(recordListBuilder, never()).addPreceding(any()); + verify(recordListBuilder, never()).addPreceding(any(), eq(LIMITED_CHILD_RECORDS)); verify(dispatcher, never()).dispatchHandle(any()); assertThat(stack.createReadableStates(FOOD_SERVICE) .get(FRUIT_STATE_KEY) @@ -969,7 +974,7 @@ void testDispatchPrecedingWithNonUserTxnFails(TransactionCategory category) { } @Test - void testDispatchPrecedingWithNonEmptyStackFails() { + void testDispatchPrecedingWithNonEmptyStackDoesntFail() { // given final var context = createContext(defaultTransactionBody(), TransactionCategory.USER); stack.createSavepoint(); @@ -987,7 +992,7 @@ void testDispatchPrecedingWithNonEmptyStackFails() { VERIFIER_CALLBACK, AccountID.DEFAULT)) .isInstanceOf(IllegalStateException.class); - verify(recordListBuilder, never()).addPreceding(any()); + verify(recordListBuilder, never()).addPreceding(any(), eq(LIMITED_CHILD_RECORDS)); verify(dispatcher, never()).dispatchHandle(any()); assertThat(stack.createReadableStates(FOOD_SERVICE) .get(FRUIT_STATE_KEY) @@ -996,30 +1001,30 @@ void testDispatchPrecedingWithNonEmptyStackFails() { } @Test - void testDispatchPrecedingWithChangedDataFails() { + void testDispatchPrecedingWithChangedDataDoesntFail() { // given final var context = createContext(defaultTransactionBody(), TransactionCategory.USER); stack.peek().createWritableStates(FOOD_SERVICE).get(FRUIT_STATE_KEY).put(B_KEY, BLUEBERRY); // then - assertThatThrownBy(() -> context.dispatchPrecedingTransaction( + assertThatNoException() + .isThrownBy(() -> context.dispatchPrecedingTransaction( defaultTransactionBody(), SingleTransactionRecordBuilder.class, VERIFIER_CALLBACK, - AccountID.DEFAULT)) - .isInstanceOf(IllegalStateException.class); - assertThatThrownBy(() -> context.dispatchPrecedingTransaction( + AccountID.DEFAULT)); + assertThatNoException() + .isThrownBy((() -> context.dispatchPrecedingTransaction( defaultTransactionBody(), SingleTransactionRecordBuilder.class, VERIFIER_CALLBACK, - AccountID.DEFAULT)) - .isInstanceOf(IllegalStateException.class); - verify(recordListBuilder, never()).addPreceding(any()); - verify(dispatcher, 
never()).dispatchHandle(any()); + AccountID.DEFAULT))); + verify(recordListBuilder, times(2)).addPreceding(any(), eq(LIMITED_CHILD_RECORDS)); + verify(dispatcher, times(2)).dispatchHandle(any()); assertThat(stack.createReadableStates(FOOD_SERVICE) .get(FRUIT_STATE_KEY) .get(A_KEY)) - .isEqualTo(APPLE); + .isEqualTo(ACAI); } @Test @@ -1034,7 +1039,7 @@ void testDispatchChildFromPrecedingFails() { VERIFIER_CALLBACK, AccountID.DEFAULT)) .isInstanceOf(IllegalArgumentException.class); - verify(recordListBuilder, never()).addPreceding(any()); + verify(recordListBuilder, never()).addPreceding(any(), eq(LIMITED_CHILD_RECORDS)); verify(dispatcher, never()).dispatchHandle(any()); assertThat(stack.createReadableStates(FOOD_SERVICE) .get(FRUIT_STATE_KEY) @@ -1054,7 +1059,7 @@ void testDispatchRemovableChildFromPrecedingFails() { VERIFIER_CALLBACK, AccountID.DEFAULT)) .isInstanceOf(IllegalArgumentException.class); - verify(recordListBuilder, never()).addPreceding(any()); + verify(recordListBuilder, never()).addPreceding(any(), eq(LIMITED_CHILD_RECORDS)); verify(dispatcher, never()).dispatchHandle(any()); assertThat(stack.createReadableStates(FOOD_SERVICE) .get(FRUIT_STATE_KEY) diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/record/GenesisRecordsConsensusHookTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/record/GenesisRecordsConsensusHookTest.java index d4cca1357458..23b381b16c00 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/record/GenesisRecordsConsensusHookTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/record/GenesisRecordsConsensusHookTest.java @@ -71,7 +71,7 @@ class GenesisRecordsConsensusHookTest { @BeforeEach void setup() { given(context.consensusTime()).willReturn(CONSENSUS_NOW); - given(context.addPrecedingChildRecordBuilder(GenesisAccountRecordBuilder.class)) + given(context.addUncheckedPrecedingChildRecordBuilder(GenesisAccountRecordBuilder.class)) .willReturn(genesisAccountRecordBuilder); subject = new GenesisRecordsConsensusHook(); @@ -131,6 +131,17 @@ void processCreatesTreasuryClones() { verifyBuilderInvoked(ACCOUNT_ID_2, ACCT_2_CREATE, EXPECTED_TREASURY_CLONE_MEMO); } + @Test + void processCreatesBlocklistAccounts() { + subject.blocklistAccounts( + Map.of(ACCOUNT_1, ACCT_1_CREATE.copyBuilder(), ACCOUNT_2, ACCT_2_CREATE.copyBuilder())); + + subject.process(context); + + verifyBuilderInvoked(ACCOUNT_ID_1, ACCT_1_CREATE, null); + verifyBuilderInvoked(ACCOUNT_ID_2, ACCT_2_CREATE, null); + } + @Test void processCreatesAllRecords() { final var acctId3 = ACCOUNT_ID_1.copyBuilder().accountNum(3).build(); @@ -139,10 +150,14 @@ void processCreatesAllRecords() { final var acctId4 = ACCOUNT_ID_1.copyBuilder().accountNum(4).build(); final var acct4 = ACCOUNT_1.copyBuilder().accountId(acctId4).build(); final var acct4Create = ACCT_1_CREATE.copyBuilder().memo("builder4").build(); + final var acctId5 = ACCOUNT_ID_1.copyBuilder().accountNum(5).build(); + final var acct5 = ACCOUNT_1.copyBuilder().accountId(acctId5).build(); + final var acct5Create = ACCT_1_CREATE.copyBuilder().memo("builder5").build(); subject.systemAccounts(Map.of(ACCOUNT_1, ACCT_1_CREATE.copyBuilder())); subject.stakingAccounts(Map.of(ACCOUNT_2, ACCT_2_CREATE.copyBuilder())); subject.miscAccounts(Map.of(acct3, acct3Create.copyBuilder())); subject.treasuryClones(Map.of(acct4, acct4Create.copyBuilder())); + subject.blocklistAccounts(Map.of(acct5, acct5Create.copyBuilder())); // 
Call the first time to make sure records are generated subject.process(context); @@ -159,6 +174,7 @@ void processCreatesAllRecords() { EXPECTED_STAKING_MEMO); verifyBuilderInvoked(acctId3, acct3Create, null); verifyBuilderInvoked(acctId4, acct4Create, EXPECTED_TREASURY_CLONE_MEMO); + verifyBuilderInvoked(acctId5, acct5Create, null); // Call process() a second time to make sure no other records are created Mockito.clearInvocations(genesisAccountRecordBuilder); @@ -206,6 +222,12 @@ void treasuryAccountsNullParam() { Assertions.assertThatThrownBy(() -> subject.treasuryClones(null)).isInstanceOf(NullPointerException.class); } + @SuppressWarnings("DataFlowIssue") + @Test + void blocklistAccountsNullParam() { + Assertions.assertThatThrownBy(() -> subject.blocklistAccounts(null)).isInstanceOf(NullPointerException.class); + } + private void verifyBuilderInvoked( final AccountID acctId, final CryptoCreateTransactionBody acctCreateBody, final String expectedMemo) { verify(genesisAccountRecordBuilder).accountID(acctId); diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/record/RecordListBuilderTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/record/RecordListBuilderTest.java index d54f0e9f42c9..d093cdf33d11 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/record/RecordListBuilderTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/handle/record/RecordListBuilderTest.java @@ -19,6 +19,7 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_ID_DOES_NOT_EXIST; import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; import static com.hedera.hapi.node.base.ResponseCodeEnum.REVERTED_SUCCESS; +import static com.hedera.node.app.workflows.handle.HandleContextImpl.PrecedingTransactionCategory.LIMITED_CHILD_RECORDS; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; @@ -83,7 +84,7 @@ void testAddSinglePreceding() { addUserTransaction(recordListBuilder); // when - recordListBuilder.addPreceding(CONFIGURATION).transaction(simpleCryptoTransfer()); + recordListBuilder.addPreceding(CONFIGURATION, LIMITED_CHILD_RECORDS).transaction(simpleCryptoTransfer()); final var result = recordListBuilder.build(); final var records = result.records(); @@ -108,8 +109,8 @@ void testAddMultiplePrecedingRecords() { addUserTransaction(recordListBuilder); // when - recordListBuilder.addPreceding(CONFIGURATION).transaction(simpleCryptoTransfer()); - recordListBuilder.addPreceding(CONFIGURATION).transaction(simpleCryptoTransfer()); + recordListBuilder.addPreceding(CONFIGURATION, LIMITED_CHILD_RECORDS).transaction(simpleCryptoTransfer()); + recordListBuilder.addPreceding(CONFIGURATION, LIMITED_CHILD_RECORDS).transaction(simpleCryptoTransfer()); final var result = recordListBuilder.build(); final var records = result.records(); @@ -140,11 +141,11 @@ void testAddTooManyPrecedingRecordsFails() { addUserTransaction(recordListBuilder); // when - recordListBuilder.addPreceding(config); - recordListBuilder.addPreceding(config); + recordListBuilder.addPreceding(config, LIMITED_CHILD_RECORDS); + recordListBuilder.addPreceding(config, LIMITED_CHILD_RECORDS); // then - assertThatThrownBy(() -> recordListBuilder.addPreceding(config)) + assertThatThrownBy(() -> recordListBuilder.addPreceding(config, LIMITED_CHILD_RECORDS)) .isInstanceOf(HandleException.class) .hasFieldOrPropertyWithValue("status", 
ResponseCodeEnum.MAX_CHILD_RECORDS_EXCEEDED); } @@ -155,11 +156,11 @@ void testRevertSinglePreceding() { final var consensusTime = Instant.now(); final var recordListBuilder = new RecordListBuilder(consensusTime); final var base = addUserTransaction(recordListBuilder); - recordListBuilder.addPreceding(CONFIGURATION).transaction(simpleCryptoTransfer()); + recordListBuilder.addPreceding(CONFIGURATION, LIMITED_CHILD_RECORDS).transaction(simpleCryptoTransfer()); // when recordListBuilder.revertChildrenOf(base); - recordListBuilder.addPreceding(CONFIGURATION).transaction(simpleCryptoTransfer()); + recordListBuilder.addPreceding(CONFIGURATION, LIMITED_CHILD_RECORDS).transaction(simpleCryptoTransfer()); final var result = recordListBuilder.build(); final var records = result.records(); @@ -248,7 +249,7 @@ void testAddTooManyReversiblePrecedingRecordsFails() { recordListBuilder.addReversiblePreceding(config); // then - assertThatThrownBy(() -> recordListBuilder.addPreceding(config)) + assertThatThrownBy(() -> recordListBuilder.addPreceding(config, LIMITED_CHILD_RECORDS)) .isInstanceOf(HandleException.class) .hasFieldOrPropertyWithValue("status", ResponseCodeEnum.MAX_CHILD_RECORDS_EXCEEDED); } @@ -356,9 +357,9 @@ void testRevertMultipleMixedPreceding() { final var consensusTime = Instant.now(); final var recordListBuilder = new RecordListBuilder(consensusTime); final var base = addUserTransaction(recordListBuilder); - recordListBuilder.addPreceding(config).transaction(simpleCryptoTransfer()); + recordListBuilder.addPreceding(config, LIMITED_CHILD_RECORDS).transaction(simpleCryptoTransfer()); recordListBuilder.addReversiblePreceding(config).transaction(simpleCryptoTransfer()); - recordListBuilder.addPreceding(config).transaction(simpleCryptoTransfer()); + recordListBuilder.addPreceding(config, LIMITED_CHILD_RECORDS).transaction(simpleCryptoTransfer()); recordListBuilder.addReversiblePreceding(config).transaction(simpleCryptoTransfer()); // when @@ -477,8 +478,8 @@ void testAddPrecedingAndChildRecords() { final var fifth = simpleCryptoTransfer(); // mixing up preceding vs. 
following, but within which, in order recordListBuilder.addChild(CONFIGURATION).transaction(fourth); - recordListBuilder.addPreceding(CONFIGURATION).transaction(first); - recordListBuilder.addPreceding(CONFIGURATION).transaction(second); + recordListBuilder.addPreceding(CONFIGURATION, LIMITED_CHILD_RECORDS).transaction(first); + recordListBuilder.addPreceding(CONFIGURATION, LIMITED_CHILD_RECORDS).transaction(second); recordListBuilder.addChild(CONFIGURATION).transaction(fifth); final var result = recordListBuilder.build(); final var records = result.records(); diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/prehandle/PreHandleContextListUpdatesTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/prehandle/PreHandleContextListUpdatesTest.java index c035df61aa69..b9d4b8144a74 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/prehandle/PreHandleContextListUpdatesTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/prehandle/PreHandleContextListUpdatesTest.java @@ -19,6 +19,8 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_CONTRACT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_PAYER_ACCOUNT_ID; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.spi.HapiUtils.EMPTY_KEY_LIST; import static com.hedera.node.app.spi.fixtures.Assertions.assertThrowsPreCheck; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; @@ -138,6 +140,7 @@ void nullInputToBuilderArgumentsThrows() throws PreCheckException { // Given an account with a key, and a transaction using that account as the payer given(accountStore.getAccountById(payer)).willReturn(account); given(account.key()).willReturn(payerKey); + given(account.accountIdOrThrow()).willReturn(payer); given(storeFactory.getStore(ReadableAccountStore.class)).willReturn(accountStore); // When we create a PreHandleContext by passing null as either argument @@ -230,6 +233,7 @@ void returnsIfGivenKeyIsPayer() throws PreCheckException { // Given an account with a key, and a transaction using that account as the payer and a PreHandleContext given(accountStore.getAccountById(payer)).willReturn(account); given(account.key()).willReturn(payerKey); + given(account.accountIdOrThrow()).willReturn(payer); given(storeFactory.getStore(ReadableAccountStore.class)).willReturn(accountStore); subject = new PreHandleContextImpl(storeFactory, createAccountTransaction(), CONFIG, dispatcher); @@ -266,6 +270,8 @@ void addsContractIdKey() throws PreCheckException { given(account.key()).willReturn(payerKey); given(accountStore.getContractById(otherContractId)).willReturn(contractAccount); given(contractAccount.key()).willReturn(contractIdKey); + given(contractAccount.keyOrElse(EMPTY_KEY_LIST)).willReturn(contractIdKey); + given(contractAccount.accountIdOrThrow()).willReturn(asAccount(otherContractId.contractNum())); given(storeFactory.getStore(ReadableAccountStore.class)).willReturn(accountStore); subject = new PreHandleContextImpl(storeFactory, createAccountTransaction(), CONFIG, dispatcher); @@ -284,6 +290,7 @@ void doesntFailForAliasedAccount() throws PreCheckException { given(accountStore.getAccountById(payer)).willReturn(account); given(account.key()).willReturn(payerKey); 
given(storeFactory.getStore(ReadableAccountStore.class)).willReturn(accountStore); + given(account.accountIdOrThrow()).willReturn(payer); subject = new PreHandleContextImpl(storeFactory, createAccountTransaction(), CONFIG, dispatcher); // When we require the account by alias @@ -299,6 +306,8 @@ void doesntFailForAliasedContract() throws PreCheckException { final var alias = ContractID.newBuilder().evmAddress(Bytes.wrap("test")).build(); given(accountStore.getContractById(alias)).willReturn(contractAccount); given(contractAccount.key()).willReturn(otherKey); + given(contractAccount.keyOrElse(EMPTY_KEY_LIST)).willReturn(otherKey); + given(contractAccount.accountIdOrThrow()).willReturn(asAccount(otherContractId.contractNum())); given(accountStore.getAccountById(payer)).willReturn(account); given(account.key()).willReturn(payerKey); given(storeFactory.getStore(ReadableAccountStore.class)).willReturn(accountStore); diff --git a/hedera-node/hedera-app/src/testFixtures/java/com/hedera/node/app/fixtures/state/FakeSchemaRegistry.java b/hedera-node/hedera-app/src/testFixtures/java/com/hedera/node/app/fixtures/state/FakeSchemaRegistry.java index 713e500d74ff..5d78b01dd1c1 100644 --- a/hedera-node/hedera-app/src/testFixtures/java/com/hedera/node/app/fixtures/state/FakeSchemaRegistry.java +++ b/hedera-node/hedera-app/src/testFixtures/java/com/hedera/node/app/fixtures/state/FakeSchemaRegistry.java @@ -111,6 +111,11 @@ public HandleThrottleParser handleThrottling() { public NetworkInfo networkInfo() { return networkInfo; } + + @Override + public long newEntityNum() { + return 0; + } }); // Now commit them all diff --git a/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/ConsensusConfig.java b/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/ConsensusConfig.java index 26b4aaaedc43..22b34c119d7d 100644 --- a/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/ConsensusConfig.java +++ b/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/ConsensusConfig.java @@ -24,7 +24,7 @@ public record ConsensusConfig( @ConfigProperty(value = "message.maxBytesAllowed", defaultValue = "1024") @NetworkProperty int messageMaxBytesAllowed, - @ConfigProperty(value = "message.maxPrecedingRecords", defaultValue = "950") @NetworkProperty + @ConfigProperty(value = "message.maxPrecedingRecords", defaultValue = "3") @NetworkProperty long handleMaxPrecedingRecords, @ConfigProperty(value = "message.maxFollowingRecords", defaultValue = "50") @NetworkProperty long handleMaxFollowingRecords) {} diff --git a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/schemas/GenesisSchemaTest.java b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/schemas/GenesisSchemaTest.java index d92e05f9bf7c..c57b06fa9781 100644 --- a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/schemas/GenesisSchemaTest.java +++ b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/schemas/GenesisSchemaTest.java @@ -18,10 +18,12 @@ import static com.swirlds.common.utility.CommonUtils.unhex; import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; import com.hedera.hapi.node.base.FileID; import com.hedera.hapi.node.base.Key; import com.hedera.hapi.node.state.file.File; +import com.hedera.node.app.ids.WritableEntityIdStore; import com.hedera.node.app.service.file.impl.FileServiceImpl; 
import com.hedera.node.app.service.file.impl.schemas.GenesisSchema; import com.hedera.node.app.spi.fixtures.info.FakeNetworkInfo; @@ -70,7 +72,13 @@ void emptyFilesCreatedForUpdateFiles() { // When we migrate schema.migrate(new MigrationContextImpl( - prevStates, newStates, config, networkInfo, new GenesisRecordsConsensusHook(), handleThrottleParser)); + prevStates, + newStates, + config, + networkInfo, + new GenesisRecordsConsensusHook(), + handleThrottleParser, + mock(WritableEntityIdStore.class))); // Then the new state has empty bytes for files 151-158 and proper values final var files = newStates.get(FileServiceImpl.BLOBS_KEY); diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/txns/crypto/AutoCreationLogic.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/txns/crypto/AutoCreationLogic.java index 8b4fbd14a322..f512cf2d2e35 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/txns/crypto/AutoCreationLogic.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/txns/crypto/AutoCreationLogic.java @@ -127,8 +127,10 @@ protected void trackSigImpactIfNeeded(final Builder syntheticCreation, Expirable } public void submitRecordsTo(final RecordsHistorian recordsHistorian) { - submitRecords((syntheticBody, recordSoFar) -> - recordsHistorian.trackPrecedingChildRecord(DEFAULT_SOURCE_ID, syntheticBody, recordSoFar)); + submitRecords((syntheticBody, recordSoFar) -> { + recordSoFar.onlyExternalizeIfSuccessful(); + recordsHistorian.trackPrecedingChildRecord(DEFAULT_SOURCE_ID, syntheticBody, recordSoFar); + }); } @VisibleForTesting diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/txns/crypto/HollowAccountFinalizationLogic.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/txns/crypto/HollowAccountFinalizationLogic.java index 0e6b212d2cd4..118f09f2ff43 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/txns/crypto/HollowAccountFinalizationLogic.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/txns/crypto/HollowAccountFinalizationLogic.java @@ -135,7 +135,7 @@ private List getFinalPendingCompletions() { /** * Given a mutable list of {@link PendingCompletion}, checks whether the current transaction - * is a valid EthereumTransaction and its sender sender is a hollow account. If such is the case, + * is a valid EthereumTransaction and its sender is a hollow account. If such is the case, * adds a new {@link PendingCompletion} for the hollow wrapped sender of the EthereumTransaction. 
* * @param pendingFinalizations a mutable list of {@link PendingCompletion} diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetRecordHandler.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetRecordHandler.java index 9e6f739dbf8a..038f2cec1e9f 100644 --- a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetRecordHandler.java +++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetRecordHandler.java @@ -47,6 +47,7 @@ import com.hederahashgraph.api.proto.java.FeeComponents; import com.hederahashgraph.api.proto.java.FeeData; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Comparator; import javax.inject.Inject; import javax.inject.Singleton; @@ -122,7 +123,13 @@ public Response findResponse(@NonNull final QueryContext context, @NonNull final responseBuilder.duplicateTransactionRecords(history.duplicateRecords()); } if (op.includeChildRecords()) { - responseBuilder.childTransactionRecords(history.childRecords()); + // Sort the transaction records based on nonce, so that the user transaction is always first, + // followed by any preceding transactions, followed by any child transactions. + final var sortedRecords = history.childRecords().stream() + .sorted(Comparator.comparingLong( + a -> a.transactionIDOrThrow().nonce())) + .toList(); + responseBuilder.childTransactionRecords(sortedRecords); } } } diff --git a/hedera-node/hedera-token-service-impl/build.gradle.kts b/hedera-node/hedera-token-service-impl/build.gradle.kts index b52f9efd656a..78964191db22 100644 --- a/hedera-node/hedera-token-service-impl/build.gradle.kts +++ b/hedera-node/hedera-token-service-impl/build.gradle.kts @@ -30,7 +30,6 @@ testModuleInfo { requires("com.swirlds.merkle") requires("com.swirlds.test.framework") requires("org.assertj.core") - requires("org.bouncycastle.provider") requires("org.hamcrest") requires("org.junit.jupiter.api") requires("org.junit.jupiter.params") @@ -39,6 +38,5 @@ testModuleInfo { requiresStatic("com.github.spotbugs.annotations") runtimeOnly("org.mockito.inline") requires("com.google.protobuf") - requires("com.hedera.node.app.service.evm") requires("com.swirlds.common") } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/BlocklistParser.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/BlocklistParser.java new file mode 100644 index 000000000000..1d191967ee68 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/BlocklistParser.java @@ -0,0 +1,148 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.token.impl; + +import static java.util.Objects.requireNonNull; + +import com.hedera.node.app.service.evm.utils.EthSigsUtils; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.math.BigInteger; +import java.util.Collections; +import java.util.HexFormat; +import java.util.List; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.bouncycastle.asn1.sec.SECNamedCurves; +import org.bouncycastle.crypto.params.ECDomainParameters; +import org.bouncycastle.crypto.params.ECPublicKeyParameters; + +/** + * Encapsulates the logic for reading blocked accounts from file + */ +public class BlocklistParser { + private static final Logger log = LogManager.getLogger(BlocklistParser.class); + + /** + * Makes sure that all blocked accounts contained in the blocklist resource are present in state, and creates their definitions (if necessary). + * + *

Note: this method assumes that blocklists are enabled – it does not check that config property + */ + public List<BlockedInfo> parse(@NonNull final String blocklistResourceName) { + final List<String> fileLines = readFileLines(blocklistResourceName); + if (fileLines.isEmpty()) return Collections.emptyList(); + + return parseBlockList(fileLines); + } + + private List<String> readFileLines(@NonNull final String blocklistResourceName) { + try { + return readPrivateKeyBlocklist(blocklistResourceName); + } catch (Exception e) { + log.error("Failed to read blocklist resource {}", blocklistResourceName, e); + return Collections.emptyList(); + } + } + + private static List<BlockedInfo> parseBlockList(final List<String> fileLines) { + final List<BlockedInfo> blocklist; + try { + final var columnHeaderLine = fileLines.get(0); // Assume that the first line is the header + final var blocklistLines = fileLines.subList(1, fileLines.size()); + final var columnCount = columnHeaderLine.split(",").length; + blocklist = blocklistLines.stream() + .map(line -> parseCSVLine(line, columnCount)) + .toList(); + } catch (IllegalArgumentException iae) { + log.error("Failed to parse blocklist", iae); + return Collections.emptyList(); + } + return blocklist; + } + + @NonNull + private static List<String> readPrivateKeyBlocklist(@NonNull final String fileName) { + try (final var inputStream = BlocklistParser.class.getClassLoader().getResourceAsStream(fileName); + final var reader = new BufferedReader(new InputStreamReader(requireNonNull(inputStream)))) { + return reader.lines().toList(); + } catch (IOException e) { + throw new RuntimeException("Failed to load blocklist", e); + } + } + + /** + * Parses a line from the blocklist resource and returns a blocked account info record. + * + * The line should have the following format: + * <private key>,<memo> + * where <private key> is a hex-encoded private key + * and <memo> is a memo for the blocked account + * and both values are comma-separated. + * + * The resulting blocked account info record contains the EVM address derived from the private key, and the memo. + * + * @param line line from the blocklist resource + * @param columnCount number of comma-separated values in a line + * @return blocked account info record + */ + @NonNull + private static BlockedInfo parseCSVLine(final @NonNull String line, final int columnCount) { + final var parts = line.split(",", -1); + if (parts.length != columnCount) { + throw new IllegalArgumentException("Invalid line in blocklist resource: " + line); + } + + final byte[] privateKeyBytes; + try { + privateKeyBytes = HexFormat.of().parseHex(parts[0]); + } catch (IllegalArgumentException iae) { + throw new IllegalArgumentException("Failed to decode line " + line, iae); + } + + final var publicKeyBytes = ecdsaPrivateToPublicKey(privateKeyBytes); + final var evmAddressBytes = EthSigsUtils.recoverAddressFromPubKey(publicKeyBytes); + return new BlockedInfo(Bytes.wrap(evmAddressBytes), parts[1]); + } + + /** + * Derives the ECDSA public key bytes from the given ECDSA private key bytes. 
+ * + * @param privateKeyBytes ECDSA private key bytes + * @return ECDSA public key bytes + */ + private static byte[] ecdsaPrivateToPublicKey(byte[] privateKeyBytes) { + final var ecdsaSecp256K1Curve = SECNamedCurves.getByName("secp256k1"); + final var ecdsaSecp256K1Domain = new ECDomainParameters( + ecdsaSecp256K1Curve.getCurve(), + ecdsaSecp256K1Curve.getG(), + ecdsaSecp256K1Curve.getN(), + ecdsaSecp256K1Curve.getH()); + final var privateKeyData = new BigInteger(1, privateKeyBytes); + var q = ecdsaSecp256K1Domain.getG().multiply(privateKeyData); + var publicParams = new ECPublicKeyParameters(q, ecdsaSecp256K1Domain); + return publicParams.getQ().getEncoded(true); + } + + /** + * @param evmAddress the EVM address of the blocked account + * @param memo the memo of the blocked account + */ + public record BlockedInfo(@NonNull Bytes evmAddress, @NonNull String memo) {} +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableAccountStoreImpl.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableAccountStoreImpl.java index 19ef0e2811e6..836862f1508b 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableAccountStoreImpl.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableAccountStoreImpl.java @@ -96,7 +96,7 @@ protected > T aliases() { * * @param accountID the {@code AccountID} which {@code Account is requested} * @return an {@link Optional} with the {@code Account}, if it was found, an empty {@code - * Optional} otherwise + * Optional} otherwise */ @Override @Nullable diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoTransferHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoTransferHandler.java index 192c8c478ece..d995ecedd561 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoTransferHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoTransferHandler.java @@ -295,9 +295,16 @@ private void checkFungibleTokenTransfers( // is set on the transaction, then we defer to the token transfer logic to determine if all // signing requirements were met ("isApproval" is a way for the client to say "I don't need a key // because I'm approved which you will see when you handle this transaction"). - if (isDebit && !accountAmount.isApproval() && !isHollow(account)) { - // NOTE: should change to ACCOUNT_IS_IMMUTABLE after modularization - ctx.requireKeyOrThrow(account.key(), INVALID_ACCOUNT_ID); + if (isDebit && !accountAmount.isApproval()) { + // If the account is a hollow account, then we require a signature for it. + // It is possible that the hollow account has signed this transaction, in which case + // we need to finalize the hollow account by setting its key. 
+ if (isHollow(account)) { + ctx.requireSignatureForHollowAccount(account); + } else { + ctx.requireKeyOrThrow(account.key(), INVALID_ACCOUNT_ID); + } + } else if (isCredit && account.receiverSigRequired()) { ctx.requireKeyOrThrow(account.key(), INVALID_TRANSFER_ACCOUNT_ID); } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenCreateHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenCreateHandler.java index 3e849e14cbfd..bc3110e1c295 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenCreateHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenCreateHandler.java @@ -22,7 +22,7 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.MAX_ENTITIES_IN_PRICE_REGIME_HAVE_BEEN_CREATED; import static com.hedera.node.app.hapi.fees.usage.crypto.CryptoOpsUsage.txnEstimateFactory; import static com.hedera.node.app.service.mono.pbj.PbjConverter.fromPbj; -import static com.hedera.node.app.service.token.impl.util.TokenHandlerHelper.verifyIsNotImmutableAccount; +import static com.hedera.node.app.service.token.impl.util.TokenHandlerHelper.verifyNotEmptyKey; import static com.hedera.node.app.service.token.impl.validators.CustomFeesValidator.SENTINEL_TOKEN_ID; import static com.hedera.node.app.spi.validation.ExpiryMeta.NA; import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; @@ -361,7 +361,7 @@ private void addCustomFeeCollectorKeys( if (collectorAcct != null && (collectorAcct.alias() == null || Bytes.EMPTY.equals(collectorAcct.alias())) && (collectorAcct.hasKey())) { - verifyIsNotImmutableAccount(collectorAcct.keyOrThrow(), INVALID_CUSTOM_FEE_COLLECTOR); + verifyNotEmptyKey(collectorAcct.key(), INVALID_CUSTOM_FEE_COLLECTOR); } /* A fractional fee collector and a collector for a fixed fee denominated diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/staking/EndOfStakingPeriodUpdater.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/staking/EndOfStakingPeriodUpdater.java index 1925fad1e60c..c028b5d19a91 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/staking/EndOfStakingPeriodUpdater.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/staking/EndOfStakingPeriodUpdater.java @@ -235,8 +235,11 @@ public void updateNodes(@NonNull final TokenContext context) { stakingConfig.rewardBalanceThreshold(), stakingConfig.maxStakeRewarded()); log.info("Exporting:\n{}", finalNodeStakes); - - final var nodeStakeUpdateBuilder = context.addPrecedingChildRecordBuilder(NodeStakeUpdateRecordBuilder.class); + // We don't want to fail adding the preceding child record for the node stake update that happens every + // midnight. 
+ // So, we add the preceding child record builder as unchecked, that doesn't fail with MAX_CHILD_RECORDS_EXCEEDED + final var nodeStakeUpdateBuilder = + context.addUncheckedPrecedingChildRecordBuilder(NodeStakeUpdateRecordBuilder.class); nodeStakeUpdateBuilder.transaction(Transaction.newBuilder() .body(syntheticNodeStakeUpdateTxn.build()) .build()); diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/AutoAccountCreator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/AutoAccountCreator.java index a065ad2b190e..dd75f13ffc6a 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/AutoAccountCreator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/transfer/AutoAccountCreator.java @@ -92,20 +92,10 @@ public AccountID create(@NonNull final Bytes alias, int maxAutoAssociations) { memo = AUTO_MEMO; } - // TODO : distribute autocreation fee and deduct payer balance - // final var payer = handleContext.body().transactionID().accountID(); - // final var payerAccount = accountStore.get(payer); - // final var currentBalance = payerAccount.tinybarBalance(); - // validateTrue(currentBalance >= fee, INSUFFICIENT_PAYER_BALANCE); - // final var payerCopy = payerAccount.copyBuilder() - // .tinybarBalance(currentBalance - fee) - // .build(); - // accountStore.put(payerCopy.copyBuilder().build()); - final Predicate verifier = key -> handleContext.verificationFor(key).passed(); - - final var childRecord = handleContext.dispatchRemovableChildTransaction( + // dispatch the auto-creation record as a preceding record + final var childRecord = handleContext.dispatchRemovablePrecedingTransaction( syntheticCreation.memo(memo).build(), CryptoCreateRecordBuilder.class, verifier, handleContext.payer()); var fee = autoCreationFeeFor(syntheticCreation); diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/schemas/GenesisSchema.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/schemas/GenesisSchema.java index c89789977043..81bf6942f21d 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/schemas/GenesisSchema.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/schemas/GenesisSchema.java @@ -23,6 +23,7 @@ import static com.hedera.node.app.service.token.impl.TokenServiceImpl.STAKING_NETWORK_REWARDS_KEY; import static com.hedera.node.app.service.token.impl.TokenServiceImpl.TOKENS_KEY; import static com.hedera.node.app.service.token.impl.TokenServiceImpl.TOKEN_RELS_KEY; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; import static com.hedera.node.app.spi.HapiUtils.EMPTY_KEY_LIST; import static com.hedera.node.app.spi.HapiUtils.FUNDING_ACCOUNT_EXPIRY; @@ -43,6 +44,8 @@ import com.hedera.hapi.node.state.token.Token; import com.hedera.hapi.node.state.token.TokenRelation; import com.hedera.hapi.node.token.CryptoCreateTransactionBody; +import com.hedera.node.app.service.token.impl.BlocklistParser; +import com.hedera.node.app.service.token.impl.TokenServiceImpl; import com.hedera.node.app.spi.state.MigrationContext; import com.hedera.node.app.spi.state.Schema; import com.hedera.node.app.spi.state.StateDefinition; @@ -51,10 +54,12 @@ 
import com.hedera.node.config.data.HederaConfig; import com.hedera.node.config.data.LedgerConfig; import com.hedera.node.config.data.StakingConfig; +import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Arrays; import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.stream.LongStream; import org.apache.logging.log4j.LogManager; @@ -76,11 +81,14 @@ public class GenesisSchema extends Schema { private static final long LAST_RESERVED_SYSTEM_CONTRACT = 399L; private static final long FIRST_POST_SYSTEM_FILE_ENTITY = 200L; + private final BlocklistParser blocklistParser; + /** * Create a new instance */ public GenesisSchema() { super(GENESIS_VERSION); + blocklistParser = new BlocklistParser(); } @NonNull @@ -213,6 +221,62 @@ public void migrate(@NonNull MigrationContext ctx) { "Ledger float is {} tinyBars in {} accounts.", totalBalance, accounts.modifiedKeys().size()); + + // ---------- Create blocklist accounts (if enabled) ------------------------- + final Map blocklistAccts = new HashMap<>(); + if (accountsConfig.blocklistEnabled()) { + final var blocklistResourceName = accountsConfig.blocklistResource(); + final var blocklist = blocklistParser.parse(blocklistResourceName); + if (blocklist.isEmpty()) { + return; + } + + final var aliases = ctx.newStates().get(TokenServiceImpl.ALIASES_KEY); + + // We only want to create accounts that are not already in state, so we filter based on blocked account EVM + // addresses that don't yet exist in state + final var blockedToCreate = blocklist.stream() + .filter(blockedAccount -> aliases.get(blockedAccount.evmAddress()) == null) + .toList(); + + for (final var blockedInfo : blockedToCreate) { + final var newId = ctx.newEntityNum(); + final var account = blockedAccountWith(blockedInfo, bootstrapConfig) + .accountId(asAccount(newId)) + .build(); + blocklistAccts.put(account, newCryptoCreate(account)); + accounts.put(account.accountIdOrThrow(), account); + aliases.put(account.alias(), account.accountIdOrThrow()); + } + } + recordsKeeper.blocklistAccounts(blocklistAccts); + log.info("Created {} blocklist accounts", blocklistAccts.size()); + } + + /** + * Creates a blocked Hedera account with the given memo and EVM address. + * A blocked account has receiverSigRequired flag set to true, key set to the genesis key, and balance set to 0. 
+ * + * @param blockedInfo record containing EVM address and memo for the blocked account + * @return a Hedera account with the given memo and EVM address + */ + @NonNull + private Account.Builder blockedAccountWith( + @NonNull final BlocklistParser.BlockedInfo blockedInfo, @NonNull final BootstrapConfig bootstrapConfig) { + final var expiry = bootstrapConfig.systemEntityExpiry(); + final var acctBuilder = Account.newBuilder() + .receiverSigRequired(true) + .declineReward(true) + .deleted(false) + .expirationSecond(expiry) + .smartContract(false) + .key(superUserKey(bootstrapConfig)) + .autoRenewSeconds(expiry) + .alias(blockedInfo.evmAddress()); + + if (!blockedInfo.memo().isEmpty()) acctBuilder.memo(blockedInfo.memo()); + + return acctBuilder; } private static AccountID asAccountId(final long acctNum, final HederaConfig hederaConfig) { @@ -239,10 +303,13 @@ private Key superUserKey(@NonNull final BootstrapConfig bootstrapConfig) { return Key.newBuilder().ed25519(superUserKeyBytes).build(); } - private Account createAccount(@NonNull final AccountID id, final long balance, final long expiry, final Key key) { + @NonNull + private Account createAccount( + @NonNull final AccountID id, final long balance, final long expiry, @NonNull final Key key) { return createAccount(id, balance, expiry, key, true); } + @NonNull private Account createAccount( @NonNull final AccountID id, final long balance, @@ -264,7 +331,7 @@ private Account createAccount( .build(); } - private void initializeStakingNodeInfo(final MigrationContext ctx) { + private void initializeStakingNodeInfo(@NonNull final MigrationContext ctx) { // TODO: This need to go through address book and set all the nodes final var config = ctx.configuration(); final var ledgerConfig = config.getConfigData(LedgerConfig.class); @@ -289,7 +356,7 @@ private void initializeStakingNodeInfo(final MigrationContext ctx) { stakingInfoState.put(EntityNumber.newBuilder().number(0L).build(), stakingInfo); } - private void initializeNetworkRewards(final MigrationContext ctx) { + private void initializeNetworkRewards(@NonNull final MigrationContext ctx) { // Set genesis network rewards state final var networkRewardsState = ctx.newStates().getSingleton(STAKING_NETWORK_REWARDS_KEY); final var networkRewards = NetworkStakingRewards.newBuilder() @@ -301,7 +368,7 @@ private void initializeNetworkRewards(final MigrationContext ctx) { networkRewardsState.put(networkRewards); } - private static CryptoCreateTransactionBody.Builder newCryptoCreate(Account account) { + private static CryptoCreateTransactionBody.Builder newCryptoCreate(@NonNull final Account account) { return CryptoCreateTransactionBody.newBuilder() .key(account.key()) .memo(account.memo()) @@ -310,6 +377,7 @@ private static CryptoCreateTransactionBody.Builder newCryptoCreate(Account accou .autoRenewPeriod(Duration.newBuilder() .seconds(account.autoRenewSeconds()) .build()) - .initialBalance(account.tinybarBalance()); + .initialBalance(account.tinybarBalance()) + .alias(account.alias()); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java index 12c7c7b1b107..dbbfadc2261d 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java +++ 
b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java @@ -206,15 +206,8 @@ public static TokenRelation getIfUsable( return tokenRel; } - /** - * Checks that a key does not represent an immutable account, e.g. the staking rewards account. - * Throws a {@link PreCheckException} with the designated response code otherwise. - * @param key the key to check - * @param responseCode the response code to throw - * @throws PreCheckException if the account is considered immutable - */ - public static void verifyIsNotImmutableAccount( - @Nullable final Key key, @NonNull final ResponseCodeEnum responseCode) throws PreCheckException { + public static void verifyNotEmptyKey(@Nullable final Key key, @NonNull final ResponseCodeEnum responseCode) + throws PreCheckException { if (EMPTY_KEY_LIST.equals(key)) { throw new PreCheckException(responseCode); } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/module-info.java b/hedera-node/hedera-token-service-impl/src/main/java/module-info.java index a91d58b72538..07403c511867 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/module-info.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/module-info.java @@ -10,11 +10,13 @@ requires transitive dagger; requires transitive javax.inject; requires com.hedera.node.app.hapi.utils; + requires com.hedera.node.app.service.evm; requires com.google.common; requires com.swirlds.base; requires org.apache.commons.lang3; requires org.apache.logging.log4j; requires static com.github.spotbugs.annotations; + requires org.bouncycastle.provider; requires static java.compiler; // javax.annotation.processing.Generated provides com.hedera.node.app.service.token.TokenService with diff --git a/hedera-node/hedera-token-service-impl/src/main/resources/evm-addresses-blocklist.csv b/hedera-node/hedera-token-service-impl/src/main/resources/evm-addresses-blocklist.csv new file mode 100644 index 000000000000..1452fa78539c --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/resources/evm-addresses-blocklist.csv @@ -0,0 +1,300 @@ +privateKey,memo +7f109a9e3b0d8ecfba9cc23a3614433ce0fa7ddcc80f2a8f10b222179a5a80d6,Hedera Local Node address +6ec1f2e7d126a74a1d2ff9e1c5d90b92378c725e506651ff8bb8616a5c724628,Hedera Local Node address +b4d7f7e82f61d81c95985771b8abf518f9328d019c36849d4214b5f995d13814,Hedera Local Node address +941536648ac10d5734973e94df413c17809d6cc5e24cd11e947e685acfbd12ae,Hedera Local Node address +5829cf333ef66b6bdd34950f096cb24e06ef041c5f63e577b4f3362309125863,Hedera Local Node address +8fc4bffe2b40b2b7db7fd937736c4575a0925511d7a0a2dfc3274e8c17b41d20,Hedera Local Node address +b6c10e2baaeba1fa4a8b73644db4f28f4bf0912cceb6e8959f73bb423c33bd84,Hedera Local Node address +fe8875acb38f684b2025d5472445b8e4745705a9e7adc9b0485a05df790df700,Hedera Local Node address +bdc6e0a69f2921a78e9af930111334a41d3fab44653c8de0775572c526feea2d,Hedera Local Node address +3e215c3d2a59626a669ed04ec1700f36c05c9b216e592f58bbfd3d8aa6ea25f9,Hedera Local Node address +105d050185ccb907fba04dd92d8de9e32c18305e097ab41dadda21489a211524,Hedera Local Node address +2e1d968b041d84dd120a5860cee60cd83f9374ef527ca86996317ada3d0d03e7,Hedera Local Node address +45a5a7108a18dd5013cf2d5857a28144beadc9c70b3bdbd914e38df4e804b8d8,Hedera Local Node address +6e9d61a325be3f6675cf8b7676c70e4a004d2308e3e182370a41f5653d52c6bd,Hedera Local Node address +0b58b1bd44469ac9f813b5aeaf6213ddaea26720f0b2f133d08b6f234130a64f,Hedera Local Node address 
+95eac372e0f0df3b43740fa780e62458b2d2cc32d6a440877f1cc2a9ad0c35cc,Hedera Local Node address +6c6e6727b40c8d4b616ab0d26af357af09337299f09c66704146e14236972106,Hedera Local Node address +5072e7aa1b03f531b4731a32a021f6a5d20d5ddc4e55acbb71ae202fc6f3a26d,Hedera Local Node address +60fe891f13824a2c1da20fb6a14e28fa353421191069ba6b6d09dd6c29b90eff,Hedera Local Node address +eae4e00ece872dd14fb6dc7a04f390563c7d69d16326f2a703ec8e0934060cc7,Hedera Local Node address +ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80,Hardhat address +59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d,Hardhat address +5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a,Hardhat address +7c852118294e51e653712a81e05800f419141751be58f605c371e15141b007a6,Hardhat address +47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a,Hardhat address +8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba,Hardhat address +92db14e403b83dfe3df233f83dfa3a0d7096f21ca9b0d6d6b8d88b2b4ec1564e,Hardhat address +4bbbf85ce3377467afe5d46f804f221813b2bb87f24d81f60f1fcdbf7cbf4356,Hardhat address +dbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97,Hardhat address +2a871d0798f97d79848a013d4936a73bf4cc922c825d33c1cf7073dff6d409c6,Hardhat address +f214f2b2cd398c806f84e317254e0f0b801d0643303237d97a22a48e01628897,Hardhat address +701b615bbdfb9de65240bc28bd21bbc0d996645a3dd57e7b12bc2bdf6f192c82,Hardhat address +a267530f49f8280200edf313ee7af6b827f2a8bce2897751d06a843f644967b1,Hardhat address +47c99abed3324a2707c28affff1267e45918ec8c3f20b8aa892e8b065d2942dd,Hardhat address +c526ee95bf44d8fc405a158bb884d9d1238d99f0612e9f33d006bb0789009aaa,Hardhat address +8166f546bab6da521a8369cab06c5d2b9e46670292d85c875ee9ec20e84ffb61,Hardhat address +ea6c44ac03bff858b476bba40716402b03e41b8e97e276d1baec7c37d42484a0,Hardhat address +689af8efa8c651a91ad287602527f3af2fe9f6501a7ac4b061667b5a93e037fd,Hardhat address +de9be858da4a475276426320d5e9262ecfc3ba460bfac56360bfa6c4c28b4ee0,Hardhat address +df57089febbacf7ba0bc227dafbffa9fc08a93fdc68e1e42411a14efcf23656e,Hardhat address +0000000000000000000000000000000000000000000000000000000000000001,Hardhat test account +0000000000000000000000000000000000006000000000000000000000000000,Hardhat test account +0000000000000000000000000000000000000000000000000000000000000100,Hardhat test account +8db185e31c0cd39fdfb04737671e2cfb6e7b81506214eff6399fe1953ee6c141,Hardhat test account +d8f6581e7054cb6ed2a8785ea6a1ee25c23626695f4684cd75af97d119ed4cff,Hardhat test account +8cfc6038620c6cc8bb63dd3b674dc9fa39a4fb3dfbadfd441d58ea7a3abd2d67,Hardhat test account +d67e0e0dc9ed18d2f1aa7bb349c29b458a36f5708f9530ca2bb2e3d9e3bea148,Hardhat test account +026ef42327f997e7d90368c7eada05d898db50d6c3c7d95c32f3f67fd75bba2e,Hardhat test account +bdb1f27c9ed0171824f00563165256958b50814259be0cb59f14e196361c990c,Hardhat test account +e975d94030de9cd138767d8783f38f63b4750c62124b85f89f49a40f99363c6c,Hardhat test account +f0223b33e08da98ea33c0dba4cf835b8e069f2af56291774ce8d1000c3225ee9,Hardhat test account +515eda4465e5d6530262daedbc19ab607300875556960f87b9371b7913180726,Hardhat test account +5986e6db40aa28038e7f120bfff4d31752a68c11ef65dc826c442aa98daacc69,Hardhat test account +bfd4779a916c95198ac9631a4a9a81f857ac143eaffbf5c345202dc0fca2e2a4,Hardhat test account +b13b2627c3bcccaae65d03bc2c7f63382f702014eee7000a333e56954b18e824,Hardhat test account +8e43baf412f2e72f42e8d8a2733fd308b68c5d11f3b7a2ede7419fd0e9ebb175,Hardhat test account 
+cc75fc424e4aecc4f8313997d2e3a7c91ad590e3ced1522aa839349debcc7a7a,Hardhat test account +a453028c3abad5b8f6a930f5d5373bd2380deb4fed2ae27e8f590e11a65ac0fd,Hardhat test account +0b85f98bb9c53c50dec8f42fcc0996bcc5412568bf4402d5001a368baca32a78,Hardhat test account +3000b145721909acf5cc11f35a75878306c4b6eea8bf5c327ba54720ba669e08,Hardhat test account +ead0e52d984b7571bb6ab39b3b358e17900ae36fb17417e63b5829d77ccf7eb9,Hardhat test account +45004572ee3a3a491716d8933e0cdcdaf1c817271cfa64f49ec1e685a6acc99b,Hardhat test account +04f8b9e24a7f0bd509c8d9ccc8afabffd4a215968de0a484d681b72fc726cb7f,Hardhat test account +0d3f44eaabcfbce32db36064c094459e552ba831b4867c2deae0da41d8b8f825,Hardhat test account +128010655fa081cdeea6de733ea36d7743925dd2644a43f1f26f4514a04b76aa,Hardhat test account +6bc70972d901fbec62a0179a0392f6ad2a5e6ce5aa808c4f5c1e3431519003e8,Hardhat test account +149b81a1ef36642d400b13c1f55ea73f611c1c93d1d8a2c7737dcd1d820ae9d4,Hardhat test account +108a7661af5b887a34306f66c842cf3bc828bc9ede5bb43cccc3c5204a6e8ee9,Hardhat test account +c355aca072b45a84f3c823cd87bd05378e1fb94ba7ad11591c189abf6e9d5548,Hardhat test account +5077bbdd55e876053b2aced503c8ebdeee9ff59967d92b4b20a7227728ade1e2,Hardhat test account +ac7dfbbb5edcc0d0093539c1101961c9043c2d00ebcfd81a61d39d365fecc4f9,Hardhat test account +52d7b70d469829fbf895bb5b59bff8d7f2036826a6cbe5d472f2ba52fb510dd2,Hardhat test account +826e434b4d071cb7f6b5db163705497b877127615cb3ac17a8b51514d8eafff5,Hardhat test account +a8127b989c73ec75eeaced9e4e3f2cbed6c7b4c192c0fce9db46c6be0abb9783,Hardhat test account +38f809dc5e4893c77ae3b0d3f0aa551d3cff042315385d0ed594927e718c1a14,Hardhat test account +fae38ad774c73f9952b9413570aa77b812b2003cdcfba833d6e867629f8cb606,Hardhat test account +fe385f84b1f6e4b9a653cb1fa1fcd23027f9bf4aacdf13838704eac268f6513c,Hardhat test account +f12ad5e2b77c8b8215a6e5280b5db9eb879ba133ad0c37588858e0c404459c5e,Hardhat test account +b064ae77cb072e80d2a55b1f38a3a5cab6c68d8b8ac82018dc9e918cacc946b4,Hardhat test account +843fc862098c7417ab998ae4729b7807b508f2e8d034065786c9def01ca9a483,Hardhat test account +7a565d68dccee0db203f38d50ddfded2f97c625e1078d3deaacfa9962761cb1b,Hardhat test account +66190b3f2c4a6b8d28c6cbea4ff7b163b4097610d9af89a000b726abd4649ea7,Hardhat test account +8d2e8445288e3004095b27f5452f2f5ed71a9362fbb6291ef42f32288c5b4b49,Hardhat test account +a9d3a9ad0080a881a868660fc520fd09626bfcfcb194e7d5184d6139b4715463,Hardhat test account +2d97a8fe9131bba16605a691ef3a24e0f3a3504a82ffb4367e9146101cb30a1d,Hardhat test account +a9b968d3e4f5eac71a2b020f45601cf26189083548fc4bb36657a73105ed8326,Hardhat test account +34daf766e5c2776717455737514d923944e58971048cd8fea4a48105d22cb825,Hardhat test account +c0612ea08a29b26e6d89a5f5f7b3f13093be5a2c23caac697c19c7d969aad2e3,Hardhat test account +6912f43225d3870c9aaf7acf79b741f845071ddfff746ef6469142c7e8569edd,Hardhat test account +1b355392e085a8f827b0eeb552730ab7e32432e54820fb3e6194782c4ede1d8d,Hardhat test account +ab555e8a627ba8d564de56e3451e6fe4fb407891891f6e46512ae9bb669e08fd,Hardhat test account +5909ba36285cbb5c3ce2c5f9d82a442c420ac836bebf79698df4050ab778c334,Hardhat test account +017f8483f632a4cc3b3ebaf79445779924c44810eaa7a041faf32afeef93eb03,Hardhat test account +37bb21a2b630d8ffe3b850dc6fa348f1ca5bca19c81a3f8a95b4c446a861f222,Hardhat test account +9ec6939b9b769e55b5b8277d44625d761e40865be8564cb7461b4ce6912d8aa0,Hardhat test account +1d6b2316d0cb9825ca5f9382e34e9886e249425b9db2cd7cc82868c70cec6167,Hardhat test account +badf11273f4562aaff7b7a0957bc6a5b77fda6f4b452d1e11408686400c82971,Hardhat 
test account +1bec8eff46f9282ec6bc52b72979a4cc874ea000f98c632a49817b92239a57a9,Hardhat test account +ca42dd8b21bf9ff2f1a7fd96210e20130371441d39b2d73d977e436cdf29d3e5,Hardhat test account +f2636116412a024ba8dabd6a2d93a5743c933f06508f9e631fea3bf234a913af,Hardhat test account +e471f9669a34860b0854c3e444ce658f51f2bd7e11333baeeb358105b7c8ae54,Hardhat test account +05f333a8addf5b596b1b55ce9b04946c0b13835bb09a763d14e53ba7a89970f8,Hardhat test account +a50e4a16f5a7c04f13069d700e6d94ed5d1c809a1fca6e57e330c276779a885b,Hardhat test account +f2d92cf62d165dcb2d415873916b1217fd9a55e698517b525d4423fdef28510a,Hardhat test account +794c6ce9103d17419d4786cc3439cf4704cc10c6a589122f5fe8f194ee3803f2,Hardhat test account +203650adcd06950f477f3e3c924c72f7bf8a67e0a82ba54a10bea395038f5008,Hardhat test account +834becadd58cb52b7bc133b159e3157c7f50a34650c3f9c115fed19ba33da8c3,Hardhat test account +08bfb858eaf9d8d424931eb761357362f0d3658db169bbc06fda1967d817c9e6,Hardhat test account +0805c2e1ce3e780af961315cf58bb96271dd9f6c7a4db696679a8542618e771c,Hardhat test account +3fb43263114ae06b1040906c7077f4b1bb86be4d8a6ed5ab2f909b56160bc294,Hardhat test account +5c1dd0b3436dc84923c35a10479ca9b36a3327c01d6e6a18e6299f354d93c50c,Hardhat test account +16fc6b98211c38752c34808d255ca68fea673caea186e7694a02f9569172952c,Hardhat test account +5167bd38a213a7069257eba56911bc52e7765bb02f07fb2cc343411afc170ddf,Hardhat test account +c23901e9ec498262a88e09ba936d11a3b104449bfdcac3148287fd4040c57aea,Hardhat test account +9a209f0bd9c8d317b3ce26fa4e8277c922f7da7d98a3c60d5f55543113a46925,Hardhat test account +757744ee9626cfd4937ab5cff81e589a7096d1323da7edc55d0e5e0b3b8d2cef,Hardhat test account +3cd899e9c35470721f9bc2d79d4009d2fcf2eef49346d2e1621fae6b4465da95,Hardhat test account +0585fa49b160d3bb2c30c91a47e950dd25d2f0e238dd36a1bdaffe3929041d58,Hardhat test account +501244805e4a8b2fd19f81b3a5d2e0cb00920347e9139ed887597de44faee6b1,Hardhat test account +41a8f3dd42d568a2825f659be4742bb83cd065aef395559a664d79ccc700b69a,Hardhat test account +debf1d79d52489daf60fa976f76958dfac8be9cb50175adff82bb8d0467ae45b,Hardhat test account +7c46409eab16f0948322a622971d0536527a07e67aefeae93492ad2f4ca1f1d4,Hardhat test account +68b68312d2c3c37cb245c840fc632a969fed36fe5dee0cb69fef2a79788c60f1,Hardhat test account +c8fa013eb0c36ea847466854fbfff5840569fd9669e74e4c9d01594a05857872,Hardhat test account +43908367224ec093a95291967aefe271238e9adebf67e369441dbe0244149f73,Hardhat test account +078d9c5d307d9b5864e0da6d4eaff8dc3e12a2b9fb90db62572892427f82bec8,Hardhat test account +5e8e4a6075913b865aa52599c26db78dde63763b5a71875908fec2ffcec92bdb,Hardhat test account +e2bb828023ba28b2ea8c77e23d9eae2d5d4f5ce5fa3492648ef5e1eab86c7eb3,Hardhat test account +ff58a65b5af895bb2b0219b84aec34d16809dfdce420c7f69d07ed81a436042c,Hardhat test account +c984e118ebbb8a2d412bbb4be64742d0bfe86e2a400110666c350f525d6499ff,Hardhat test account +a4a1e10eedadf4fa19108457d9b718ba4325fcea630557df004bfc45ec9c1344,Hardhat test account +ef932ce3d55002155a5660688d1980714458e758f697f5859576d01379a3730d,Hardhat test account +b14918649b5280a622bc3b352999dafcba29acf149a5aa7f02f6af50371eeae4,Hardhat test account +c1fb91add5e2a9060142302eb5f69a627351cd8013ab7488f77bec45460d6898,Hardhat test account +ffb5090bafbedd989f159a87d7c43c74507d7b9aae644a046e6a3256d80d4fee,Hardhat test account +9492f41281f4ce5120af83302444fa5b0f53c461239d2161bc1bb228f1a71d92,Hardhat test account +b00a8d7f24e221c64156220a929716e2589d24c98dbe09bb0df105be0899cb96,Hardhat test account 
+366c3697f687f553cf2109f66ee09f4f38c6a92d59b0445f812748d01bb37091,Hardhat test account +c10e7745a452a068901a3aeff6951f375eaf7cbc6eed08b8f9de429c6f063380,Hardhat test account +d6ea5d51374e273f9f39ff9faa43eb91134c656fbf7e4150fa558da6abd2fa9b,Hardhat test account +87f6ba4a59f66de3e53413fc178810b1660a9d7b6e48b1d2e604e8aef8742bec,Hardhat test account +85092744975f4b760162a138262c1b7bbad02886205054a9496152ef58f14870,Hardhat test account +b73274c6c2b900a356fdb177701d86e88aa682dea75d6b42a9c1d509db343124,Hardhat test account +6971f564b0a5584c6045df85c6ed7381a425f72a3edc39735eefacad2d00156a,Hardhat test account +5bb53ca85146cc122f008fbae41f72a1d7953974723eb35af3b4a5540203e904,Hardhat test account +64bc2e6486a776b41ba9dec53bf2659afefe9783eb4733c21c3a9bb4336b98f7,Hardhat test account +ab744cc795f32169145da967a78cc4947373e25787ce10e75766d5b5677b18f6,Hardhat test account +55fcd60087496b40285d3281e42556249274cf0f30e2405c83270b47543fa8dc,Hardhat test account +f8c7d9d40f2cd515d983c9def54d28b22623d9fe729b10b44233772c0569abb4,Hardhat test account +0ccbaf2f637e11dffe22381dc9d7903f037ce9e52ac7b17c6ead6747206d3103,Hardhat test account +7fd7a1a4357707215b8be4ae9ea2bf25f9583e907379dfdad94789bb589dfe1c,Hardhat test account +4ee2bc0823b83a6c2ac057302e9303ab7f8fd1865fbbb4b44fd13368cdce79a0,Hardhat test account +6ef5b09300f69faf692581be2f70a574e02251f165d32b387d3c9e14d053deb1,Hardhat test account +cfca5a15d0a7d4c608d4b2c130d5a90d502f69c9b683f2353dc539ef4e1dcf73,Hardhat test account +e9a8e6f49ccfee6ff3ba47b304e54541e11028167530a641e732baa994cac633,Hardhat test account +d011d674001593644e2c3b1e260d4b91072db755b0e7ffce7a5c2e388a967152,Hardhat test account +753369cc03c928079f358a16a67cee28684e288eac1970ac543273e69a6addee,Hardhat test account +d89a177e139db44713aa8ee8e2b01ef0c47a7b6caf45473c871aa8376ab63242,Hardhat test account +afd6104553dd7e6d9a01a84d352ad43917f192c9cf029ef2c9df4ebd19249bf6,Hardhat test account +17c060591c918ed782db01aceb2b3475fd2a20dcd18cf12d085d07ffa8a5fc8b,Hardhat test account +626464d37ac54296131a0e6449363c32913f9a9d986d796402e8d3fd495a8c9a,Hardhat test account +5b8c9961ba2d142dc443416497cbdf61f9ab277b3774855aa8f2cd3aa4c7b297,Hardhat test account +86229f6a2924f8635ef31532ffad45733b9268baaa7391d604010e8969268da6,Hardhat test account +31f3436e955133e9defe2a36ab755134120e917d90736ab5b219b879dc2767ce,Hardhat test account +b27e9e82e34ddeb663e303c218c00da245316bf2156f0e3bd8596ee384e63c28,Hardhat test account +768da1330fc0c4a26ec76ffea13eaad37e3f96b278cc04273ebce48a9de89fd5,Hardhat test account +5c962d4337e2f1dabfeec26f2cd5a59eae24fd45d0488a14bc4d789b88182679,Hardhat test account +84d9a4f90fd05501aed0c93cc9c8658930db801becaebe20811e2f7934d3424e,Hardhat test account +db3cee4c1d0b347db4f932818597946ded9bb838be1f88005973a9fd42f156ae,Hardhat test account +3ed94ed550d691e383734b996ab64a1238c3a43a488f1d1a926b3c19975053c3,Hardhat test account +0e2dd35a69e9f8d4325d30e3eefdf68b6d5f1948284b90fc3701dc8758bf4c12,Hardhat test account +8efab780d5f7040ce6a0b8d8391b5cd093ef91325cfdf02281a9d5872603ed79,Hardhat test account +aa91261cdb30617fd1967e44431e22940d26e893fe82e4a9a3c4f51e3e13b44f,Hardhat test account +bfe69b015ec846e2235f212d90ab46c8f69e670ac5e10c7ce0e391589008630f,Hardhat test account +808688330d2f50287c5c428543738db1f05aae49552e55aa279ab2a91e17e64b,Hardhat test account +adce8b151e0c620adb49252902c52711ece73f908ba7f9021ad4bf4d598ef1c1,Hardhat test account +ee73faf620e739afdc37c6148eb2d7f8c94389373aefa2294159ad250cc2c184,Hardhat test account +3fc6e594568880cac325a4b14e675c423d2bb72167d8c8399391e6cc76f1ff0a,Hardhat 
test account +3aeaf4d8c36dcee68deb32f35ea2f9dd5891be554e105227a6ee9d07a4d219de,Hardhat test account +188e818ad3e541fdc1790b296ec42b75975a9efc1022c2e4213f4929bbcd94be,Hardhat test account +fcf7938542209b0cfe77a58633cced1c82c120c3ac4adc6f55727590ef135fac,Hardhat test account +6cae5ab39112b57deb37b809d6023fd41250921297e546cf2043b538eb32ee88,Hardhat test account +dc5d3170aa82774de0ad3c186db0d75235c31fe0f81038f345fd51e3134a80fc,Hardhat test account +68941241b80a139e3e6a1cb8d3772daac4e9653b932f8ac2c0a8601c72d6db69,Hardhat test account +d4e6f59d7f60dd4b29abf7cf0c5d0d4088540599b552b88721d6b1d054e4efeb,Hardhat test account +174fdcb377f644364c168ad54ee87e26170666652542dd2013f3cd6cee520c1f,Hardhat test account +5b42553d8a5d2c70c22836df46fbacf6c9a9c7db0d0f91d58feddc8f63839037,Hardhat test account +dd5999f776d98d814f15d18370d288cd1ae65c18b57be1d6be40084d4861f7b2,Hardhat test account +83de53e65ea65632d9d68acbfe6a47cd8f5a277409bda37e73c8410eb071847d,Hardhat test account +d791c688f0f0b145705902fff0199452b75e24a6d62db32709e724dbaa7b36b0,Hardhat test account +ed7a23beccf899196b786d20dd5d6f2ab3d08374522c2200e683f4f895ff8665,Hardhat test account +6584f75fe803f52964d960d900d596a978449fc275cd4646e0ab1e7e725187ed,Hardhat test account +056040518b213c8f7200696be175f1de890b979b6fe8bc0cb4c1c4ce1748d18a,Hardhat test account +c19e749a4170846b25bee02e01a0536aecad9e9221c0185ec48a9fc7311a235c,Hardhat test account +e1bf6909df7154ee58c73265a864d5a92d4292c3eef77c2ce3b4966156a3cb51,Hardhat test account +64d386735132bebdb66723d73db2db7941f090cff37c32deaf803edddfa87cb8,Hardhat test account +c2b56a7fbb4bdab173c2f5056913c91290b2fd54500fb4873b3d1459091ceee4,Hardhat test account +7b323e0e1d5fe14858d0245bfb0d9e9698a014f2ea01e0737be55221b153d3d6,Hardhat test account +2a064982043d4ef16c457f03fcf90344244e0d251d57ddd5e55c71092ea96cc1,Hardhat test account +ba4a3c984a144fce3778e8a9f1771fc0d53ff30e4e94754854234671e1961c59,Hardhat test account +1b098c36e956145cba55d53dafd6911c4e4f01095ad8c89eddcf8285a48641d1,Hardhat test account +341b4bdbf33409cafe1f20eefda23b7e2ce1a09f75c041428ff38bc9df29009a,Hardhat test account +0a8994e1b05a6ad9986f5943ecf68a1e1cce54802650b8ca35ad2d3b5f161b45,Hardhat test account +446cd1627278b8017dd160d8843cbbe17229b2f830276c45e56eb2b04f5c80b8,Hardhat test account +be1f49b135e851fa730d045f8099eedb6d4d43b3ce0e94915af728fe57bf70ee,Hardhat test account +171a4e8d6fd35d8b11b40989255c2675512ca7db57c713e4562f7acd4f304bad,Hardhat test account +f057fa9696735032f294a3108f0fff4e98ed8ec6a79a7a0942e6cc90450096d6,Hardhat test account +6bbfc294f577b77f20ae6d94150b7e6262be7c081975e1554b407401d0b99885,Hardhat test account +2d1ba6db7faaf2f11f6853c2accc24bc935be13bda694ff529c96f7ed56fcadb,Hardhat test account +721af161b925a5927b3b62448317341fa23b5bde9f8687f9c8a02e59a05ed086,Hardhat test account +06c1146ac384fc7ddf0c5345725a17b61363ce1f5f2d299042f2fb90920192d2,Hardhat test account +e0bf10c7feefcd67cf59092451fc73e2d846d0669b091dfeae55db890ea720c1,Hardhat test account +3c854d07ab8e0666f839829d692fd68fed4eb126dc4efcef0ea63e150d4c80bb,Hardhat test account +27d8a2b1e102888c581ea3574c60026f43c74da92b8d77087a9f8d5c3d04bcc3,Hardhat test account +7d75481a43635b278ff8aa5edf3ac1b15a779cf8a29f116e7471b843b6bf80a0,Hardhat test account +f3aee7669c3208083882c192e1a7854825f5254f3cee40501e174485da957a66,Hardhat test account +6cdedc408ccbfedfac5ca78f6f4ea509198c659e0fc1fbe824da33a5a9695a64,Hardhat test account +0d2cab56d1f6eae46ffb7612a82f6a338102b37df674378665c4e28d42c5c003,Hardhat test account 
+a56ff737750b0df93733472c3720fee8b00debf085c545e333926419a919f7e9,Hardhat test account +d1b2f5162d26da9db10b44f5abc317446552c124b4c9fb0de4aed5df0d3f011f,Hardhat test account +2b0a4c35c2664a6bd7cb4efbbbc9c524581fef50d491c9e6ebe3e6c367b7a86c,Hardhat test account +82c0c29f7b4aa61b1edc6bdcf9d501554948d18710bcac8da3841818c5eb20b9,Hardhat test account +2d4155ecaf58388ffbab862cc7717c8f44c468afcbd45b6f9b347b59ec47fcf5,Hardhat test account +e067d5b0deaa3f710d4486fa76545719a7a174ff0f927779d1a21a1d9814abd4,Hardhat test account +b3a8abb0ec63fb271b2f9deaaf346121cab78bf69c015590e9d77f83226c2152,Hardhat test account +ec9e25c95cd684ab10269590b45625bc8814005e3feb9e589af051608184c3f8,Hardhat test account +85de6a69106cb816ef03002dea718f670fbc5b4f2e9d07a8d19ad7c79adb1e91,Hardhat test account +eb4e40014e5b343e4d3de8ed67d9eea82809e9c8af5a743b363184cc64b4edc8,Hardhat test account +a1b977e31ccf4258fd4f1d9e679fd620a8a6d1ad478845f195831d0e7beb099c,Hardhat test account +7b53360f3c48a3501288906b050296e634c2d7c69aa645a00637abfbda9e5951,Hardhat test account +4d55a0f9727c61e3124fefef0d35d22dfe937cabf9ea0dc283f210e8a6c84592,Hardhat test account +b3f2a465e7aadd8102a3369d011a7f9b703c2ada5544cfac1e85129730e8c926,Hardhat test account +f32b2be409de19457551ffff2ad4d7f8698df90aad3f99476a1d71db95d70c5a,Hardhat test account +d9db1a106577f3e9dfff12bf1cafc422206fcb7bf522646bb17cde996eea498e,Hardhat test account +1d2fc66a572c55d5f911eb045a0d44ed08450c93ab6c4b23b9c05a368528c323,Hardhat test account +07e8eecd8dc801b4a5cd1848df3fd2b41e42a9f3f13aaea246d2434c457adbf7,Hardhat test account +2722e56c0bf80f067bf5e32b2bd90a9779d91b20b02237ae3836a01515e7abfd,Hardhat test account +2db2f7f94216204a595433b6162934aa834f4d04d5daae038f9fcd7c4584910a,Hardhat test account +b2e60011cac3eae4b7e76d574fbbce7cea2e86ece0480a0a2f93cd9dc4249239,Hardhat test account +5ef79806e7278ebb426c94ff80056903bde6adc5645d9429dff0c264ac568ef6,Hardhat test account +cd27b94629f38541535faefed56c2ec006b97bec7d151e40ea1e6e1eee15a51b,Hardhat test account +866414bac0efec359ab0151e481c7b1145751466c521e0403101b009bdb73d02,Hardhat test account +f15f21bee75ed3bfa8ec326b7883efddf856f34846a07b26be5f80293aa4dba3,Hardhat test account +5bc1a19659e95c96a4dfe14b83575a433453987d987db87c81aaff56e285da4a,Hardhat test account +784b3489fa678535125dce4f07fc05ec81a5811b073039781aa9ed318c7d0356,Hardhat test account +510ba3040c09137112b884225770241181834b965cefd1f54f5d793ffd3c9f2d,Hardhat test account +648d60fbae53c92f233ab275b1d6cad021a9b14dfbea847742623bbf4c6d8ff7,Hardhat test account +5e958728f77b118fc2c45cb2a6d5bef2740f2d22593b38a2dde445c690c58dce,Hardhat test account +01a52461cb1429361b0292fbb95237d4bcdd23622e8b82d16b863fb8aee9fe84,Hardhat test account +8f4b17c470dfe671125db52a3df13d6a2ac3f65d6ed6c9555fb812c84f7d52b3,Hardhat test account +7bcf1d94e275d0ca02c6b7edc2fdfd154a78b62250dd8f87c138c33c31b74a51,Hardhat test account +9ddb473ff3f6cb98a8d66c7f73dd5a392a6fb096b21f73b2c90c576e50de2a37,Hardhat test account +a4f1dc59086ca29a95e208551583ef25c37d8417c341f2f943327497334d33b8,Hardhat test account +a2083945285ee7410408b0e141173f414335282264f06ca01c22e79e68470dbe,Hardhat test account +45dcaa523554b59d8988d03de237e2868b1eb5da90009cfc89c6d6492f67658f,Hardhat test account +7a53a4080d06ee048fb3cb8b5f4549a018b3320de8b19556c89eeac73c3776c1,Hardhat test account +e3528cf837194707122a1fdc407795ba6372bab5ba81df3b2dd265327387b65a,Hardhat test account +f5c3779a1f6b67f9f0626d8187f461af15055c09a3ea2288e26b07b1f58786e7,Hardhat test account +b1d6b84e59d844f9d3ea9b6e52da572a18a9456f8e8dc88d57fb81b34f246786,Hardhat 
test account +5d917983c1b2a4df7bc2ebc61ad9cedfa848d8a04fcc2827a47802ac42e2201f,Hardhat test account +e531c8d3d407b323a70c0d28e11d0f68d3cb2575999fcfcfb7ad9d103dba1c4e,Hardhat test account +53e962c184c67ec8f2045c14edc662d47e5effbbb9931f7c8676b98a60968bd3,Hardhat test account +3cb099d6b3c69c47e31a3abcb2801bdd01de8721f623257cdd0b1195f3da9581,Hardhat test account +6be2d3a944561b1b75c1157cc4c6ac7c585a5d65c8e6b102eac533e67fffd5d3,Hardhat test account +c1cf64a6d57d7d63b5d032f19e3a76cae9d56edd2e9ea2231cb87ef6b7af8163,Hardhat test account +43cb24e897187c43e77a86b2a4d1195da1540ee7ccf2633f3e109f04293fccf3,Hardhat test account +0bcb8ba3b59ae835891eafe2e8801d2514153618119b1478d09e2cce5135bb83,Hardhat test account +5c82a151f730844ce5fc128028e4442f510c6b579d20915fd8cf81e02354b992,Hardhat test account +216768f6ef69388b3fab11f110b7e1fb08694a95a48f55f6801cdf079e14599c,Hardhat test account +220e6339b703f4ad85f76e1abfac6f63c444a01baa95631aa00667adba3b7346,Hardhat test account +3b135f48b6a9219b192ea951ecc79bf77eafa22383f700e498fd295b213c3267,Hardhat test account +6a9183f26e773765397e86a621efc80df040feeea04e047fb9c8059982b0aad2,Hardhat test account +5d048e7be67d1528423161ae6420999b4083d11fc9ab1990384de7dc89ddf197,Hardhat test account +587149c6c919ce9f880eb8a30ab1118aeba31787bda7fee3eb430f9c28488341,Hardhat test account +addab648d8488d667b01dc9299c3ffe9b0afb3bde5cad43ee6fb705b35578592,Hardhat test account +18bc325975d0687da38d0f0821573cedc911580aa6e274a731bca216d6069fa7,Hardhat test account +9165573e1ee13a6c547b793f5632073d9dddb1cf254bbbaf1ea5529172e9554a,Hardhat test account +4f8a12a5999c1899da1146a141bb6ede9fc2be9d62dbe97e0009e1ccef54f676,Hardhat test account +12b437ee78c209a7425ba3243280a095795c9ba19e12a67c8fa409bf74f50fe3,Hardhat test account +ad9919b611791a3126c0541c1d4d63c9d1e45a87c7043df082c212ffe658f03d,Hardhat test account +bf573e2bc0243ca4c3c4d7dd188a41dd6f612f860f47b07835a5bc3d10d314c0,Hardhat test account +b357c9a118553574eb1ece22b66abdbfbc482b95cdee6959d371a5f918920e96,Hardhat test account +c92459e297b8209838f6b0d05eb44cf65d5368b843a216771c712ec41fc46f50,Hardhat test account +4e56e2ca9b84df3b838c603109065004009b87afa2da911b42ef2c6829181675,Hardhat test account +6dbfafd104b01ba0d3e7a7552a47eff35e42e2bab98e766a193001c1d1577141,Hardhat test account +b998b5dc55d07016a3e80bd1ca27418ba2e96f51e15ca94559f3ac4024df725c,Hardhat test account +7aeca6b611544bdea77d86fcb14c7b8f14633705aacf86bc7486933aec242328,Hardhat test account +743c4c2a92b09a386ecd033edb71c0bf2b389641ff1aa7fdd7335f70fb2eec30,Hardhat test account +0b93a86bedcf68f6334b9084036ce739438297b27723a93de2e676c07ac88e74,Hardhat test account +596b57f2546ba4ca3f20f47c88626673735ed4b90c56d0f6a4637a45498ed52b,Hardhat test account +f47d65db799c924ef9c4aeb44fc32a7cd9a30ce6b5a67442c89c3935776319da,Hardhat test account +9a4989c8edad7d649d8f320cc2dae730adeee2bbc4ae37bba86f87bf1b1bc6eb,Hardhat test account +78c17a46bb4d6b7e620e75443d6db10470d5e3f03334b535fe0967bb88714cc0,Hardhat test account +fe0c61984318d0b5985f811179fa91b92f53d5d402300735c8eef5c372e2166a,Hardhat test account +5d21eff406c2fd9487eb327820fddc519f4eef904ba3331911621c52f6e3ffa3,Hardhat test account +ffef32246e1046f17a86f5f9c68099ffc7a6f55c7625a02084ca3989808a9a84,Hardhat test account +823d54e5a06d50581e78f4b971093107deb73f17e3d04d5e88efebfcd0ac7042,Hardhat test account +b592a868a52048890bef3e23dc9b72f8d00d8039ebeec13b18874203f8fdc01a,Hardhat test account +3a495751c937c4f96a612c765c5900837e641173fdf1e4f32c7e59369c2aba8d,Hardhat test account diff --git 
a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/BlocklistParserTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/BlocklistParserTest.java new file mode 100644 index 000000000000..54cdb8d098f9 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/BlocklistParserTest.java @@ -0,0 +1,74 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.test; + +import com.hedera.node.app.service.token.impl.BlocklistParser; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import org.assertj.core.api.Assertions; +import org.assertj.core.groups.Tuple; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; + +class BlocklistParserTest { + + private static final String BASE_TEST_FILE_DIR = "blocklist-parsing/"; + + private BlocklistParser subject; + + @BeforeEach + void setup() { + subject = new BlocklistParser(); + } + + @CsvSource({ + BASE_TEST_FILE_DIR + "non-csv.txt,", // Not a CSV file + BASE_TEST_FILE_DIR + "empty.csv,", // An empty file + BASE_TEST_FILE_DIR + "this-file-doesnt-exist.csv,", // This file doesn't actually exist + BASE_TEST_FILE_DIR + "partially-valid-evm-addresses-blocklist.csv" // Partially valid CSV file + }) + @ParameterizedTest + void parseInvalidFile(final String filename) { + final var result = subject.parse(filename); + Assertions.assertThat(result).isEmpty(); + } + + @Test + void parseCsvFileWithNoHeader() { + final var result = subject.parse(BASE_TEST_FILE_DIR + "no-header-test-evm-addresses-blocklist.csv"); + // Even though there are two entries in the test file, the code assumes that the first line is a header line + Assertions.assertThat(result).hasSize(1); + } + + @Test + void parseValidFile() { + final var result = subject.parse(BASE_TEST_FILE_DIR + "test-evm-addresses-blocklist.csv"); + Assertions.assertThat(result).hasSize(6); + Assertions.assertThat(result) + .extracting("evmAddress", "memo") + .containsExactly( + Tuple.tuple( + Bytes.fromHex("e261e26aecce52b3788fac9625896ffbc6bb4424"), "Hedera Local Node address"), + Tuple.tuple( + Bytes.fromHex("ce16e8eb8f4bf2e65ba9536c07e305b912bafacf"), "Hedera Local Node address"), + Tuple.tuple(Bytes.fromHex("f39fd6e51aad88f6f4ce6ab8827279cfffb92266"), "Hardhat address"), + Tuple.tuple(Bytes.fromHex("70997970c51812dc3a010c7d01b50e0d17dc79c8"), "Hardhat address"), + Tuple.tuple(Bytes.fromHex("7e5f4552091a69125d5dfcb7b8c2659029395bdf"), "Hardhat test account"), + Tuple.tuple(Bytes.fromHex("a04a864273e77be6fe500ad2f5fad320d9168bb6"), "Hardhat test account")); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerTest.java 
b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerTest.java index 1165da3b0a90..0779e6749187 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoTransferHandlerTest.java @@ -234,7 +234,7 @@ void failsWhenAutoAssociatedTokenHasKycKey() { refreshWritableStores(); givenStoresAndConfig(handleContext); - given(handleContext.dispatchRemovableChildTransaction( + given(handleContext.dispatchRemovablePrecedingTransaction( any(), eq(CryptoCreateRecordBuilder.class), any(Predicate.class), eq(payerId))) .will((invocation) -> { final var copy = @@ -265,7 +265,7 @@ void happyPathWorksWithAutoCreation() { writableTokenStore.put(fungibleToken.copyBuilder().kycKey((Key) null).build()); givenStoresAndConfig(handleContext); - given(handleContext.dispatchRemovableChildTransaction( + given(handleContext.dispatchRemovablePrecedingTransaction( any(), eq(CryptoCreateRecordBuilder.class), any(Predicate.class), eq(payerId))) .will((invocation) -> { final var copy = @@ -341,7 +341,7 @@ void failsOnRepeatedAliasAndCorrespondingNumber() { refreshWritableStores(); givenStoresAndConfig(handleContext); - given(handleContext.dispatchRemovableChildTransaction( + given(handleContext.dispatchRemovablePrecedingTransaction( any(), eq(CryptoCreateRecordBuilder.class), any(Predicate.class), eq(payerId))) .will((invocation) -> { final var copy = @@ -387,7 +387,7 @@ void failsOnRepeatedAliasAndCorrespondingNumberInTokenTransferList() { refreshWritableStores(); givenStoresAndConfig(handleContext); - given(handleContext.dispatchRemovableChildTransaction( + given(handleContext.dispatchRemovablePrecedingTransaction( any(), eq(CryptoCreateRecordBuilder.class), any(Predicate.class), eq(payerId))) .will((invocation) -> { final var copy = diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/staking/EndOfStakingPeriodUpdaterTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/staking/EndOfStakingPeriodUpdaterTest.java index db454b0b67ee..ddcbe33e5c83 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/staking/EndOfStakingPeriodUpdaterTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/staking/EndOfStakingPeriodUpdaterTest.java @@ -200,7 +200,7 @@ void calculatesNewTotalStakesAsExpected() { .willReturn((WritableSingletonState) stakingRewardsState); final var stakingRewardsStore = new WritableNetworkStakingRewardsStore(states); given(context.writableStore(WritableNetworkStakingRewardsStore.class)).willReturn(stakingRewardsStore); - given(context.addPrecedingChildRecordBuilder(NodeStakeUpdateRecordBuilder.class)) + given(context.addUncheckedPrecedingChildRecordBuilder(NodeStakeUpdateRecordBuilder.class)) .willReturn(nodeStakeUpdateRecordBuilder); // Assert preconditions diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/AutoAccountCreatorTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/AutoAccountCreatorTest.java index 5aa41bd82b55..6965d8f2ce69 
100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/AutoAccountCreatorTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/AutoAccountCreatorTest.java @@ -75,7 +75,7 @@ void refusesToCreateBeyondMaxNumber() { // TODO: In end to end tests need to validate other fields set correctly on auto created accounts void happyPathECKeyAliasWorks() { accountCreatorInternalSetup(false); - given(handleContext.dispatchRemovableChildTransaction( + given(handleContext.dispatchRemovablePrecedingTransaction( any(), eq(CryptoCreateRecordBuilder.class), any(Predicate.class), eq(payerId))) .will((invocation) -> { final var copy = @@ -105,7 +105,7 @@ void happyPathECKeyAliasWorks() { // TODO: In end to end tests need to validate other fields set correctly on auto created accounts void happyPathEDKeyAliasWorks() { accountCreatorInternalSetup(false); - given(handleContext.dispatchRemovableChildTransaction( + given(handleContext.dispatchRemovablePrecedingTransaction( any(), eq(CryptoCreateRecordBuilder.class), any(Predicate.class), eq(payerId))) .will((invocation) -> { final var copy = @@ -136,7 +136,7 @@ void happyPathEDKeyAliasWorks() { void happyPathWithHollowAccountAliasInHbarTransfersWorks() { accountCreatorInternalSetup(false); final var address = new ProtoBytes(Bytes.wrap(evmAddress)); - given(handleContext.dispatchRemovableChildTransaction( + given(handleContext.dispatchRemovablePrecedingTransaction( any(), eq(CryptoCreateRecordBuilder.class), any(Predicate.class), eq(payerId))) .will((invocation) -> { final var copy = diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/EnsureAliasesStepTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/EnsureAliasesStepTest.java index 107aea735b1d..d87d51d185ba 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/EnsureAliasesStepTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/EnsureAliasesStepTest.java @@ -69,7 +69,7 @@ private void ensureAliasesInternalSetup(final boolean prepopulateReceiverIds) { @Test void autoCreatesAccounts() { ensureAliasesInternalSetup(false); - given(handleContext.dispatchRemovableChildTransaction( + given(handleContext.dispatchRemovablePrecedingTransaction( any(), eq(CryptoCreateRecordBuilder.class), any(Predicate.class), eq(payerId))) .will((invocation) -> { final var copy = @@ -131,7 +131,7 @@ void autoCreateEvmAddressesAccounts() { .build(); givenTxn(body, payerId); - given(handleContext.dispatchRemovableChildTransaction( + given(handleContext.dispatchRemovablePrecedingTransaction( any(), eq(CryptoCreateRecordBuilder.class), any(Predicate.class), eq(payerId))) .will((invocation) -> { final var copy = account.copyBuilder() @@ -230,7 +230,7 @@ void failsOnRepeatedAliasesInTokenTransferList() { ensureAliasesStep = new EnsureAliasesStep(body); transferContext = new TransferContextImpl(handleContext); - given(handleContext.dispatchRemovableChildTransaction( + given(handleContext.dispatchRemovablePrecedingTransaction( any(), eq(CryptoCreateRecordBuilder.class), any(Predicate.class), eq(payerId))) .will((invocation) -> { final var copy = diff --git 
a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/ReplaceAliasesWithIDsInOpTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/ReplaceAliasesWithIDsInOpTest.java index ec22369f3380..83797eac8d0f 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/ReplaceAliasesWithIDsInOpTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/ReplaceAliasesWithIDsInOpTest.java @@ -64,7 +64,7 @@ private void replaceAliasesInternalSetup(final boolean prepopulateReceiverIds) { @Test void autoCreatesAccounts() { replaceAliasesInternalSetup(false); - given(handleContext.dispatchRemovableChildTransaction( + given(handleContext.dispatchRemovablePrecedingTransaction( any(), eq(CryptoCreateRecordBuilder.class), any(Predicate.class), eq(payerId))) .will((invocation) -> { final var copy = @@ -130,7 +130,7 @@ ownerId, asAccountWithAlias(evmAddressAlias3.value()), 1)) .build(); givenTxn(body, payerId); - given(handleContext.dispatchRemovableChildTransaction( + given(handleContext.dispatchRemovablePrecedingTransaction( any(), eq(CryptoCreateRecordBuilder.class), any(Predicate.class), eq(payerId))) .will((invocation) -> { final var copy = account.copyBuilder() @@ -231,7 +231,7 @@ void failsOnRepeatedAliasesInTokenTransferList() { ensureAliasesStep = new EnsureAliasesStep(body); transferContext = new TransferContextImpl(handleContext); - given(handleContext.dispatchRemovableChildTransaction( + given(handleContext.dispatchRemovablePrecedingTransaction( any(), eq(CryptoCreateRecordBuilder.class), any(Predicate.class), eq(payerId))) .will((invocation) -> { final var copy = @@ -271,7 +271,7 @@ void failsOnRepeatedAliasesInHbarTransferList() { ensureAliasesStep = new EnsureAliasesStep(body); transferContext = new TransferContextImpl(handleContext); - given(handleContext.dispatchRemovableChildTransaction( + given(handleContext.dispatchRemovablePrecedingTransaction( any(), eq(CryptoCreateRecordBuilder.class), any(Predicate.class), eq(payerId))) .will((invocation) -> { final var copy = diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/StepsBase.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/StepsBase.java index 91eb8a3355d0..b32b90226d1c 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/StepsBase.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/transfer/StepsBase.java @@ -209,7 +209,7 @@ protected void givenAutoCreationDispatchEffects() { } protected void givenAutoCreationDispatchEffects(AccountID syntheticPayer) { - given(handleContext.dispatchRemovableChildTransaction( + given(handleContext.dispatchRemovablePrecedingTransaction( any(), eq(CryptoCreateRecordBuilder.class), any(Predicate.class), eq(syntheticPayer))) .will((invocation) -> { final var copy = account.copyBuilder() diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/schemas/GenesisSchemaTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/schemas/GenesisSchemaTest.java index 
28f7f5674ee0..c0b5d25671d9 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/schemas/GenesisSchemaTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/schemas/GenesisSchemaTest.java @@ -17,6 +17,8 @@ package com.hedera.node.app.service.token.impl.test.schemas; import static com.hedera.node.app.service.token.impl.TokenServiceImpl.ACCOUNTS_KEY; +import static com.hedera.node.app.service.token.impl.TokenServiceImpl.ALIASES_KEY; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; import static com.hedera.node.app.spi.HapiUtils.EMPTY_KEY_LIST; import static java.util.Collections.emptyMap; import static org.assertj.core.api.Assertions.assertThat; @@ -24,8 +26,11 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.Duration; +import com.hedera.hapi.node.state.common.EntityNumber; import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.token.CryptoCreateTransactionBody; +import com.hedera.node.app.ids.EntityIdService; +import com.hedera.node.app.ids.WritableEntityIdStore; import com.hedera.node.app.service.token.impl.TokenServiceImpl; import com.hedera.node.app.service.token.impl.schemas.GenesisSchema; import com.hedera.node.app.spi.fixtures.info.FakeNetworkInfo; @@ -33,6 +38,7 @@ import com.hedera.node.app.spi.fixtures.state.MapWritableStates; import com.hedera.node.app.spi.info.NetworkInfo; import com.hedera.node.app.spi.state.EmptyReadableStates; +import com.hedera.node.app.spi.state.WritableSingletonState; import com.hedera.node.app.spi.state.WritableSingletonStateBase; import com.hedera.node.app.spi.state.WritableStates; import com.hedera.node.app.spi.throttle.HandleThrottleParser; @@ -63,6 +69,16 @@ final class GenesisSchemaTest { private static final long EXPECTED_ENTITY_EXPIRY = 1812637686L; private static final long TREASURY_ACCOUNT_NUM = 2L; private static final long NUM_RESERVED_SYSTEM_ENTITIES = 750L; + private static final String EVM_ADDRESS_0 = "e261e26aecce52b3788fac9625896ffbc6bb4424"; + private static final String EVM_ADDRESS_1 = "ce16e8eb8f4bf2e65ba9536c07e305b912bafacf"; + private static final String EVM_ADDRESS_2 = "f39fd6e51aad88f6f4ce6ab8827279cfffb92266"; + private static final String EVM_ADDRESS_3 = "70997970c51812dc3a010c7d01b50e0d17dc79c8"; + private static final String EVM_ADDRESS_4 = "7e5f4552091a69125d5dfcb7b8c2659029395bdf"; + private static final String EVM_ADDRESS_5 = "a04a864273e77be6fe500ad2f5fad320d9168bb6"; + private static final String[] EVM_ADDRESSES = { + EVM_ADDRESS_0, EVM_ADDRESS_1, EVM_ADDRESS_2, EVM_ADDRESS_3, EVM_ADDRESS_4, EVM_ADDRESS_5 + }; + private static final long BEGINNING_ENTITY_ID = 3000; @Mock private GenesisRecordsBuilder genesisRecordsBuilder; @@ -82,28 +98,42 @@ final class GenesisSchemaTest { @Captor private ArgumentCaptor<Map<Account, CryptoCreateTransactionBody.Builder>> treasuryCloneMapCaptor; + @Captor + private ArgumentCaptor<Map<Account, CryptoCreateTransactionBody.Builder>> blocklistMapCaptor; + private MapWritableKVState<AccountID, Account> accounts; + private MapWritableKVState<Bytes, AccountID> aliases; private WritableStates newStates; private Configuration config; private NetworkInfo networkInfo; + private WritableEntityIdStore entityIdStore; @BeforeEach void setUp() { accounts = MapWritableKVState.builder(TokenServiceImpl.ACCOUNTS_KEY) .build(); + aliases = MapWritableKVState.builder(ALIASES_KEY).build(); - newStates = newStatesInstance(accounts); + newStates = newStatesInstance(accounts, aliases, newWritableEntityIdState()); + + entityIdStore = new
WritableEntityIdStore(newStates); networkInfo = new FakeNetworkInfo(); - config = buildConfig(NUM_SYSTEM_ACCOUNTS); + config = buildConfig(NUM_SYSTEM_ACCOUNTS, true); } @Test void createsAllAccounts() { final var schema = new GenesisSchema(); final var migrationContext = new MigrationContextImpl( - EmptyReadableStates.INSTANCE, newStates, config, networkInfo, genesisRecordsBuilder, handleThrottling); + EmptyReadableStates.INSTANCE, + newStates, + config, + networkInfo, + genesisRecordsBuilder, + handleThrottling, + entityIdStore); schema.migrate(migrationContext); @@ -158,6 +188,26 @@ void createsAllAccounts() { .map(AccountID::accountNum)) .allMatch(acctNum -> Arrays.contains(GenesisSchema.nonContractSystemNums(NUM_RESERVED_SYSTEM_ENTITIES), acctNum)); + + // Verify created blocklist accounts + verify(genesisRecordsBuilder).blocklistAccounts(blocklistMapCaptor.capture()); + final var blocklistAcctsResult = blocklistMapCaptor.getValue(); + Assertions.assertThat(blocklistAcctsResult).isNotNull().hasSize(6).allSatisfy((account, builder) -> { + Assertions.assertThat(account).isNotNull(); + + Assertions.assertThat(account.accountId().accountNum()) + .isBetween(BEGINNING_ENTITY_ID, BEGINNING_ENTITY_ID + EVM_ADDRESSES.length); + Assertions.assertThat(account.receiverSigRequired()).isTrue(); + Assertions.assertThat(account.declineReward()).isTrue(); + Assertions.assertThat(account.deleted()).isFalse(); + Assertions.assertThat(account.expirationSecond()).isEqualTo(EXPECTED_ENTITY_EXPIRY); + Assertions.assertThat(account.autoRenewSeconds()).isEqualTo(EXPECTED_ENTITY_EXPIRY); + Assertions.assertThat(account.smartContract()).isFalse(); + Assertions.assertThat(account.key()).isNotNull(); + Assertions.assertThat(account.alias()).isNotNull(); + + verifyCryptoCreateBuilder(account, builder); + }); } @Test @@ -165,7 +215,7 @@ void someAccountsAlreadyExist() { final var schema = new GenesisSchema(); // We'll only configure 4 system accounts, half of which will already exist - config = buildConfig(4); + config = buildConfig(4, true); final var accts = new HashMap(); IntStream.rangeClosed(1, 2).forEach(i -> putNewAccount(i, accts)); // One of the two staking accounts will already exist @@ -177,9 +227,24 @@ void someAccountsAlreadyExist() { IntStream.rangeClosed(200, 745).forEach(i -> { if (isRegularAcctNum(i)) putNewAccount(i, accts); }); - newStates = newStatesInstance(new MapWritableKVState<>(ACCOUNTS_KEY, accts)); + // Half of the blocklist accounts will already exist (simulated by the existence of alias mappings, not the + // account objects) + final var blocklistAccts = Map.of( + Bytes.fromHex(EVM_ADDRESS_0), asAccount(BEGINNING_ENTITY_ID), + Bytes.fromHex(EVM_ADDRESS_2), asAccount(BEGINNING_ENTITY_ID + 2), + Bytes.fromHex(EVM_ADDRESS_4), asAccount(BEGINNING_ENTITY_ID + 4)); + newStates = newStatesInstance( + new MapWritableKVState<>(ACCOUNTS_KEY, accts), + new MapWritableKVState<>(ALIASES_KEY, blocklistAccts), + newWritableEntityIdState()); final var migrationContext = new MigrationContextImpl( - EmptyReadableStates.INSTANCE, newStates, config, networkInfo, genesisRecordsBuilder, handleThrottling); + EmptyReadableStates.INSTANCE, + newStates, + config, + networkInfo, + genesisRecordsBuilder, + handleThrottling, + entityIdStore); schema.migrate(migrationContext); @@ -202,6 +267,11 @@ void someAccountsAlreadyExist() { final var treasuryCloneAcctsResult = treasuryCloneMapCaptor.getValue(); // Only treasury clones with IDs 746-750 should have been created 
Assertions.assertThat(treasuryCloneAcctsResult).hasSize(5); + + verify(genesisRecordsBuilder).blocklistAccounts(blocklistMapCaptor.capture()); + final var blocklistAcctsResult = blocklistMapCaptor.getValue(); + // Only half of the blocklist accts should have been created + Assertions.assertThat(blocklistAcctsResult).hasSize(3); } @Test @@ -219,9 +289,26 @@ void allAccountsAlreadyExist() { IntStream.rangeClosed(200, 750).forEach(i -> { if (isRegularAcctNum(i)) putNewAccount(i, accts); }); - newStates = newStatesInstance(new MapWritableKVState<>(ACCOUNTS_KEY, accts)); + // All the blocklist accounts will already exist + final var blocklistEvmAliasMappings = Map.of( + Bytes.fromHex(EVM_ADDRESS_0), asAccount(BEGINNING_ENTITY_ID), + Bytes.fromHex(EVM_ADDRESS_1), asAccount(BEGINNING_ENTITY_ID + 1), + Bytes.fromHex(EVM_ADDRESS_2), asAccount(BEGINNING_ENTITY_ID + 2), + Bytes.fromHex(EVM_ADDRESS_3), asAccount(BEGINNING_ENTITY_ID + 3), + Bytes.fromHex(EVM_ADDRESS_4), asAccount(BEGINNING_ENTITY_ID + 4), + Bytes.fromHex(EVM_ADDRESS_5), asAccount(BEGINNING_ENTITY_ID + 5)); + newStates = newStatesInstance( + new MapWritableKVState<>(ACCOUNTS_KEY, accts), + new MapWritableKVState<>(ALIASES_KEY, blocklistEvmAliasMappings), + newWritableEntityIdState()); final var migrationContext = new MigrationContextImpl( - EmptyReadableStates.INSTANCE, newStates, config, networkInfo, genesisRecordsBuilder, handleThrottling); + EmptyReadableStates.INSTANCE, + newStates, + config, + networkInfo, + genesisRecordsBuilder, + handleThrottling, + entityIdStore); schema.migrate(migrationContext); @@ -229,13 +316,40 @@ void allAccountsAlreadyExist() { verify(genesisRecordsBuilder).stakingAccounts(emptyMap()); verify(genesisRecordsBuilder).miscAccounts(emptyMap()); verify(genesisRecordsBuilder).treasuryClones(emptyMap()); + verify(genesisRecordsBuilder).blocklistAccounts(emptyMap()); + } + + @Test + void blocklistNotEnabled() { + final var schema = new GenesisSchema(); + + // None of the blocklist accounts will exist, but they shouldn't be created since blocklists aren't enabled + config = buildConfig(NUM_SYSTEM_ACCOUNTS, false); + final var migrationContext = new MigrationContextImpl( + EmptyReadableStates.INSTANCE, + newStates, + config, + networkInfo, + genesisRecordsBuilder, + handleThrottling, + entityIdStore); + + schema.migrate(migrationContext); + + verify(genesisRecordsBuilder).blocklistAccounts(emptyMap()); } @Test void systemAccountsCreated() { final var schema = new GenesisSchema(); schema.migrate(new MigrationContextImpl( - EmptyReadableStates.INSTANCE, newStates, config, networkInfo, genesisRecordsBuilder, handleThrottling)); + EmptyReadableStates.INSTANCE, + newStates, + config, + networkInfo, + genesisRecordsBuilder, + handleThrottling, + entityIdStore)); for (int i = 1; i <= 100; i++) { final var balance = i == 2 ? 
EXPECTED_TREASURY_TINYBARS_BALANCE : 0L; @@ -253,7 +367,13 @@ void systemAccountsCreated() { void accountsBetweenFilesAndContracts() { final var schema = new GenesisSchema(); schema.migrate(new MigrationContextImpl( - EmptyReadableStates.INSTANCE, newStates, config, networkInfo, genesisRecordsBuilder, handleThrottling)); + EmptyReadableStates.INSTANCE, + newStates, + config, + networkInfo, + genesisRecordsBuilder, + handleThrottling, + entityIdStore)); for (int i = 200; i < 350; i++) { final var account = accounts.get(accountID(i)); @@ -268,7 +388,13 @@ void accountsBetweenFilesAndContracts() { void contractEntityIdsNotUsed() { final var schema = new GenesisSchema(); schema.migrate(new MigrationContextImpl( - EmptyReadableStates.INSTANCE, newStates, config, networkInfo, genesisRecordsBuilder, handleThrottling)); + EmptyReadableStates.INSTANCE, + newStates, + config, + networkInfo, + genesisRecordsBuilder, + handleThrottling, + entityIdStore)); for (int i = 350; i < 400; i++) { assertThat(accounts.contains(accountID(i))).isFalse(); @@ -279,7 +405,13 @@ void contractEntityIdsNotUsed() { void accountsAfterContracts() { final var schema = new GenesisSchema(); schema.migrate(new MigrationContextImpl( - EmptyReadableStates.INSTANCE, newStates, config, networkInfo, genesisRecordsBuilder, handleThrottling)); + EmptyReadableStates.INSTANCE, + newStates, + config, + networkInfo, + genesisRecordsBuilder, + handleThrottling, + entityIdStore)); for (int i = 400; i <= 750; i++) { final var account = accounts.get(accountID(i)); @@ -294,7 +426,13 @@ void accountsAfterContracts() { void entityIdsBetweenSystemAccountsAndRewardAccountsAreEmpty() { final var schema = new GenesisSchema(); schema.migrate(new MigrationContextImpl( - EmptyReadableStates.INSTANCE, newStates, config, networkInfo, genesisRecordsBuilder, handleThrottling)); + EmptyReadableStates.INSTANCE, + newStates, + config, + networkInfo, + genesisRecordsBuilder, + handleThrottling, + entityIdStore)); for (int i = 751; i < 800; i++) { assertThat(accounts.contains(accountID(i))).isFalse(); @@ -305,7 +443,13 @@ void entityIdsBetweenSystemAccountsAndRewardAccountsAreEmpty() { void stakingRewardAccounts() { final var schema = new GenesisSchema(); schema.migrate(new MigrationContextImpl( - EmptyReadableStates.INSTANCE, newStates, config, networkInfo, genesisRecordsBuilder, handleThrottling)); + EmptyReadableStates.INSTANCE, + newStates, + config, + networkInfo, + genesisRecordsBuilder, + handleThrottling, + entityIdStore)); final var stakingRewardAccount = accounts.get(accountID(800)); verifyStakingAccount(stakingRewardAccount); @@ -318,7 +462,13 @@ void stakingRewardAccounts() { void entityIdsAfterRewardAccountsAreEmpty() { final var schema = new GenesisSchema(); schema.migrate(new MigrationContextImpl( - EmptyReadableStates.INSTANCE, newStates, config, networkInfo, genesisRecordsBuilder, handleThrottling)); + EmptyReadableStates.INSTANCE, + newStates, + config, + networkInfo, + genesisRecordsBuilder, + handleThrottling, + entityIdStore)); for (int i = 802; i < 900; i++) { assertThat(accounts.contains(accountID(i))).isFalse(); @@ -326,10 +476,16 @@ void entityIdsAfterRewardAccountsAreEmpty() { } @Test - void specialAccountsAfter900() { + void miscAccountsAfter900() { final var schema = new GenesisSchema(); schema.migrate(new MigrationContextImpl( - EmptyReadableStates.INSTANCE, newStates, config, networkInfo, genesisRecordsBuilder, handleThrottling)); + EmptyReadableStates.INSTANCE, + newStates, + config, + networkInfo, + genesisRecordsBuilder, + 
handleThrottling, + entityIdStore)); for (int i = 900; i <= 1000; i++) { final var account = accounts.get(accountID(i)); @@ -339,6 +495,24 @@ void specialAccountsAfter900() { } } + @Test + void blocklistAccountIdsMatchEntityIds() { + final var schema = new GenesisSchema(); + schema.migrate(new MigrationContextImpl( + EmptyReadableStates.INSTANCE, + newStates, + config, + networkInfo, + genesisRecordsBuilder, + handleThrottling, + entityIdStore)); + + for (int i = 0; i < EVM_ADDRESSES.length; i++) { + final var acctId = aliases.get(Bytes.fromHex(EVM_ADDRESSES[i])); + assertThat(acctId).isEqualTo(accountID((int) BEGINNING_ENTITY_ID + i + 1)); + } + } + private void verifySystemAccount(final Account account) { assertThat(account).isNotNull(); final long expectedBalance = @@ -417,6 +591,7 @@ private void verifyCryptoCreateBuilder( .autoRenewPeriod(Duration.newBuilder() .seconds(acctResult.autoRenewSeconds()) .build()) + .alias(acctResult.alias()) .build()); } @@ -428,12 +603,14 @@ private void putNewAccount(final long num, final HashMap acc accts.put(acctId, acct); } - private Configuration buildConfig(int numSystemAccounts) { + private Configuration buildConfig(final int numSystemAccounts, final boolean blocklistEnabled) { return HederaTestConfigBuilder.create() // Accounts Config .withValue("accounts.treasury", TREASURY_ACCOUNT_NUM) .withValue("accounts.stakingRewardAccount", 800L) .withValue("accounts.nodeRewardAccount", 801L) + .withValue("accounts.blocklist.enabled", blocklistEnabled) + .withValue("accounts.blocklist.path", "blocklist-parsing/test-evm-addresses-blocklist.csv") // Bootstrap Config .withValue("bootstrap.genesisPublicKey", "0x" + GENESIS_KEY) .withValue("bootstrap.system.entityExpiry", EXPECTED_ENTITY_EXPIRY) @@ -447,13 +624,23 @@ private Configuration buildConfig(int numSystemAccounts) { .getOrCreateConfig(); } - private MapWritableStates newStatesInstance(final MapWritableKVState accts) { + private WritableSingletonState newWritableEntityIdState() { + return new WritableSingletonStateBase<>( + EntityIdService.ENTITY_ID_STATE_KEY, () -> new EntityNumber(BEGINNING_ENTITY_ID), c -> {}); + } + + private MapWritableStates newStatesInstance( + final MapWritableKVState accts, + final MapWritableKVState aliases, + final WritableSingletonState entityIdState) { return MapWritableStates.builder() .state(accts) + .state(aliases) .state(MapWritableKVState.builder(TokenServiceImpl.STAKING_INFO_KEY) .build()) .state(new WritableSingletonStateBase<>( TokenServiceImpl.STAKING_NETWORK_REWARDS_KEY, () -> null, c -> {})) + .state(entityIdState) .build(); } diff --git a/hedera-node/hedera-token-service-impl/src/test/resources/blocklist-parsing/empty-file.csv b/hedera-node/hedera-token-service-impl/src/test/resources/blocklist-parsing/empty-file.csv new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/hedera-node/hedera-token-service-impl/src/test/resources/blocklist-parsing/no-header-test-evm-addresses-blocklist.csv b/hedera-node/hedera-token-service-impl/src/test/resources/blocklist-parsing/no-header-test-evm-addresses-blocklist.csv new file mode 100644 index 000000000000..f101bcc35059 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/resources/blocklist-parsing/no-header-test-evm-addresses-blocklist.csv @@ -0,0 +1,2 @@ +7f109a9e3b0d8ecfba9cc23a3614433ce0fa7ddcc80f2a8f10b222179a5a80d6,Hedera Local Node address +6ec1f2e7d126a74a1d2ff9e1c5d90b92378c725e506651ff8bb8616a5c724628,Hedera Local Node address diff --git 
a/hedera-node/hedera-token-service-impl/src/test/resources/blocklist-parsing/non-csv.txt b/hedera-node/hedera-token-service-impl/src/test/resources/blocklist-parsing/non-csv.txt new file mode 100644 index 000000000000..13efe9d1040b --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/resources/blocklist-parsing/non-csv.txt @@ -0,0 +1,4 @@ +This is not a CSV file +It does have some mildly interesting text, though: + +“Why do they call it rush hour when nothing moves?” \ No newline at end of file diff --git a/hedera-node/hedera-token-service-impl/src/test/resources/blocklist-parsing/partially-valid-evm-addresses-blocklist.csv b/hedera-node/hedera-token-service-impl/src/test/resources/blocklist-parsing/partially-valid-evm-addresses-blocklist.csv new file mode 100644 index 000000000000..5ed4d1d0c5c4 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/resources/blocklist-parsing/partially-valid-evm-addresses-blocklist.csv @@ -0,0 +1,8 @@ +privateKey,memo +7f109a9e3b0d8ecfba9cc23a3614433ce0fa7ddcc80f2a8f10b222179a5a80d6, Hedera Local Node address +6ec1f2e7d126a74a1d2ff9e1c5d90b92378c725e506651ff8bb8616a5c724628, + ,Hardhat address + +0000000000000000000000000000000000006000000000000000000000000000,Hardhat test account,extra field +BogusContentWithNoComma +BogusContentWithStrayApostrophe' \ No newline at end of file diff --git a/hedera-node/hedera-token-service-impl/src/test/resources/blocklist-parsing/test-evm-addresses-blocklist.csv b/hedera-node/hedera-token-service-impl/src/test/resources/blocklist-parsing/test-evm-addresses-blocklist.csv new file mode 100644 index 000000000000..0bc37278f8f4 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/resources/blocklist-parsing/test-evm-addresses-blocklist.csv @@ -0,0 +1,7 @@ +privateKey,memo +7f109a9e3b0d8ecfba9cc23a3614433ce0fa7ddcc80f2a8f10b222179a5a80d6,Hedera Local Node address +6ec1f2e7d126a74a1d2ff9e1c5d90b92378c725e506651ff8bb8616a5c724628,Hedera Local Node address +ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80,Hardhat address +59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d,Hardhat address +0000000000000000000000000000000000000000000000000000000000000001,Hardhat test account +0000000000000000000000000000000000006000000000000000000000000000,Hardhat test account \ No newline at end of file diff --git a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/AliasUtils.java b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/AliasUtils.java index a16f7f7c1ae0..9547f6f85a24 100644 --- a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/AliasUtils.java +++ b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/AliasUtils.java @@ -135,6 +135,12 @@ public static Bytes recoverAddressFromPubKey(@NonNull final Bytes alias) { public static boolean isSerializedProtoKey(@NonNull final Bytes alias) { requireNonNull(alias); + // If the alias is an evmAddress we don't need to parse with Key.PROTOBUF. + // This will cause BufferUnderflowException + if (!isAliasSizeGreaterThanEvmAddress(alias)) { + return false; + } + // Determine whether these bytes represent a serialized Key (as protobuf bytes). // FUTURE: Rather than parsing and catching an error, we could have PBJ provide a method that simply returns // a boolean instead of throwing an exception. 
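The early return added in the hunk above skips protobuf parsing whenever an alias is no longer than a bare 20-byte EVM address, since such bytes can never hold a serialized Key and a strict parse would throw BufferUnderflowException. A minimal self-contained sketch of that guard, assuming a 20-byte EVM_ADDRESS_SIZE and using a placeholder instead of the real Key.PROTOBUF.parseStrict call:

// Illustrative sketch only; parseKeyBytes is a stand-in, not the production AliasUtils helper.
final class AliasSizeGuardSketch {
    static final int EVM_ADDRESS_SIZE = 20;

    static boolean isAliasSizeGreaterThanEvmAddress(final byte[] alias) {
        return alias.length > EVM_ADDRESS_SIZE;
    }

    static boolean looksLikeSerializedKey(final byte[] alias) {
        // An alias of EVM-address size or smaller cannot be a serialized Key,
        // so skip the parse attempt entirely.
        if (!isAliasSizeGreaterThanEvmAddress(alias)) {
            return false;
        }
        try {
            parseKeyBytes(alias); // stand-in for Key.PROTOBUF.parseStrict(...)
            return true;
        } catch (final Exception e) {
            return false;
        }
    }

    private static void parseKeyBytes(final byte[] alias) {
        // Placeholder for the real protobuf parse of the alias bytes.
    }
}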
diff --git a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/AliasUtils.java b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/AliasUtils.java
index a16f7f7c1ae0..9547f6f85a24 100644
--- a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/AliasUtils.java
+++ b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/AliasUtils.java
@@ -135,6 +135,12 @@ public static Bytes recoverAddressFromPubKey(@NonNull final Bytes alias) {
 
     public static boolean isSerializedProtoKey(@NonNull final Bytes alias) {
         requireNonNull(alias);
+        // If the alias is no longer than an EVM address, it cannot be a serialized Key, so skip the
+        // Key.PROTOBUF parse; attempting it would throw a BufferUnderflowException
+        if (!isAliasSizeGreaterThanEvmAddress(alias)) {
+            return false;
+        }
+
         // Determine whether these bytes represent a serialized Key (as protobuf bytes).
         // FUTURE: Rather than parsing and catching an error, we could have PBJ provide a method that simply returns
         // a boolean instead of throwing an exception. Or maybe we can make sure the alias is a valid ECDSA key length
@@ -200,6 +206,11 @@ public static Key asKeyFromAliasPreCheck(@NonNull final Bytes alias) throws PreC
     @Nullable
     public static Key asKeyFromAliasOrElse(@NonNull final Bytes alias, @Nullable final Key def) {
         requireNonNull(alias);
+        // If the alias is no longer than an EVM address, it cannot be a serialized Key, so skip the
+        // Key.PROTOBUF parse; attempting it would throw a BufferUnderflowException
+        if (!isAliasSizeGreaterThanEvmAddress(alias)) {
+            return def;
+        }
         try {
             return Key.PROTOBUF.parseStrict(alias.toReadableSequentialData());
         } catch (final Exception e) {
@@ -207,4 +218,9 @@ public static Key asKeyFromAliasOrElse(@NonNull final Bytes alias, @Nullable fin
             return def;
         }
     }
+
+    public static boolean isAliasSizeGreaterThanEvmAddress(@NonNull final Bytes alias) {
+        requireNonNull(alias);
+        return alias.length() > EVM_ADDRESS_SIZE;
+    }
 }
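(Editor's note: the effect of the new size guard in `AliasUtils` is easiest to see with a bare EVM-address alias. The snippet below is illustrative and assumes `EVM_ADDRESS_SIZE` is the 20-byte EVM address length.)

```java
// A 20-byte alias can only be an EVM address, never a serialized Key.
final Bytes evmAddressAlias = Bytes.fromHex("ab".repeat(20)); // 40 hex chars -> 20 bytes
assert !AliasUtils.isAliasSizeGreaterThanEvmAddress(evmAddressAlias);
// Both helpers now short-circuit instead of feeding the alias to Key.PROTOBUF,
// which previously risked a BufferUnderflowException.
assert !AliasUtils.isSerializedProtoKey(evmAddressAlias);
assert AliasUtils.asKeyFromAliasOrElse(evmAddressAlias, null) == null;
```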
diff --git a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/records/TokenContext.java b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/records/TokenContext.java
index 0b54d13b1cca..5c60ddcd7297 100644
--- a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/records/TokenContext.java
+++ b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/records/TokenContext.java
@@ -79,4 +79,20 @@ public interface TokenContext {
      */
     @NonNull
     <T> T addPrecedingChildRecordBuilder(@NonNull Class<T> recordBuilderClass);
+
+    /**
+     * Adds a preceding child record builder to the list of record builders. This variant does not check whether
+     * the number of records constructed exceeds the allowed number of preceding records.
+     * It is used when adding the many system account creation records at genesis startup, and when generating
+     * the staking updates that happen after midnight.
+     * If the current {@link HandleContext} (or any parent context) is rolled back, all child record builders will be reverted.
+     *
+     * @param recordBuilderClass the record type
+     * @param <T> the record type
+     * @return the new child record builder
+     * @throws NullPointerException if {@code recordBuilderClass} is {@code null}
+     * @throws IllegalArgumentException if the record builder type is unknown to the app
+     */
+    @NonNull
+    <T> T addUncheckedPrecedingChildRecordBuilder(@NonNull Class<T> recordBuilderClass);
 }
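(Editor's note: a hypothetical caller sketch for the new `addUncheckedPrecedingChildRecordBuilder`, showing why the unchecked variant exists: genesis setup may need one preceding record per system account, far more than the normal per-transaction limit. `GenesisAccountRecordBuilder`, its `accountID` method, and the surrounding names are assumptions for illustration, not taken from this patch.)

```java
// Hypothetical genesis-time caller: one preceding child record per system account, created via
// the unchecked variant so the usual preceding-record count limit is not enforced.
void externalizeGenesisAccounts(final TokenContext context, final List<Account> systemAccounts) {
    for (final Account account : systemAccounts) {
        final var recordBuilder =
                context.addUncheckedPrecedingChildRecordBuilder(GenesisAccountRecordBuilder.class); // assumed type
        recordBuilder.accountID(account.accountId()); // assumed builder method
    }
}
```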
diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/HollowAccountFinalizationSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/HollowAccountFinalizationSuite.java
index 3c8389848a04..a7ffad83dfe9 100644
--- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/HollowAccountFinalizationSuite.java
+++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/HollowAccountFinalizationSuite.java
@@ -50,7 +50,6 @@
 import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_ACCOUNT_BALANCE;
 import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_ALIAS_KEY;
 import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.MAX_CHILD_RECORDS_EXCEEDED;
-import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.REVERTED_SUCCESS;
 import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS;
 
 import com.esaulpaugh.headlong.abi.Tuple;
@@ -126,6 +125,7 @@ public List getSpecsInSuite() {
                 precompileTransferFromHollowAccountWithNeededSigFailsAndDoesNotFinalizeAccount());
     }
 
+    @HapiTest
     private HapiSpec hollowAccountCompletionWithTokenTransfer() {
         final var fungibleToken = "fungibleToken";
         final AtomicReference<TokenID> ftId = new AtomicReference<>();
@@ -220,6 +220,7 @@ private HapiSpec hollowAccountCompletionWithTokenTransfer() {
                 }));
     }
 
+    @HapiTest
     private HapiSpec hollowAccountCompletionWithTokenAssociation() {
         return defaultHapiSpec("HollowAccountCompletionWithTokenAssociation")
                 .given(
@@ -249,6 +250,7 @@ private HapiSpec hollowAccountCompletionWithTokenAssociation() {
                 }));
     }
 
+    @HapiTest
     private HapiSpec hollowAccountFinalizationWhenAccountNotPresentInPreHandle() {
         final var ECDSA_2 = "ECDSA_2";
         return defaultHapiSpec("hollowAccountFinalizationWhenAccountNotPresentInPreHandle")
@@ -291,6 +293,7 @@ private HapiSpec hollowAccountFinalizationWhenAccountNotPresentInPreHandle() {
                 }));
     }
 
+    @HapiTest
     private HapiSpec hollowAccountFinalizationOccursOnlyOnceWhenMultipleFinalizationTensComeInAtTheSameTime() {
         final var ECDSA_2 = "ECDSA_2";
         return defaultHapiSpec("hollowAccountFinalizationOccursOnlyOnceWhenMultipleFinalizationTensComeInAtTheSameTime")
@@ -343,6 +346,7 @@ private HapiSpec hollowAccountFinalizationOccursOnlyOnceWhenMultipleFinalization
                 }));
     }
 
+    @HapiTest
     private HapiSpec hollowAccountCompletionWithCryptoTransfer() {
         return defaultHapiSpec("HollowAccountCompletionWithCryptoTransfer")
                 .given(newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE))
@@ -385,6 +389,7 @@ private HapiSpec hollowAccountCompletionWithCryptoTransfer() {
                 }));
     }
 
+    @HapiTest
     private HapiSpec hollowAccountCompletionWhenHollowAccountSigRequiredInOtherReqSigs() {
         return defaultHapiSpec("hollowAccountCompletionWhenHollowAccountSigRequiredInOtherReqSigs")
                 .given(
@@ -435,6 +440,7 @@ private HapiSpec hollowAccountCompletionWhenHollowAccountSigRequiredInOtherReqSi
                 }));
     }
 
+    @HapiTest
     private HapiSpec hollowAccountCompletionWithContractCreate() {
         final var CONTRACT = "CreateTrivial";
         return defaultHapiSpec("HollowAccountCompletionWithContractCreate")
@@ -464,6 +470,7 @@ private HapiSpec hollowAccountCompletionWithContractCreate() {
                 }));
     }
 
+    @HapiTest
     private HapiSpec hollowAccountCompletionWithContractCall() {
         final var DEPOSIT_AMOUNT = 1000;
         return defaultHapiSpec("HollowAccountCompletionWithContractCall")
@@ -533,6 +540,7 @@ private HapiSpec hollowAccountCompletionViaNonReqSigIsNotAllowed() {
                 }));
     }
 
+    @HapiTest
     private HapiSpec tooManyHollowAccountFinalizationsShouldFail() {
         final var ECDSA_KEY_1 = "ECDSA_KEY_1";
         final var ECDSA_KEY_2 = "ECDSA_KEY_2";
@@ -680,6 +688,7 @@ private HapiSpec completedHollowAccountsTransfer() {
                 }));
     }
 
+    @HapiTest
     private HapiSpec txnWith2CompletionsAndAnother2PrecedingChildRecords() {
         final var ecdsaKey2 = "ecdsaKey2";
         final var recipientKey = "recipient";
@@ -723,9 +732,8 @@ private HapiSpec txnWith2CompletionsAndAnother2PrecedingChildRecords() {
                                 TRANSFER_TXN_2,
                                 MAX_CHILD_RECORDS_EXCEEDED,
                                 recordWith().status(SUCCESS),
-                                recordWith().status(SUCCESS),
-                                recordWith().status(REVERTED_SUCCESS));
-                        // // assert that the payer has been finalized
+                                recordWith().status(SUCCESS));
+                        // assert that the payer has been finalized
                         final var ecdsaKey = spec.registry().getKey(SECP_256K1_SOURCE_KEY);
                         final var payerEvmAddress = ByteString.copyFrom(recoverAddressFromPubKey(
                                 ecdsaKey.getECDSASecp256K1().toByteArray()));
@@ -744,6 +752,7 @@ private HapiSpec txnWith2CompletionsAndAnother2PrecedingChildRecords() {
                 }));
     }
 
+    @HapiTest
     private HapiSpec hollowPayerAndOtherReqSignerBothGetCompletedInASingleTransaction() {
         final var ecdsaKey2 = "ecdsaKey2";
         final var recipientKey = "recipient";
@@ -802,6 +811,7 @@ private HapiSpec hollowPayerAndOtherReqSignerBothGetCompletedInASingleTransactio
                 }));
     }
 
+    @HapiTest
     private HapiSpec hollowAccountCompletionIsPersistedEvenIfTxnFails() {
         return defaultHapiSpec("hollowAccountCompletionIsPersistedEvenIfTxnFails")
                 .given(newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE))