From e3d739453c01c21a4539830ad1778bc33cc3ce54 Mon Sep 17 00:00:00 2001 From: Michael Tinker Date: Fri, 9 Jun 2023 14:45:37 -0500 Subject: [PATCH 01/70] Make no-min fractional fees viewable via precompiles (#7028) Signed-off-by: Michael Tinker --- .../app/service/mono/utils/EvmTokenUtil.java | 3 +- .../service/mono/utils/EvmTokenUtilTest.java | 52 +++++++++++++++++++ .../precompile/TokenInfoHTSSuite.java | 38 +++++++++++--- .../precompile/HTSPrecompileResult.java | 3 +- 4 files changed, 87 insertions(+), 9 deletions(-) create mode 100644 hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/utils/EvmTokenUtilTest.java diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/utils/EvmTokenUtil.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/utils/EvmTokenUtil.java index 414cec224db0..fb2017928402 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/utils/EvmTokenUtil.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/utils/EvmTokenUtil.java @@ -162,7 +162,8 @@ public static void extractFees( evmCustomFee.setFixedFee(fixedFee); evmCustomFees.add(evmCustomFee); - } else if (customFee.getFractionalFee().getMinimumAmount() > 0) { + } else if (customFee.getFractionalFee().getMinimumAmount() > 0 + || customFee.getFractionalFee().getFractionalAmount().getNumerator() > 0) { var fractionalFee = getFractionalFee(customFee.getFractionalFee(), feeCollector); evmCustomFee.setFractionalFee(fractionalFee); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/utils/EvmTokenUtilTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/utils/EvmTokenUtilTest.java new file mode 100644 index 000000000000..757342b458e2 --- /dev/null +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/utils/EvmTokenUtilTest.java @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.mono.utils; + +import static org.junit.jupiter.api.Assertions.*; + +import com.hederahashgraph.api.proto.java.AccountID; +import com.hederahashgraph.api.proto.java.CustomFee; +import com.hederahashgraph.api.proto.java.Fraction; +import com.hederahashgraph.api.proto.java.FractionalFee; +import java.util.ArrayList; +import java.util.List; +import org.hyperledger.besu.datatypes.Address; +import org.junit.jupiter.api.Test; + +class EvmTokenUtilTest { + @Test + void includesNonZeroFractionalFeesWithoutMinimumAmount() { + final var fracNoMinFee = CustomFee.newBuilder() + .setFractionalFee(FractionalFee.newBuilder() + .setFractionalAmount(Fraction.newBuilder() + .setNumerator(1) + .setDenominator(2) + .build()) + .build()) + .setFeeCollectorAccountId(AccountID.newBuilder().setAccountNum(0x1234)) + .build(); + final List accum = + new ArrayList<>(); + EvmTokenUtil.extractFees(fracNoMinFee, accum); + assertEquals(1, accum.size()); + final var expectedEvmFee = new com.hedera.node.app.service.evm.store.contracts.precompile.codec.CustomFee(); + expectedEvmFee.setFractionalFee( + new com.hedera.node.app.service.evm.store.contracts.precompile.codec.FractionalFee( + 1, 2, 0, 0, false, Address.fromHexString("0x1234"))); + assertEquals(expectedEvmFee, accum.get(0)); + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenInfoHTSSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenInfoHTSSuite.java index 86cf408ce448..a0d64d82aebe 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenInfoHTSSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenInfoHTSSuite.java @@ -206,6 +206,9 @@ private HapiSpec happyPathGetTokenInfo() { .feeScheduleKey(FEE_SCHEDULE_KEY) .pauseKey(PAUSE_KEY) .withCustom(fixedHbarFee(500L, HTS_COLLECTOR)) + // Include a fractional fee with no minimum to collect + .withCustom(fractionalFee( + NUMERATOR, DENOMINATOR * 2L, 0, OptionalLong.empty(), TOKEN_TREASURY)) .withCustom(fractionalFee( NUMERATOR, DENOMINATOR, @@ -301,6 +304,9 @@ private HapiSpec happyPathUpdateTokenInfoAndGetLatestInfo() { .feeScheduleKey(FEE_SCHEDULE_KEY) .pauseKey(PAUSE_KEY) .withCustom(fixedHbarFee(500L, HTS_COLLECTOR)) + // Include a fractional fee with no minimum to collect + .withCustom(fractionalFee( + NUMERATOR, DENOMINATOR * 2L, 0, OptionalLong.empty(), TOKEN_TREASURY)) .withCustom(fractionalFee( NUMERATOR, DENOMINATOR, @@ -396,6 +402,9 @@ private HapiSpec happyPathGetFungibleTokenInfo() { .feeScheduleKey(FEE_SCHEDULE_KEY) .pauseKey(PAUSE_KEY) .withCustom(fixedHbarFee(500L, HTS_COLLECTOR)) + // Also include a fractional fee with no minimum to collect + .withCustom(fractionalFee( + NUMERATOR, DENOMINATOR * 2L, 0, OptionalLong.empty(), TOKEN_TREASURY)) .withCustom(fractionalFee( NUMERATOR, DENOMINATOR, @@ -429,9 +438,6 @@ private HapiSpec happyPathGetFungibleTokenInfo() { allRunFor( spec, - getTxnRecord(FUNGIBLE_TOKEN_INFO_TXN) - .andAllChildRecords() - .logged(), childRecordsCheck( FUNGIBLE_TOKEN_INFO_TXN, SUCCESS, @@ -490,6 +496,9 @@ private HapiSpec happyPathUpdateFungibleTokenInfoAndGetLatestInfo() { .feeScheduleKey(FEE_SCHEDULE_KEY) .pauseKey(PAUSE_KEY) .withCustom(fixedHbarFee(500L, HTS_COLLECTOR)) + // Include a fractional fee with no minimum to collect + .withCustom(fractionalFee( + NUMERATOR, DENOMINATOR * 2L, 0, OptionalLong.empty(), 
TOKEN_TREASURY)) .withCustom(fractionalFee( NUMERATOR, DENOMINATOR, @@ -968,6 +977,9 @@ private HapiSpec happyPathGetTokenCustomFees() { .maxSupply(MAX_SUPPLY) .initialSupply(500L) .withCustom(fixedHbarFee(500L, HTS_COLLECTOR)) + // Include a fractional fee with no minimum to collect + .withCustom(fractionalFee( + NUMERATOR, DENOMINATOR * 2L, 0, OptionalLong.empty(), TOKEN_TREASURY)) .withCustom(fractionalFee( NUMERATOR, DENOMINATOR, @@ -975,7 +987,7 @@ private HapiSpec happyPathGetTokenCustomFees() { OptionalLong.of(MAXIMUM_TO_COLLECT), TOKEN_TREASURY)) .via(CREATE_TXN), - getTokenInfo(PRIMARY_TOKEN_NAME).via(GET_TOKEN_INFO_TXN)) + getTokenInfo(PRIMARY_TOKEN_NAME).via(GET_TOKEN_INFO_TXN).logged()) .when(withOpContext((spec, opLog) -> allRunFor( spec, contractCall( @@ -1002,7 +1014,7 @@ private HapiSpec happyPathGetTokenCustomFees() { .contractCallResult(htsPrecompileResult() .forFunction(FunctionType.HAPI_GET_TOKEN_CUSTOM_FEES) .withStatus(SUCCESS) - .withCustomFees(getCustomFees(spec)))))))); + .withCustomFees(getExpectedCustomFees(spec)))))))); } private HapiSpec happyPathGetNonFungibleTokenCustomFees() { @@ -1220,7 +1232,7 @@ private TokenInfo getTokenInfoStructForFungibleToken( final long expirySecond) { final var autoRenewAccount = spec.registry().getAccountID(AUTO_RENEW_ACCOUNT); - final ArrayList customFees = getCustomFees(spec); + final ArrayList customFees = getExpectedCustomFees(spec); return TokenInfo.newBuilder() .setLedgerId(fromString("0x03")) @@ -1248,13 +1260,24 @@ private TokenInfo getTokenInfoStructForFungibleToken( } @NotNull - private ArrayList getCustomFees(final HapiSpec spec) { + private ArrayList getExpectedCustomFees(final HapiSpec spec) { final var fixedFee = FixedFee.newBuilder().setAmount(500L).build(); final var customFixedFee = CustomFee.newBuilder() .setFixedFee(fixedFee) .setFeeCollectorAccountId(spec.registry().getAccountID(HTS_COLLECTOR)) .build(); + final var firstFraction = Fraction.newBuilder() + .setNumerator(NUMERATOR) + .setDenominator(DENOMINATOR * 2L) + .build(); + final var firstFractionalFee = + FractionalFee.newBuilder().setFractionalAmount(firstFraction).build(); + final var firstCustomFractionalFee = CustomFee.newBuilder() + .setFractionalFee(firstFractionalFee) + .setFeeCollectorAccountId(spec.registry().getAccountID(TOKEN_TREASURY)) + .build(); + final var fraction = Fraction.newBuilder() .setNumerator(NUMERATOR) .setDenominator(DENOMINATOR) @@ -1271,6 +1294,7 @@ private ArrayList getCustomFees(final HapiSpec spec) { final var customFees = new ArrayList(); customFees.add(customFixedFee); + customFees.add(firstCustomFractionalFee); customFees.add(customFractionalFee); return customFees; } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/utils/contracts/precompile/HTSPrecompileResult.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/utils/contracts/precompile/HTSPrecompileResult.java index 69ef6b9caa05..e6c22d0623a7 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/utils/contracts/precompile/HTSPrecompileResult.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/utils/contracts/precompile/HTSPrecompileResult.java @@ -407,7 +407,8 @@ private void extractFees( final var feeCollector = expandByteArrayTo32Length(Utils.asAddress(customFee.getFeeCollectorAccountId())); if (customFee.getFixedFee().getAmount() > 0) { fixedFees.add(getFixedFeeTuple(customFee.getFixedFee(), feeCollector)); - } else if 
(customFee.getFractionalFee().getMinimumAmount() > 0) { + } else if (customFee.getFractionalFee().getMinimumAmount() > 0 + || customFee.getFractionalFee().getFractionalAmount().getNumerator() > 0) { fractionalFees.add(getFractionalFeeTuple(customFee.getFractionalFee(), feeCollector)); } else if (customFee.getRoyaltyFee().getExchangeValueFraction().getNumerator() > 0) { royaltyFees.add(getRoyaltyFeeTuple(customFee.getRoyaltyFee(), feeCollector)); From c722375169e64bab418448463b8cb5a33b9d6435 Mon Sep 17 00:00:00 2001 From: Cody Littley <56973212+cody-littley@users.noreply.github.com> Date: Fri, 9 Jun 2023 14:48:16 -0500 Subject: [PATCH 02/70] Fix null pointer exception in critical quorum (#7029) Signed-off-by: Cody Littley --- .../com/swirlds/platform/components/CriticalQuorumImpl.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/CriticalQuorumImpl.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/CriticalQuorumImpl.java index 37133270a26d..6bd274ecfc7e 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/CriticalQuorumImpl.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/CriticalQuorumImpl.java @@ -127,6 +127,9 @@ public CriticalQuorumImpl( */ @Override public boolean isInCriticalQuorum(@Nullable final NodeId nodeId) { + if (nodeId == null) { + return false; + } return eventCounts.getOrDefault(nodeId, 0) <= threshold.get() + thresholdSoftening; } From c159e01649882ba07be3d1813e3a91509517eec6 Mon Sep 17 00:00:00 2001 From: Richard Bair Date: Fri, 9 Jun 2023 13:01:44 -0700 Subject: [PATCH 03/70] Add JUL bridge for log4j (#7026) Signed-off-by: Richard Bair --- .../com.hedera.hashgraph.jpms-modules.gradle.kts | 1 + .../src/main/java/com/hedera/node/app/Hedera.java | 5 +++++ .../main/java/com/hedera/node/app/ServicesMain.java | 5 +++++ settings.gradle.kts | 10 +++++----- 4 files changed, 16 insertions(+), 5 deletions(-) diff --git a/buildSrc/src/main/kotlin/com.hedera.hashgraph.jpms-modules.gradle.kts b/buildSrc/src/main/kotlin/com.hedera.hashgraph.jpms-modules.gradle.kts index e0241132ebb2..48e616210e32 100644 --- a/buildSrc/src/main/kotlin/com.hedera.hashgraph.jpms-modules.gradle.kts +++ b/buildSrc/src/main/kotlin/com.hedera.hashgraph.jpms-modules.gradle.kts @@ -227,6 +227,7 @@ extraJavaModuleInfo { knownModule("org.apache.logging.log4j:log4j-api", "org.apache.logging.log4j") knownModule("org.apache.logging.log4j:log4j-core", "org.apache.logging.log4j.core") knownModule("org.apache.logging.log4j:log4j-slf4j", "org.apache.logging.log4j.slf4j") + knownModule("org.apache.logging.log4j:log4j-jul", "org.apache.logging.log4j.jul") knownModule("org.jetbrains.kotlin:kotlin-stdlib-jdk8", "kotlin.stdlib.jdk8") knownModule("org.slf4j:slf4j-api", "org.slf4j") diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java index 9c73b9684ec7..3b7e167512ab 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java @@ -132,6 +132,11 @@ * controls execution of the node. If you want to understand our system, this is a great place to start! 
*/ public final class Hedera implements SwirldMain { + static { + // Helidon uses java.util.logging, so we need to set up the bridge before it has a chance to log anything + System.setProperty("java.util.logging.manager", "org.apache.logging.log4j.jul.LogManager"); + } + private static final Logger logger = LogManager.getLogger(Hedera.class); // This should come from configuration, NOT be hardcoded. public static final int MAX_SIGNED_TXN_SIZE = 6144; diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/ServicesMain.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/ServicesMain.java index ba7693c2ffbb..2f58525def65 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/ServicesMain.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/ServicesMain.java @@ -37,6 +37,11 @@ * {@link Hedera} is used; otherwise, {@link MonoServicesMain} is used. */ public class ServicesMain implements SwirldMain { + static { + // Helidon uses java.util.logging, so we need to set up the bridge before it has a chance to log anything + System.setProperty("java.util.logging.manager", "org.apache.logging.log4j.jul.LogManager"); + } + private static final Logger logger = LogManager.getLogger(ServicesMain.class); /** diff --git a/settings.gradle.kts b/settings.gradle.kts index 475b44b0cdbc..8f72c56a335a 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -123,9 +123,8 @@ dependencyResolutionManagement { @Suppress("UnstableApiUsage") versionCatalogs { // The libs of this catalog are the **ONLY** ones that are authorized to be part of the - // runtime - // distribution. These libs can be depended on during compilation, or bundled as part of - // runtime. + // runtime distribution. These libs can be depended on during compilation, or bundled as + // part of runtime. 
create("libs") { val besuNativeVersion = "0.6.1" val besuVersion = "23.1.2" @@ -151,7 +150,7 @@ dependencyResolutionManagement { version("com.google.common", "31.1-jre") version("com.google.protobuf", "3.19.4") version("com.google.protobuf.util", "3.19.2") - version("com.hedera.pbj.runtime", "0.6.0") + version("com.hedera.pbj.runtime", "0.6.1") version("com.sun.jna", "5.12.1") version("com.swirlds.base", swirldsVersion) version("com.swirlds.cli", swirldsVersion) @@ -187,6 +186,7 @@ dependencyResolutionManagement { version("org.apache.commons.lang3", "3.12.0") version("org.apache.logging.log4j", log4jVersion) version("org.apache.logging.log4j.core", log4jVersion) + version("org.apache.logging.log4j.jul", log4jVersion) version("org.assertj.core", "3.23.1") version("org.bouncycastle.pkix", bouncycastleVersion) version("org.bouncycastle.provider", bouncycastleVersion) @@ -214,7 +214,7 @@ dependencyResolutionManagement { version("hapi-proto", hapiProtoVersion) - plugin("pbj", "com.hedera.pbj.pbj-compiler").version("0.6.0") + plugin("pbj", "com.hedera.pbj.pbj-compiler").version("0.6.1") } } } From d1bee90d3c7c25a776e75e72c274d7e8b44e6c38 Mon Sep 17 00:00:00 2001 From: Neeharika Sompalli <52669918+Neeharika-Sompalli@users.noreply.github.com> Date: Fri, 9 Jun 2023 16:05:27 -0500 Subject: [PATCH 04/70] Move spender validation only for approvals (#7030) Signed-off-by: Neeharika-Sompalli --- .../data/config/api-permission.properties | 1 + .../txns/crypto/ApproveAllowanceLogic.java | 24 +++-- .../crypto/CryptoApproveAllowanceSuite.java | 99 ++++++++++++++++++- .../crypto/CryptoDeleteAllowanceSuite.java | 59 ++++++++++- 4 files changed, 175 insertions(+), 8 deletions(-) diff --git a/hedera-node/data/config/api-permission.properties b/hedera-node/data/config/api-permission.properties index 13e69171ba15..2044f605b1f1 100644 --- a/hedera-node/data/config/api-permission.properties +++ b/hedera-node/data/config/api-permission.properties @@ -9,6 +9,7 @@ getAccountRecords=0-* getTxRecordByTxID=0-* getTransactionReceipts=0-* approveAllowances=0-* +deleteAllowances=0-* utilPrng=0-* # File createFile=0-* diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/txns/crypto/ApproveAllowanceLogic.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/txns/crypto/ApproveAllowanceLogic.java index eb80606c5337..55572912d640 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/txns/crypto/ApproveAllowanceLogic.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/txns/crypto/ApproveAllowanceLogic.java @@ -101,14 +101,16 @@ private void applyCryptoAllowances(final List cryptoAllowances, final var cryptoMap = accountToApprove.getMutableCryptoAllowances(); final var spender = Id.fromGrpcAccount(allowance.getSpender()); - accountStore.loadAccountOrFailWith(spender, INVALID_ALLOWANCE_SPENDER_ID); - final var amount = allowance.getAmount(); if (cryptoMap.containsKey(spender.asEntityNum()) && amount == 0) { + // spender need not be validated as being a valid account when removing allowances, + // since it might be deleted and allowance is being removed by owner if it exists in map. 
removeEntity(cryptoMap, spender, accountToApprove); } if (amount > 0) { + // To add allowances spender should be validated as being a valid account + accountStore.loadAccountOrFailWith(spender, INVALID_ALLOWANCE_SPENDER_ID); cryptoMap.put(spender.asEntityNum(), amount); validateAllowanceLimitsOn(accountToApprove, dynamicProperties.maxAllowanceLimitPerAccount()); entitiesChanged.put(accountToApprove.getId().num(), accountToApprove); @@ -140,16 +142,19 @@ private void applyFungibleTokenAllowances(final List tokenAllowa final var tokensMap = accountToApprove.getMutableFungibleTokenAllowances(); final var spender = Id.fromGrpcAccount(allowance.getSpender()); - accountStore.loadAccountOrFailWith(spender, INVALID_ALLOWANCE_SPENDER_ID); - final var amount = allowance.getAmount(); final var tokenId = allowance.getTokenId(); final var key = FcTokenAllowanceId.from(EntityNum.fromTokenId(tokenId), spender.asEntityNum()); if (tokensMap.containsKey(key) && amount == 0) { + // spender need not be validated as being a valid account when removing allowances, + // since it might be deleted and allowance is being removed by owner if it exists in map. removeTokenEntity(key, tokensMap, accountToApprove); } if (amount > 0) { + // To add allowances spender should be validated as being a valid account + accountStore.loadAccountOrFailWith(spender, INVALID_ALLOWANCE_SPENDER_ID); + tokensMap.put(key, amount); validateAllowanceLimitsOn(accountToApprove, dynamicProperties.maxAllowanceLimitPerAccount()); entitiesChanged.put(accountToApprove.getId().num(), accountToApprove); @@ -173,20 +178,27 @@ protected void applyNftAllowances(final List nftAllowances, final final var owner = allowance.getOwner(); final var approvingAccount = fetchOwnerAccount(owner, payerAccount, accountStore, entitiesChanged); final var spenderId = Id.fromGrpcAccount(allowance.getSpender()); - accountStore.loadAccountOrFailWith(spenderId, INVALID_ALLOWANCE_SPENDER_ID); - final var tokenId = Id.fromGrpcToken(allowance.getTokenId()); + if (allowance.hasApprovedForAll()) { final var approveForAllNfts = approvingAccount.getMutableApprovedForAllNfts(); final var key = FcTokenAllowanceId.from(tokenId.asEntityNum(), spenderId.asEntityNum()); if (allowance.getApprovedForAll().getValue()) { + // Validate the spender/operator account + accountStore.loadAccountOrFailWith(spenderId, INVALID_ALLOWANCE_SPENDER_ID); approveForAllNfts.add(key); } else { + // Need not validate anything here to revoke the approval approveForAllNfts.remove(key); } validateAllowanceLimitsOn(approvingAccount, dynamicProperties.maxAllowanceLimitPerAccount()); } + if (allowance.getSerialNumbersCount() > 0) { + // To add allowance for any serials, need to validate spender + accountStore.loadAccountOrFailWith(spenderId, INVALID_ALLOWANCE_SPENDER_ID); + } + final var nfts = updateSpender( tokenStore, approvingAccount.getId(), spenderId, tokenId, allowance.getSerialNumbersList()); for (final var nft : nfts) { diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoApproveAllowanceSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoApproveAllowanceSuite.java index ee522f1f21d6..4e8a9cb6cf3b 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoApproveAllowanceSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoApproveAllowanceSuite.java @@ -136,7 +136,104 @@ public List getSpecsInSuite() { 
approveForAllSpenderCanDelegateOnNFT(), duplicateEntriesGetsReplacedWithDifferentTxn(), duplicateKeysAndSerialsInSameTxnDoesntThrow(), - scheduledCryptoApproveAllowanceWorks()); + scheduledCryptoApproveAllowanceWorks(), + canDeleteAllowanceFromDeletedSpender()); + } + + private HapiSpec canDeleteAllowanceFromDeletedSpender() { + return defaultHapiSpec("canDeleteAllowanceFromDeletedSpender") + .given( + newKeyNamed(SUPPLY_KEY), + cryptoCreate(OWNER).balance(ONE_HUNDRED_HBARS).maxAutomaticTokenAssociations(10), + cryptoCreate(SPENDER).balance(ONE_HUNDRED_HBARS), + cryptoCreate(TOKEN_TREASURY) + .balance(100 * ONE_HUNDRED_HBARS) + .maxAutomaticTokenAssociations(10), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .supplyType(TokenSupplyType.FINITE) + .supplyKey(SUPPLY_KEY) + .maxSupply(1000L) + .initialSupply(10L) + .treasury(TOKEN_TREASURY), + tokenCreate(NON_FUNGIBLE_TOKEN) + .maxSupply(10L) + .initialSupply(0) + .supplyType(TokenSupplyType.FINITE) + .tokenType(NON_FUNGIBLE_UNIQUE) + .supplyKey(SUPPLY_KEY) + .treasury(TOKEN_TREASURY), + tokenAssociate(OWNER, FUNGIBLE_TOKEN), + tokenAssociate(OWNER, NON_FUNGIBLE_TOKEN), + mintToken(FUNGIBLE_TOKEN, 500L).via(FUNGIBLE_TOKEN_MINT_TXN), + mintToken( + NON_FUNGIBLE_TOKEN, + List.of( + ByteString.copyFromUtf8("a"), + ByteString.copyFromUtf8("b"), + ByteString.copyFromUtf8("c")))) + .when( + cryptoApproveAllowance() + .payingWith(OWNER) + .addCryptoAllowance(OWNER, SPENDER, 100L) + .addTokenAllowance(OWNER, FUNGIBLE_TOKEN, SPENDER, 1) + .addNftAllowance(OWNER, NON_FUNGIBLE_TOKEN, SPENDER, true, List.of()), + getAccountDetails(OWNER) + .payingWith(GENESIS) + .has(accountDetailsWith() + .cryptoAllowancesCount(1) + .tokenAllowancesCount(1) + .nftApprovedForAllAllowancesCount(1) + .cryptoAllowancesContaining(SPENDER, 100L) + .tokenAllowancesContaining(FUNGIBLE_TOKEN, SPENDER, 1)), + cryptoDelete(SPENDER), + // removing fungible allowances should be possible even if the + // spender is deleted + cryptoApproveAllowance() + .payingWith(OWNER) + .addCryptoAllowance(OWNER, SPENDER, 0) + .blankMemo(), + getAccountDetails(OWNER) + .payingWith(GENESIS) + .has(accountDetailsWith() + .cryptoAllowancesCount(0) + .tokenAllowancesCount(1) + .nftApprovedForAllAllowancesCount(1) + .tokenAllowancesContaining(FUNGIBLE_TOKEN, SPENDER, 1)), + cryptoApproveAllowance() + .payingWith(OWNER) + .addTokenAllowance(OWNER, FUNGIBLE_TOKEN, SPENDER, 0) + .blankMemo(), + getAccountDetails(OWNER) + .payingWith(GENESIS) + .has(accountDetailsWith() + .cryptoAllowancesCount(0) + .tokenAllowancesCount(0) + .nftApprovedForAllAllowancesCount(1)), + // It should not be possible to remove approveForAllNftAllowance + // and also add allowance to serials + cryptoApproveAllowance() + .payingWith(OWNER) + .addNftAllowance(OWNER, NON_FUNGIBLE_TOKEN, SPENDER, false, List.of(1L, 2L)) + .hasKnownStatus(INVALID_ALLOWANCE_SPENDER_ID), + getAccountDetails(OWNER) + .payingWith(GENESIS) + .has(accountDetailsWith() + .cryptoAllowancesCount(0) + .tokenAllowancesCount(0) + .nftApprovedForAllAllowancesCount(1)), + getTokenNftInfo(NON_FUNGIBLE_TOKEN, 1L).hasNoSpender(), + getTokenNftInfo(NON_FUNGIBLE_TOKEN, 2L).hasNoSpender(), + cryptoApproveAllowance() + .payingWith(OWNER) + .addNftAllowance(OWNER, NON_FUNGIBLE_TOKEN, SPENDER, false, List.of()), + getAccountDetails(OWNER) + .payingWith(GENESIS) + .has(accountDetailsWith() + .cryptoAllowancesCount(0) + .tokenAllowancesCount(0) + .nftApprovedForAllAllowancesCount(0))) + .then(); } private HapiSpec 
duplicateKeysAndSerialsInSameTxnDoesntThrow() { diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoDeleteAllowanceSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoDeleteAllowanceSuite.java index 7f42bee80ebb..e887835a0fbe 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoDeleteAllowanceSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoDeleteAllowanceSuite.java @@ -82,10 +82,67 @@ public List getSpecsInSuite() { exceedsTransactionLimit(), succeedsWhenTokenPausedFrozenKycRevoked(), feesAsExpected(), - duplicateEntriesDoesntThrow() + duplicateEntriesDoesntThrow(), + canDeleteAllowanceForDeletedSpender() }); } + private HapiSpec canDeleteAllowanceForDeletedSpender() { + final String owner = "owner"; + final String spender = "spender"; + final String nft = "nft"; + return defaultHapiSpec("canDeleteAllowanceForDeletedSpender") + .given( + newKeyNamed("supplyKey"), + cryptoCreate(owner).balance(ONE_HUNDRED_HBARS).maxAutomaticTokenAssociations(10), + cryptoCreate(spender).balance(ONE_HUNDRED_HBARS), + cryptoCreate(TOKEN_TREASURY) + .balance(100 * ONE_HUNDRED_HBARS) + .maxAutomaticTokenAssociations(10) + .payingWith(GENESIS), + tokenCreate(nft) + .maxSupply(10L) + .initialSupply(0) + .supplyType(TokenSupplyType.FINITE) + .tokenType(NON_FUNGIBLE_UNIQUE) + .supplyKey("supplyKey") + .treasury(TOKEN_TREASURY) + .payingWith(GENESIS), + tokenAssociate(owner, nft), + mintToken( + nft, + List.of( + ByteString.copyFromUtf8("a"), + ByteString.copyFromUtf8("b"), + ByteString.copyFromUtf8("c"))) + .via("nftTokenMint") + .payingWith(GENESIS), + cryptoTransfer(movingUnique(nft, 1L, 2L, 3L).between(TOKEN_TREASURY, owner)) + .payingWith(GENESIS)) + .when( + cryptoApproveAllowance() + .payingWith(owner) + .addNftAllowance(owner, nft, spender, true, List.of(3L)) + .via("otherAdjustTxn"), + getAccountDetails(owner) + .payingWith(GENESIS) + .has(accountDetailsWith().nftApprovedForAllAllowancesCount(1)), + getTokenNftInfo(nft, 3L).hasSpenderID(spender)) + .then( + cryptoDelete(spender), + cryptoDeleteAllowance() + .payingWith(owner) + .addNftDeleteAllowance(owner, nft, List.of(3L)) + .blankMemo() + .via("cryptoDeleteAllowanceTxn") + .logged(), + getTxnRecord("cryptoDeleteAllowanceTxn").logged(), + getAccountDetails(owner) + .payingWith(GENESIS) + .has(accountDetailsWith().nftApprovedForAllAllowancesCount(1)), + getTokenNftInfo(nft, 3L).hasNoSpender()); + } + private HapiSpec duplicateEntriesDoesntThrow() { final String owner = "owner"; final String spender = "spender"; From 92deb7d3e09d7bb16773aba46645c6f348a05ff9 Mon Sep 17 00:00:00 2001 From: Lev Povolotsky <16233475+povolev15@users.noreply.github.com> Date: Fri, 9 Jun 2023 19:06:02 -0400 Subject: [PATCH 05/70] implement get Record query (#7032) Signed-off-by: Lev Povolotsky --- .../NetworkGetAccountDetailsHandler.java | 13 - .../NetworkTransactionGetReceiptHandler.java | 13 - .../NetworkTransactionGetRecordHandler.java | 76 ++++- .../handlers/NetworkAdminHandlerTestBase.java | 13 +- ...tworkTransactionGetReceiptHandlerTest.java | 19 -- ...etworkTransactionGetRecordHandlerTest.java | 265 ++++++++++++++++++ 6 files changed, 350 insertions(+), 49 deletions(-) create mode 100644 hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetRecordHandlerTest.java diff --git 
a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkGetAccountDetailsHandler.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkGetAccountDetailsHandler.java index 7c0d8d04d96d..05d63768f6ed 100644 --- a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkGetAccountDetailsHandler.java +++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkGetAccountDetailsHandler.java @@ -19,8 +19,6 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.FAIL_INVALID; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; -import static com.hedera.hapi.node.base.ResponseType.ANSWER_ONLY; -import static com.hedera.hapi.node.base.ResponseType.ANSWER_STATE_PROOF; import static com.hedera.hapi.node.base.ResponseType.COST_ANSWER; import static com.hedera.node.app.spi.validation.Validations.mustExist; import static java.util.Objects.requireNonNull; @@ -30,7 +28,6 @@ import com.hedera.hapi.node.base.HederaFunctionality; import com.hedera.hapi.node.base.QueryHeader; import com.hedera.hapi.node.base.ResponseHeader; -import com.hedera.hapi.node.base.ResponseType; import com.hedera.hapi.node.base.Timestamp; import com.hedera.hapi.node.base.TokenFreezeStatus; import com.hedera.hapi.node.base.TokenID; @@ -91,16 +88,6 @@ public Response createEmptyResponse(@NonNull final ResponseHeader header) { return Response.newBuilder().accountDetails(response).build(); } - @Override - public boolean requiresNodePayment(@NonNull ResponseType responseType) { - return ANSWER_ONLY == responseType || ANSWER_STATE_PROOF == responseType; - } - - @Override - public boolean needsAnswerOnlyCost(@NonNull ResponseType responseType) { - return COST_ANSWER == responseType; - } - @Override public void validate(@NonNull final QueryContext context) throws PreCheckException { requireNonNull(context); diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetReceiptHandler.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetReceiptHandler.java index 1e36f40a2830..595e8e06f620 100644 --- a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetReceiptHandler.java +++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetReceiptHandler.java @@ -19,8 +19,6 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TRANSACTION_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; import static com.hedera.hapi.node.base.ResponseCodeEnum.RECEIPT_NOT_FOUND; -import static com.hedera.hapi.node.base.ResponseType.ANSWER_ONLY; -import static com.hedera.hapi.node.base.ResponseType.ANSWER_STATE_PROOF; import static com.hedera.hapi.node.base.ResponseType.COST_ANSWER; import static com.hedera.node.app.spi.validation.Validations.mustExist; import static java.util.Objects.requireNonNull; @@ -28,7 +26,6 @@ import com.hedera.hapi.node.base.HederaFunctionality; import com.hedera.hapi.node.base.QueryHeader; import 
com.hedera.hapi.node.base.ResponseHeader; -import com.hedera.hapi.node.base.ResponseType; import com.hedera.hapi.node.base.TransactionID; import com.hedera.hapi.node.transaction.Query; import com.hedera.hapi.node.transaction.Response; @@ -68,16 +65,6 @@ public Response createEmptyResponse(@NonNull final ResponseHeader header) { return Response.newBuilder().transactionGetReceipt(response).build(); } - @Override - public boolean requiresNodePayment(@NonNull ResponseType responseType) { - return ANSWER_ONLY == responseType || ANSWER_STATE_PROOF == responseType; - } - - @Override - public boolean needsAnswerOnlyCost(@NonNull ResponseType responseType) { - return COST_ANSWER == responseType; - } - @Override public void validate(@NonNull final QueryContext context) throws PreCheckException { requireNonNull(context); diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetRecordHandler.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetRecordHandler.java index 7e4aa127e557..04638aaf53a0 100644 --- a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetRecordHandler.java +++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetRecordHandler.java @@ -16,18 +16,30 @@ package com.hedera.node.app.service.networkadmin.impl.handlers; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TRANSACTION_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.RECORD_NOT_FOUND; +import static com.hedera.hapi.node.base.ResponseType.COST_ANSWER; +import static com.hedera.node.app.spi.validation.Validations.mustExist; import static java.util.Objects.requireNonNull; +import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.HederaFunctionality; import com.hedera.hapi.node.base.QueryHeader; +import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.hapi.node.base.ResponseHeader; +import com.hedera.hapi.node.base.TransactionID; import com.hedera.hapi.node.transaction.Query; import com.hedera.hapi.node.transaction.Response; import com.hedera.hapi.node.transaction.TransactionGetRecordResponse; +import com.hedera.hapi.node.transaction.TransactionRecord; +import com.hedera.node.app.spi.records.RecordCache; import com.hedera.node.app.spi.workflows.PaidQueryHandler; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.QueryContext; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.List; import javax.inject.Inject; import javax.inject.Singleton; @@ -58,13 +70,73 @@ public Response createEmptyResponse(@NonNull final ResponseHeader header) { @Override public void validate(@NonNull final QueryContext context) throws PreCheckException { requireNonNull(context); - throw new UnsupportedOperationException("Not implemented"); + final var op = context.query().transactionGetRecordOrThrow(); + + // The transaction ID must be specified + if (!op.hasTransactionID()) throw new PreCheckException(INVALID_TRANSACTION_ID); + + // The record must exist for that transaction ID + final var txnId = op.transactionIDOrThrow(); + + // verify that the account id exist and not default + 
final var accountID = txnId.accountID(); + if (accountID == null || accountID.equals(AccountID.DEFAULT)) { + throw new PreCheckException(INVALID_ACCOUNT_ID); + } + + final var recordCache = context.createStore(RecordCache.class); + final var record = recordCache.getReceipt(txnId); + mustExist(record, INVALID_TRANSACTION_ID); } @Override public Response findResponse(@NonNull final QueryContext context, @NonNull final ResponseHeader header) { requireNonNull(context); requireNonNull(header); - throw new UnsupportedOperationException("Not implemented"); + final var query = context.query(); + final var op = query.transactionGetRecordOrThrow(); + final var responseBuilder = TransactionGetRecordResponse.newBuilder(); + final var transactionId = op.transactionIDOrThrow(); + final var responseType = op.headerOrElse(QueryHeader.DEFAULT).responseType(); + responseBuilder.header(header); + if (header.nodeTransactionPrecheckCode() == ResponseCodeEnum.OK && responseType != COST_ANSWER) { + final var recordCache = context.createStore(RecordCache.class); + final var transactionRecordPrimary = recordCache.getRecord(transactionId); + if (transactionRecordPrimary == null) { + responseBuilder.header(header.copyBuilder() + .nodeTransactionPrecheckCode(RECORD_NOT_FOUND) + .build()); + } else { + responseBuilder.transactionRecord(transactionRecordPrimary); + if (op.includeDuplicates()) { + final List<TransactionRecord> allTransactionRecords = recordCache.getRecords(transactionId); + + // remove the primary record from the list + final List<TransactionRecord> duplicateTransactionRecords = + allTransactionRecords.subList(1, allTransactionRecords.size()); + responseBuilder.duplicateTransactionRecords(duplicateTransactionRecords); + } + if (op.includeChildRecords()) { + responseBuilder.childTransactionRecords(transformedChildrenOf(transactionId, recordCache)); + } + } + } + + return Response.newBuilder().transactionGetRecord(responseBuilder).build(); + } + + private List<TransactionRecord> transformedChildrenOf(TransactionID transactionID, RecordCache recordCache) { + final List<TransactionRecord> children = new ArrayList<>(); + // In a transaction id if nonce is 0 it is a parent and if we have any other number it is a child + for (int nonce = 1; ; nonce++) { + var childTransactionId = transactionID.copyBuilder().nonce(nonce).build(); + var maybeChildRecord = recordCache.getRecord(childTransactionId); + if (maybeChildRecord == null) { + break; + } else { + children.add(maybeChildRecord); + } + } + return children; } } diff --git a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkAdminHandlerTestBase.java b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkAdminHandlerTestBase.java index 709c597b993f..b881abf8684e 100644 --- a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkAdminHandlerTestBase.java +++ b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkAdminHandlerTestBase.java @@ -121,7 +121,7 @@ public class NetworkAdminHandlerTestBase { protected TransactionID otherNonceOneTransactionID = transactionID(1); protected TransactionID otherNonceTwoTransactionID = transactionID(2); - protected TransactionID otherNonceTreeTransactionID = transactionID(3); + protected TransactionID otherNonceThreeTransactionID = transactionID(3); protected TransactionID transactionIDNotInCache =
transactionID(5); private static final int MAX_QUERYABLE_PER_ACCOUNT = 10; @@ -223,7 +223,7 @@ private void givenRecordCacheState() { .receipt(receipt) .build(); final var recordThree = TransactionRecord.newBuilder() - .transactionID(otherNonceTreeTransactionID) + .transactionID(otherNonceThreeTransactionID) .receipt(receipt) .build(); cache.add(0, PAYER_ACCOUNT_ID, primaryRecord, Instant.now()); @@ -403,4 +403,13 @@ private TransactionID transactionID(int nanos, int nonce) { .nonce(nonce) .build(); } + + protected TransactionID transactionIDWithoutAccount(int nanos, int nonce) { + final var now = Instant.now(); + return TransactionID.newBuilder() + .transactionValidStart( + Timestamp.newBuilder().seconds(now.getEpochSecond()).nanos(nanos)) + .nonce(nonce) + .build(); + } } diff --git a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetReceiptHandlerTest.java b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetReceiptHandlerTest.java index edf2205394d9..965920f0d225 100644 --- a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetReceiptHandlerTest.java +++ b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetReceiptHandlerTest.java @@ -20,9 +20,7 @@ import static com.hedera.node.app.spi.fixtures.Assertions.assertThrowsPreCheck; import static org.assertj.core.api.Assertions.assertThatCode; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.when; @@ -30,7 +28,6 @@ import com.hedera.hapi.node.base.QueryHeader; import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.hapi.node.base.ResponseHeader; -import com.hedera.hapi.node.base.ResponseType; import com.hedera.hapi.node.base.TransactionID; import com.hedera.hapi.node.transaction.Query; import com.hedera.hapi.node.transaction.Response; @@ -83,22 +80,6 @@ void createsEmptyResponse() { assertEquals(expectedResponse, response); } - @Test - void requiresPayment() { - assertTrue(networkTransactionGetReceiptHandler.requiresNodePayment(ResponseType.ANSWER_ONLY)); - assertTrue(networkTransactionGetReceiptHandler.requiresNodePayment(ResponseType.ANSWER_STATE_PROOF)); - assertFalse(networkTransactionGetReceiptHandler.requiresNodePayment(ResponseType.COST_ANSWER)); - assertFalse(networkTransactionGetReceiptHandler.requiresNodePayment(ResponseType.COST_ANSWER_STATE_PROOF)); - } - - @Test - void needsAnswerOnlyCostForCostAnswer() { - assertFalse(networkTransactionGetReceiptHandler.needsAnswerOnlyCost(ResponseType.ANSWER_ONLY)); - assertFalse(networkTransactionGetReceiptHandler.needsAnswerOnlyCost(ResponseType.ANSWER_STATE_PROOF)); - assertTrue(networkTransactionGetReceiptHandler.needsAnswerOnlyCost(ResponseType.COST_ANSWER)); - assertFalse(networkTransactionGetReceiptHandler.needsAnswerOnlyCost(ResponseType.COST_ANSWER_STATE_PROOF)); - } - @Test void validatesQueryWhenValidReceipt() throws Throwable { diff --git 
a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetRecordHandlerTest.java b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetRecordHandlerTest.java new file mode 100644 index 000000000000..de40330770e0 --- /dev/null +++ b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetRecordHandlerTest.java @@ -0,0 +1,265 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.networkadmin.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TRANSACTION_ID; +import static com.hedera.node.app.spi.fixtures.Assertions.assertThrowsPreCheck; +import static org.assertj.core.api.Assertions.assertThatCode; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.lenient; +import static org.mockito.Mockito.when; + +import com.hedera.hapi.node.base.QueryHeader; +import com.hedera.hapi.node.base.ResponseCodeEnum; +import com.hedera.hapi.node.base.ResponseHeader; +import com.hedera.hapi.node.base.ResponseType; +import com.hedera.hapi.node.base.TransactionID; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.hapi.node.transaction.TransactionGetRecordQuery; +import com.hedera.hapi.node.transaction.TransactionGetRecordResponse; +import com.hedera.hapi.node.transaction.TransactionReceipt; +import com.hedera.hapi.node.transaction.TransactionRecord; +import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkTransactionGetRecordHandler; +import com.hedera.node.app.spi.records.RecordCache; +import com.hedera.node.app.spi.workflows.QueryContext; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import java.util.List; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class NetworkTransactionGetRecordHandlerTest extends NetworkAdminHandlerTestBase { + @Mock + private QueryContext context; + + private NetworkTransactionGetRecordHandler networkTransactionGetRecordHandler; + + @BeforeEach + void setUp() { + networkTransactionGetRecordHandler = new NetworkTransactionGetRecordHandler(); + final var configuration = new HederaTestConfigBuilder().getOrCreateConfig(); + 
lenient().when(context.configuration()).thenReturn(configuration); + } + + @Test + void extractsHeader() { + final var query = createGetTransactionRecordQuery(transactionID, false, false); + final var header = networkTransactionGetRecordHandler.extractHeader(query); + final var op = query.transactionGetRecordOrThrow(); + assertEquals(op.header(), header); + } + + @Test + void createsEmptyResponse() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.FAIL_FEE) + .build(); + final var response = networkTransactionGetRecordHandler.createEmptyResponse(responseHeader); + final var expectedResponse = Response.newBuilder() + .transactionGetRecord(TransactionGetRecordResponse.newBuilder().header(responseHeader)) + .build(); + assertEquals(expectedResponse, response); + } + + @Test + void requiresPayment() { + assertTrue(networkTransactionGetRecordHandler.requiresNodePayment(ResponseType.ANSWER_ONLY)); + assertTrue(networkTransactionGetRecordHandler.requiresNodePayment(ResponseType.ANSWER_STATE_PROOF)); + assertFalse(networkTransactionGetRecordHandler.requiresNodePayment(ResponseType.COST_ANSWER)); + assertFalse(networkTransactionGetRecordHandler.requiresNodePayment(ResponseType.COST_ANSWER_STATE_PROOF)); + } + + @Test + void needsAnswerOnlyCostForCostAnswer() { + assertFalse(networkTransactionGetRecordHandler.needsAnswerOnlyCost(ResponseType.ANSWER_ONLY)); + assertFalse(networkTransactionGetRecordHandler.needsAnswerOnlyCost(ResponseType.ANSWER_STATE_PROOF)); + assertTrue(networkTransactionGetRecordHandler.needsAnswerOnlyCost(ResponseType.COST_ANSWER)); + assertFalse(networkTransactionGetRecordHandler.needsAnswerOnlyCost(ResponseType.COST_ANSWER_STATE_PROOF)); + } + + @Test + void validatesQueryWhenValidRecord() throws Throwable { + + final var query = createGetTransactionRecordQuery(transactionID, false, false); + given(context.query()).willReturn(query); + given(context.createStore(RecordCache.class)).willReturn(cache); + + assertThatCode(() -> networkTransactionGetRecordHandler.validate(context)) + .doesNotThrowAnyException(); + } + + @Test + void validatesQueryWhenNoTransactionId() throws Throwable { + + final var query = createEmptysQuery(); + given(context.query()).willReturn(query); + + assertThrowsPreCheck(() -> networkTransactionGetRecordHandler.validate(context), INVALID_TRANSACTION_ID); + } + + @Test + void validatesQueryWhenNoAccountId() throws Throwable { + + final var query = createGetTransactionRecordQuery(transactionIDWithoutAccount(0, 0), false, false); + given(context.query()).willReturn(query); + + assertThrowsPreCheck(() -> networkTransactionGetRecordHandler.validate(context), INVALID_ACCOUNT_ID); + } + + @Test + void getsResponseIfFailedResponse() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.FAIL_FEE) + .build(); + + final var query = createGetTransactionRecordQuery(transactionID, false, false); + when(context.query()).thenReturn(query); + + final var response = networkTransactionGetRecordHandler.findResponse(context, responseHeader); + final var op = response.transactionGetRecordOrThrow(); + assertEquals(ResponseCodeEnum.FAIL_FEE, op.header().nodeTransactionPrecheckCode()); + assertNull(op.transactionRecord()); + } + + @Test + void getsResponseIsEmptyWhenTransactionNotExist() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.OK) + .build(); + + final var query = 
createGetTransactionRecordQuery(transactionIDNotInCache, false, false); + when(context.query()).thenReturn(query); + when(context.createStore(RecordCache.class)).thenReturn(cache); + + final var response = networkTransactionGetRecordHandler.findResponse(context, responseHeader); + final var op = response.transactionGetRecordOrThrow(); + assertEquals(ResponseCodeEnum.RECORD_NOT_FOUND, op.header().nodeTransactionPrecheckCode()); + assertNull(op.transactionRecord()); + } + + @Test + void getsResponseIfOkResponse() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.OK) + .build(); + final var expectedRecord = getExpectedRecord(transactionID); + + final var query = createGetTransactionRecordQuery(transactionID, false, false); + when(context.query()).thenReturn(query); + when(context.createStore(RecordCache.class)).thenReturn(cache); + + final var response = networkTransactionGetRecordHandler.findResponse(context, responseHeader); + final var op = response.transactionGetRecordOrThrow(); + assertEquals(ResponseCodeEnum.OK, op.header().nodeTransactionPrecheckCode()); + assertEquals(expectedRecord, op.transactionRecord()); + } + + @Test + void getsResponseIfOkResponseWithDuplicates() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.OK) + .build(); + final var expectedRecord = getExpectedRecord(transactionID); + final List expectedDuplicateRecords = getExpectedDuplicateList(); + + final var query = createGetTransactionRecordQuery(transactionID, true, false); + when(context.query()).thenReturn(query); + when(context.createStore(RecordCache.class)).thenReturn(cache); + + final var response = networkTransactionGetRecordHandler.findResponse(context, responseHeader); + final var op = response.transactionGetRecordOrThrow(); + assertEquals(ResponseCodeEnum.OK, op.header().nodeTransactionPrecheckCode()); + assertEquals(expectedRecord, op.transactionRecord()); + assertEquals(expectedDuplicateRecords, op.duplicateTransactionRecords()); + assertEquals( + expectedDuplicateRecords.size(), + op.duplicateTransactionRecords().size()); + } + + @Test + void getsResponseIfOkResponseWithChildrenRecord() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.OK) + .build(); + final var expectedRecord = getExpectedRecord(transactionID); + final List expectedChildRecordList = getExpectedChildRecordList(); + + final var query = createGetTransactionRecordQuery(transactionID, false, true); + when(context.query()).thenReturn(query); + when(context.createStore(RecordCache.class)).thenReturn(cache); + + final var response = networkTransactionGetRecordHandler.findResponse(context, responseHeader); + final var op = response.transactionGetRecordOrThrow(); + assertEquals(ResponseCodeEnum.OK, op.header().nodeTransactionPrecheckCode()); + assertEquals(expectedRecord, op.transactionRecord()); + assertEquals(expectedChildRecordList, op.childTransactionRecords()); + assertEquals( + expectedChildRecordList.size(), op.childTransactionRecords().size()); + } + + private TransactionRecord getExpectedRecord(TransactionID transactionID) { + final var receipt = TransactionReceipt.newBuilder() + .accountID(accountId) + .status(ResponseCodeEnum.UNKNOWN) + .build(); + return TransactionRecord.newBuilder() + .transactionID(transactionID) + .receipt(receipt) + .build(); + } + + private List getExpectedDuplicateList() { + return List.of( + getExpectedRecord(transactionID), 
getExpectedRecord(transactionID), getExpectedRecord(transactionID)); + } + + private List getExpectedChildRecordList() { + return List.of( + getExpectedRecord(otherNonceOneTransactionID), + getExpectedRecord(otherNonceTwoTransactionID), + getExpectedRecord(otherNonceThreeTransactionID)); + } + + private Query createGetTransactionRecordQuery( + final TransactionID transactionID, final boolean includeDuplicates, final boolean includeChildRecords) { + final var data = TransactionGetRecordQuery.newBuilder() + .transactionID(transactionID) + .includeDuplicates(includeDuplicates) + .includeChildRecords(includeChildRecords) + .header(QueryHeader.newBuilder().build()) + .build(); + + return Query.newBuilder().transactionGetRecord(data).build(); + } + + private Query createEmptysQuery() { + final var data = TransactionGetRecordQuery.newBuilder() + .header(QueryHeader.newBuilder().build()) + .build(); + + return Query.newBuilder().transactionGetRecord(data).build(); + } +} From bfbc3c8221dfba52b12d012f570a053bcafeb061 Mon Sep 17 00:00:00 2001 From: Richard Bair Date: Sat, 10 Jun 2023 09:43:56 -0700 Subject: [PATCH 06/70] Update CODEOWNERS for `hedera-app` (#7037) Signed-off-by: Richard Bair Co-authored-by: Nathan Klick --- .github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f819690c04cb..138047cf2caa 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -23,7 +23,7 @@ /hedera-node/hedera-mono-service/ @hashgraph/hedera-services /hedera-node/hapi*/ @hashgraph/hedera-services /hedera-node/hedera-admin*/ @hashgraph/hedera-services @kimbor @povolev15 -/hedera-node/hedera-app*/ @hashgraph/hedera-services @hashgraph/platform-base +/hedera-node/hedera-app*/ @hashgraph/hedera-base /hedera-node/hedera-consensus*/ @hashgraph/hedera-services @povolev15 @iwsimon /hedera-node/hedera-evm*/ @hashgraph/hedera-smart-contracts /hedera-node/hedera-file*/ @hashgraph/hedera-services @povolev15 @iwsimon From d7b9800fe1e1f066e979c6bf4ace258f417bb71a Mon Sep 17 00:00:00 2001 From: Richard Bair Date: Sat, 10 Jun 2023 09:44:25 -0700 Subject: [PATCH 07/70] Allow max response size of 100KiB (#7035) Signed-off-by: Richard Bair --- .../main/java/com/hedera/node/app/grpc/MethodBase.java | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/MethodBase.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/MethodBase.java index 090fd99b8ab0..3b16af0b8b7d 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/MethodBase.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/MethodBase.java @@ -27,14 +27,21 @@ import edu.umd.cs.findbugs.annotations.NonNull; import io.grpc.stub.ServerCalls; import io.grpc.stub.StreamObserver; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; /** * An instance of either {@link TransactionMethod} or {@link QueryMethod} is created per transaction * type and query type. */ abstract class MethodBase implements ServerCalls.UnaryMethod { + private static final Logger logger = LogManager.getLogger(MethodBase.class); + // To be set by configuration. See Issue #4294 private static final int MAX_MESSAGE_SIZE = Hedera.MAX_SIGNED_TXN_SIZE; + // To be set by configuration. See Issue #4294. Originally this was intended to be the same max size as + // a transaction, but some files and other responses are much larger. So we had to set this larger. 
+ private static final int MAX_RESPONSE_SIZE = 1024 * 100; // Constants for metric names and descriptions private static final String COUNTER_HANDLED_NAME_TPL = "%sHdl"; @@ -53,7 +60,7 @@ abstract class MethodBase implements ServerCalls.UnaryMethod BUFFER_THREAD_LOCAL = - ThreadLocal.withInitial(() -> BufferedData.allocate(MAX_MESSAGE_SIZE)); + ThreadLocal.withInitial(() -> BufferedData.allocate(MAX_RESPONSE_SIZE)); /** The name of the service associated with this method. */ protected final String serviceName; @@ -129,6 +136,7 @@ public void invoke( callsHandledSpeedometer.cycle(); } catch (final Throwable th) { // Track the number of times we failed to handle a call + logger.error("Failed to handle call! Unexpected exception", th); callsFailedCounter.increment(); responseObserver.onError(th); } From 4bf845a6af841ed5d24367b3efcde499b89a7529 Mon Sep 17 00:00:00 2001 From: Neeharika Sompalli <52669918+Neeharika-Sompalli@users.noreply.github.com> Date: Mon, 12 Jun 2023 09:23:08 -0500 Subject: [PATCH 08/70] TokenMint handle() implementation (#6996) Signed-off-by: Neeharika-Sompalli --- .../SingleTransactionRecordBuilder.java | 13 +- .../NetworkGetAccountDetailsHandler.java | 6 +- .../impl/ReadableTokenRelationStoreImpl.java | 12 +- .../impl/WritableTokenRelationStore.java | 22 +- .../impl/handlers/BaseCryptoHandler.java | 5 + .../token/impl/handlers/BaseTokenHandler.java | 142 ++++++++++ .../CryptoGetAccountBalanceHandler.java | 9 +- .../TokenAssociateToAccountHandler.java | 12 +- .../TokenDissociateFromAccountHandler.java | 5 +- .../handlers/TokenFreezeAccountHandler.java | 4 +- .../TokenGrantKycToAccountHandler.java | 2 +- .../token/impl/handlers/TokenMintHandler.java | 199 +++++++++++++- .../TokenRevokeKycFromAccountHandler.java | 4 +- .../handlers/TokenUnfreezeAccountHandler.java | 4 +- .../impl/records/TokenMintRecordBuilder.java | 37 +++ .../impl/util/TokenRelListCalculator.java | 15 +- .../validators/ApproveAllowanceValidator.java | 2 +- .../impl/validators/CustomFeesValidator.java | 5 +- .../validators/DeleteAllowanceValidator.java | 2 +- .../TokenSupplyChangeOpsValidator.java | 91 +++++++ .../ReadableTokenRelationStoreImplTest.java | 4 +- .../test/WritableTokenRelationStoreTest.java | 8 +- .../TokenAssociateToAccountHandlerTest.java | 48 ++-- ...TokenDissociateFromAccountHandlerTest.java | 21 +- .../TokenFreezeAccountHandlerTest.java | 7 +- .../TokenGrantKycToAccountHandlerTest.java | 5 +- .../handlers/TokenMintHandlerParityTest.java | 19 +- .../test/handlers/TokenMintHandlerTest.java | 246 ++++++++++++++++++ .../TokenRevokeKycFromAccountHandlerTest.java | 5 +- .../TokenUnfreezeAccountHandlerTest.java | 7 +- .../util/CryptoTokenHandlerTestBase.java | 75 ++++-- .../token/ReadableTokenRelationStore.java | 8 +- 32 files changed, 886 insertions(+), 158 deletions(-) create mode 100644 hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java create mode 100644 hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/records/TokenMintRecordBuilder.java create mode 100644 hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java create mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerTest.java diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/SingleTransactionRecordBuilder.java 
b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/SingleTransactionRecordBuilder.java index ce360fbfe512..df78e68254b9 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/SingleTransactionRecordBuilder.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/SingleTransactionRecordBuilder.java @@ -47,6 +47,7 @@ import com.hedera.node.app.service.consensus.impl.records.ConsensusSubmitMessageRecordBuilder; import com.hedera.node.app.service.file.impl.records.CreateFileRecordBuilder; import com.hedera.node.app.service.token.impl.records.CryptoCreateRecordBuilder; +import com.hedera.node.app.service.token.impl.records.TokenMintRecordBuilder; import com.hedera.node.app.service.util.impl.records.PrngRecordBuilder; import com.hedera.node.app.spi.HapiUtils; import com.hedera.node.app.spi.records.SingleTransactionRecord; @@ -69,7 +70,8 @@ public class SingleTransactionRecordBuilder ConsensusSubmitMessageRecordBuilder, CreateFileRecordBuilder, CryptoCreateRecordBuilder, - PrngRecordBuilder { + PrngRecordBuilder, + TokenMintRecordBuilder { // base transaction data private Transaction transaction; private Bytes transactionBytes; @@ -407,6 +409,15 @@ public SingleTransactionRecordBuilder serialNumbers(List serialNumbers) { return this; } + /** + * @deprecated this method is only used temporarily during the migration + */ + @Deprecated(forRemoval = true) + @Nullable + public List serialNumbers() { + return serialNumbers; + } + // ------------------------------------------------------------------------------------------------------------------------ // Sidecar data, booleans are the migration flag public SingleTransactionRecordBuilder addContractStateChanges( diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkGetAccountDetailsHandler.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkGetAccountDetailsHandler.java index 05d63768f6ed..fa9ef73e6f71 100644 --- a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkGetAccountDetailsHandler.java +++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkGetAccountDetailsHandler.java @@ -34,7 +34,6 @@ import com.hedera.hapi.node.base.TokenKycStatus; import com.hedera.hapi.node.base.TokenRelationship; import com.hedera.hapi.node.state.token.Account; -import com.hedera.hapi.node.state.token.TokenRelation; import com.hedera.hapi.node.token.AccountDetails; import com.hedera.hapi.node.token.GetAccountDetailsQuery; import com.hedera.hapi.node.token.GetAccountDetailsResponse; @@ -199,17 +198,16 @@ private static List getTokenRelationships( int count = 0; while (tokenNum != 0 && count <= maxRelsPerInfoQuery) { - final Optional optionalTokenRelation = tokenRelationStore.get( + final var tokenRelation = tokenRelationStore.get( AccountID.newBuilder().accountNum(account.accountNumber()).build(), TokenID.newBuilder().tokenNum(tokenNum).build()); - if (optionalTokenRelation.isPresent()) { + if (tokenRelation != null) { final var tokenId = TokenID.newBuilder() .shardNum(StaticProperties.getShard()) .realmNum(StaticProperties.getRealm()) .tokenNum(tokenNum) .build(); final TokenMetadata token = readableTokenStore.getTokenMeta(tokenId); - final var tokenRelation = optionalTokenRelation.get(); if (token != null) { final 
TokenRelationship tokenRelationship = TokenRelationship.newBuilder() .tokenId(tokenId) diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableTokenRelationStoreImpl.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableTokenRelationStoreImpl.java index badf131b2334..60ac22da5149 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableTokenRelationStoreImpl.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableTokenRelationStoreImpl.java @@ -26,8 +26,7 @@ import com.hedera.node.app.spi.state.ReadableKVState; import com.hedera.node.app.spi.state.ReadableStates; import edu.umd.cs.findbugs.annotations.NonNull; -import java.util.Objects; -import java.util.Optional; +import edu.umd.cs.findbugs.annotations.Nullable; /** * Provides read-only methods for getting underlying data for working with TokenRelations. @@ -51,15 +50,14 @@ public ReadableTokenRelationStoreImpl(@NonNull final ReadableStates states) { * {@inheritDoc} */ @Override - public Optional get(@NonNull final AccountID accountId, @NonNull final TokenID tokenId) { + @Nullable + public TokenRelation get(@NonNull final AccountID accountId, @NonNull final TokenID tokenId) { requireNonNull(accountId); requireNonNull(tokenId); - if (AccountID.DEFAULT.equals(accountId) || TokenID.DEFAULT.equals(tokenId)) return Optional.empty(); + if (AccountID.DEFAULT.equals(accountId) || TokenID.DEFAULT.equals(tokenId)) return null; - final var tokenRelation = Objects.requireNonNull(readableTokenRelState) - .get(EntityNumPair.fromLongs(accountId.accountNum(), tokenId.tokenNum())); - return Optional.ofNullable(tokenRelation); + return readableTokenRelState.get(EntityNumPair.fromLongs(accountId.accountNum(), tokenId.tokenNum())); } /** diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableTokenRelationStore.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableTokenRelationStore.java index e6e7dfb63997..8312e3eff1f8 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableTokenRelationStore.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableTokenRelationStore.java @@ -25,8 +25,8 @@ import com.hedera.node.app.spi.state.WritableKVState; import com.hedera.node.app.spi.state.WritableStates; import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Objects; -import java.util.Optional; import java.util.Set; /** @@ -56,10 +56,9 @@ public WritableTokenRelationStore(@NonNull final WritableStates states) { * @param tokenRelation - the tokenRelation to be persisted */ public void put(@NonNull final TokenRelation tokenRelation) { - requireNonNull(tokenRelState) - .put( - EntityNumPair.fromLongs(tokenRelation.accountNumber(), tokenRelation.tokenNumber()), - Objects.requireNonNull(tokenRelation)); + tokenRelState.put( + EntityNumPair.fromLongs(tokenRelation.accountNumber(), tokenRelation.tokenNumber()), + Objects.requireNonNull(tokenRelation)); } /** @@ -68,8 +67,7 @@ public void put(@NonNull final TokenRelation tokenRelation) { * @param tokenRelation the {@code TokenRelation} to be removed */ public void remove(@NonNull final TokenRelation tokenRelation) { - 
requireNonNull(tokenRelState) - .remove(EntityNumPair.fromLongs(tokenRelation.accountNumber(), tokenRelation.tokenNumber())); + tokenRelState.remove(EntityNumPair.fromLongs(tokenRelation.accountNumber(), tokenRelation.tokenNumber())); } /** @@ -79,16 +77,14 @@ public void remove(@NonNull final TokenRelation tokenRelation) { * @param accountId - the number of the account to be retrieved * @param tokenId - the number of the token to be retrieved */ - @NonNull - public Optional getForModify(@NonNull final AccountID accountId, @NonNull final TokenID tokenId) { + @Nullable + public TokenRelation getForModify(@NonNull final AccountID accountId, @NonNull final TokenID tokenId) { requireNonNull(accountId); requireNonNull(tokenId); - if (AccountID.DEFAULT.equals(accountId) || TokenID.DEFAULT.equals(tokenId)) return Optional.empty(); + if (AccountID.DEFAULT.equals(accountId) || TokenID.DEFAULT.equals(tokenId)) return null; - final var token = Objects.requireNonNull(tokenRelState) - .getForModify(EntityNumPair.fromLongs(accountId.accountNum(), tokenId.tokenNum())); - return Optional.ofNullable(token); + return tokenRelState.getForModify(EntityNumPair.fromLongs(accountId.accountNum(), tokenId.tokenNum())); } /** diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseCryptoHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseCryptoHandler.java index 90b1c1fb6a9e..f8aee5a1f584 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseCryptoHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseCryptoHandler.java @@ -51,4 +51,9 @@ protected long getStakedId( throw new IllegalStateException("StakedIdOneOfType is not set"); } } + + @NonNull + public static AccountID asAccount(final long num) { + return AccountID.newBuilder().accountNum(num).build(); + } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java new file mode 100644 index 000000000000..4e110da0169a --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java @@ -0,0 +1,142 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.token.impl.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.*; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static java.util.Objects.requireNonNull; + +import com.hedera.hapi.node.base.ResponseCodeEnum; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TokenSupplyType; +import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.state.token.TokenRelation; +import com.hedera.node.app.service.token.impl.WritableAccountStore; +import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; +import com.hedera.node.app.service.token.impl.WritableTokenStore; +import edu.umd.cs.findbugs.annotations.NonNull; + +public class BaseTokenHandler { + /** + * Mints fungible tokens. This method is called in both token create and mint. + * @param token the new or existing token to mint + * @param treasuryRel the treasury relation for the token + * @param amount the amount to mint + * @param isMintOnTokenCreation true if this is a mint on token creation + * @param accountStore the account store + * @param tokenStore the token store + * @param tokenRelationStore the token relation store + */ + protected void mintFungible( + @NonNull final Token token, + @NonNull final TokenRelation treasuryRel, + final long amount, + final boolean isMintOnTokenCreation, + @NonNull final WritableAccountStore accountStore, + @NonNull final WritableTokenStore tokenStore, + @NonNull final WritableTokenRelationStore tokenRelationStore) { + requireNonNull(token); + requireNonNull(treasuryRel); + + validateTrue(amount >= 0, INVALID_TOKEN_MINT_AMOUNT); + // validate token supply key exists for mint or burn. + // But this flag is not set when mint is called on token creation with initial supply. + // We don't need to check the supply key ONLY in that case + if (!isMintOnTokenCreation) { + validateTrue(token.supplyKey() != null, TOKEN_HAS_NO_SUPPLY_KEY); + } + changeSupply( + token, treasuryRel, +amount, INVALID_TOKEN_MINT_AMOUNT, accountStore, tokenStore, tokenRelationStore); + } + + /** + * Since token mint and token burn change the supply on the token and treasury account, + * this method is used to change the supply. 
+ * @param token the token that is minted or burned + * @param treasuryRel the treasury relation for the token + * @param amount the amount to mint or burn + * @param invalidSupplyCode the invalid supply code to use if the supply is invalid + * @param accountStore the account store + * @param tokenStore the token store + * @param tokenRelationStore the token relation store + */ + protected void changeSupply( + @NonNull final Token token, + @NonNull final TokenRelation treasuryRel, + final long amount, + @NonNull final ResponseCodeEnum invalidSupplyCode, + @NonNull final WritableAccountStore accountStore, + @NonNull final WritableTokenStore tokenStore, + @NonNull final WritableTokenRelationStore tokenRelationStore) { + requireNonNull(token); + requireNonNull(treasuryRel); + requireNonNull(invalidSupplyCode); + + validateTrue( + treasuryRel.accountNumber() == token.treasuryAccountNumber() + && token.tokenNumber() == treasuryRel.tokenNumber(), + FAIL_INVALID); + final long newTotalSupply = token.totalSupply() + amount; + + // validate that the new total supply is not negative after mint or burn or wipe + // FUTURE - All these checks that return FAIL_INVALID probably should end up in a + // finalize method in token service to validate everything before we commit + validateTrue(newTotalSupply >= 0, invalidSupplyCode); + + if (token.supplyType() == TokenSupplyType.FINITE) { + validateTrue(token.maxSupply() >= newTotalSupply, TOKEN_MAX_SUPPLY_REACHED); + } + + final var treasuryAccount = accountStore.get(asAccount(treasuryRel.accountNumber())); + validateTrue(treasuryAccount != null, INVALID_TREASURY_ACCOUNT_FOR_TOKEN); + + final long newTreasuryBalance = treasuryRel.balance() + amount; + validateTrue(newTreasuryBalance >= 0, INSUFFICIENT_TOKEN_BALANCE); + + // copy the token, treasury account and treasury relation + final var copyTreasuryAccount = treasuryAccount.copyBuilder(); + final var copyToken = token.copyBuilder(); + final var copyTreasuryRel = treasuryRel.copyBuilder(); + + if (treasuryRel.balance() == 0 && amount > 0) { + // On an account positive balances are incremented for newly added tokens. + // If treasury relation did mint any for this token till now, only then increment + // total positive balances on treasury account. + copyTreasuryAccount.numberPositiveBalances(treasuryAccount.numberPositiveBalances() + 1); + } else if (newTreasuryBalance == 0 && amount < 0) { + // On an account positive balances are decremented for burning tokens completely. + // If treasury relation did not burn any for this token till now or if this burn makes the balance to 0, + // only then decrement total positive balances on treasury account. 
+ copyTreasuryAccount.numberPositiveBalances(treasuryAccount.numberPositiveBalances() - 1); + } + + // since we have either minted or burned tokens, we need to update the total supply + copyToken.totalSupply(newTotalSupply); + copyTreasuryRel.balance(newTreasuryBalance); + + // put the changed token, treasury account and treasury relation + accountStore.put(copyTreasuryAccount.build()); + tokenStore.put(copyToken.build()); + tokenRelationStore.put(copyTreasuryRel.build()); + } + + @NonNull + public static TokenID asToken(final long num) { + return TokenID.newBuilder().tokenNum(num).build(); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountBalanceHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountBalanceHandler.java index 7a350e100ce4..d2d35b4fd4b3 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountBalanceHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountBalanceHandler.java @@ -47,7 +47,6 @@ import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ArrayList; import java.util.List; -import java.util.Optional; import javax.inject.Inject; import javax.inject.Singleton; @@ -134,7 +133,7 @@ private List getTokenBalances( final var ret = new ArrayList(); var tokenNum = account.headTokenNumber(); int count = 0; - Optional tokenRelation; + TokenRelation tokenRelation; Token token; // token from readableToken store by tokenID TokenID tokenID; // build from tokenNum AccountID accountID; // build from accountNumber @@ -144,17 +143,17 @@ private List getTokenBalances( AccountID.newBuilder().accountNum(account.accountNumber()).build(); tokenID = TokenID.newBuilder().tokenNum(tokenNum).build(); tokenRelation = tokenRelationStore.get(accountID, tokenID); - if (tokenRelation.isPresent()) { + if (tokenRelation != null) { token = readableTokenStore.get(tokenID); if (token != null) { tokenBalance = TokenBalance.newBuilder() .tokenId(TokenID.newBuilder().tokenNum(tokenNum).build()) - .balance(tokenRelation.get().balance()) + .balance(tokenRelation.balance()) .decimals(token.decimals()) .build(); ret.add(tokenBalance); } - tokenNum = tokenRelation.get().nextToken(); + tokenNum = tokenRelation.nextToken(); } else { break; } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAssociateToAccountHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAssociateToAccountHandler.java index 60c8b20be62f..567846bcb3a4 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAssociateToAccountHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAssociateToAccountHandler.java @@ -150,13 +150,9 @@ private void createAndLinkTokenRels( // tokenRels at the beginning of the list of existing token numbers first. 
We start by // retrieving the token rel object with the currentHeadTokenNum at the head of the // account - final var headTokenRel = tokenRelStore - .get( - AccountID.newBuilder() - .accountNum(account.accountNumber()) - .build(), - TokenID.newBuilder().tokenNum(currentHeadTokenNum).build()) - .orElse(null); + final var headTokenRel = tokenRelStore.get( + AccountID.newBuilder().accountNum(account.accountNumber()).build(), + TokenID.newBuilder().tokenNum(currentHeadTokenNum).build()); if (headTokenRel != null) { // Recreate the current head token's tokenRel, but with its previous pointer set to // the last of the new tokenRels. This links the new token rels to the rest of the @@ -250,7 +246,7 @@ private Validated validateSemantics( // Check that a token rel doesn't already exist for each new token ID for (final TokenID tokenId : tokenIds) { - final var existingTokenRel = tokenRelStore.get(accountId, tokenId).orElse(null); + final var existingTokenRel = tokenRelStore.get(accountId, tokenId); validateTrue(existingTokenRel == null, TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT); } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDissociateFromAccountHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDissociateFromAccountHandler.java index 15eaf33299c4..93b62ce9c223 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDissociateFromAccountHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDissociateFromAccountHandler.java @@ -214,7 +214,7 @@ private ValidatedResult validateSemantics( // Construct the dissociation for each token ID final var dissociations = new ArrayList(); for (final var tokenId : tokenIds) { - final var tokenRel = tokenRelStore.get(accountId, tokenId).orElse(null); + final var tokenRel = tokenRelStore.get(accountId, tokenId); validateTrue(tokenRel != null, TOKEN_NOT_ASSOCIATED_TO_ACCOUNT); // Here we check/retrieve a token that may not be "usable," but since we are dissociating token relations, @@ -226,8 +226,7 @@ private ValidatedResult validateSemantics( final var tokenTreasuryAcct = AccountID.newBuilder() .accountNum(possiblyUnusableToken.treasuryAccountNumber()) .build(); - dissociatedTokenTreasuryRel = - tokenRelStore.get(tokenTreasuryAcct, tokenId).orElse(null); + dissociatedTokenTreasuryRel = tokenRelStore.get(tokenTreasuryAcct, tokenId); } else { // If the token isn't found, assume the treasury token rel is null dissociatedTokenTreasuryRel = null; diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenFreezeAccountHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenFreezeAccountHandler.java index 9d6ac911b8c5..cc62f285a3c4 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenFreezeAccountHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenFreezeAccountHandler.java @@ -126,9 +126,9 @@ private TokenRelation validateSemantics( // Check that the token is associated to the account final var tokenRel = tokenRelStore.getForModify(accountId, tokenId); - validateTrue(tokenRel.isPresent(), TOKEN_NOT_ASSOCIATED_TO_ACCOUNT); + validateTrue(tokenRel != null, 
TOKEN_NOT_ASSOCIATED_TO_ACCOUNT); // Return the token relation - return tokenRel.get(); + return tokenRel; } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGrantKycToAccountHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGrantKycToAccountHandler.java index 71d936915bf4..2820fb3b571f 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGrantKycToAccountHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGrantKycToAccountHandler.java @@ -111,7 +111,7 @@ private TokenRelation validateSemantics( @NonNull final TokenID tokenId, @NonNull final WritableTokenRelationStore tokenRelStore) throws HandleException { - final var tokenRel = tokenRelStore.getForModify(accountId, tokenId).orElse(null); + final var tokenRel = tokenRelStore.getForModify(accountId, tokenId); validateTrue(tokenRel != null, INVALID_TOKEN_ID); return tokenRel; diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java index 2cf18752ef4d..73dcb06d90e0 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java @@ -16,18 +16,37 @@ package com.hedera.node.app.service.token.impl.handlers; -import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.*; +import static com.hedera.node.app.service.mono.state.merkle.internals.BitPackUtils.MAX_NUM_ALLOWED; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.spi.workflows.HandleException.validateFalse; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateFalsePreCheck; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateTruePreCheck; import static java.util.Objects.requireNonNull; -import com.hedera.hapi.node.base.HederaFunctionality; -import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.*; +import com.hedera.hapi.node.state.common.UniqueTokenId; +import com.hedera.hapi.node.state.token.Account; +import com.hedera.hapi.node.state.token.Nft; +import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.state.token.TokenRelation; +import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.service.token.ReadableTokenStore; +import com.hedera.node.app.service.token.impl.*; +import com.hedera.node.app.service.token.impl.records.TokenMintRecordBuilder; +import com.hedera.node.app.service.token.impl.validators.TokenSupplyChangeOpsValidator; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.PreHandleContext; import com.hedera.node.app.spi.workflows.TransactionHandler; +import com.hedera.node.config.data.TokensConfig; +import 
com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; import javax.inject.Inject; import javax.inject.Singleton; @@ -36,16 +55,20 @@ * HederaFunctionality#TOKEN_MINT}. */ @Singleton -public class TokenMintHandler implements TransactionHandler { +public class TokenMintHandler extends BaseTokenHandler implements TransactionHandler { + private final TokenSupplyChangeOpsValidator validator; + @Inject - public TokenMintHandler() { - // Exists for injection + public TokenMintHandler(@NonNull final TokenSupplyChangeOpsValidator validator) { + this.validator = requireNonNull(validator); } @Override public void preHandle(@NonNull final PreHandleContext context) throws PreCheckException { requireNonNull(context); - final var op = context.body().tokenMintOrThrow(); + final var txn = context.body(); + pureChecks(txn); + final var op = txn.tokenMintOrThrow(); final var tokenStore = context.createStore(ReadableTokenStore.class); final var tokenMeta = tokenStore.getTokenMeta(op.tokenOrElse(TokenID.DEFAULT)); if (tokenMeta == null) throw new PreCheckException(INVALID_TOKEN_ID); @@ -54,8 +77,168 @@ public void preHandle(@NonNull final PreHandleContext context) throws PreCheckEx } } + @Override + public void pureChecks(@NonNull final TransactionBody txn) throws PreCheckException { + requireNonNull(txn); + final var op = txn.tokenMintOrThrow(); + validateTruePreCheck(op.hasToken(), INVALID_TOKEN_ID); + validateFalsePreCheck(!op.metadata().isEmpty() && op.amount() > 0, INVALID_TRANSACTION_BODY); + validateFalsePreCheck(op.amount() < 0, INVALID_TOKEN_MINT_AMOUNT); + } + @Override public void handle(@NonNull final HandleContext context) throws HandleException { - throw new UnsupportedOperationException("Not implemented"); + final var op = context.body().tokenMintOrThrow(); + final var tokenId = context.body().tokenMintOrThrow().tokenOrThrow(); + + validateSemantics(context); + + final var tokenStore = context.writableStore(WritableTokenStore.class); + final var tokenRelStore = context.writableStore(WritableTokenRelationStore.class); + final var accountStore = context.writableStore(WritableAccountStore.class); + // validate token exists + final var token = tokenStore.get(tokenId); + validateTrue(token != null, INVALID_TOKEN_ID); + // validate treasury relation exists + final var treasuryRel = tokenRelStore.get( + AccountID.newBuilder().accountNum(token.treasuryAccountNumber()).build(), tokenId); + validateTrue(treasuryRel != null, INVALID_TREASURY_ACCOUNT_FOR_TOKEN); + + if (token.tokenType() == TokenType.FUNGIBLE_COMMON) { + // we need to know if treasury mint while creation to ignore supply key exist or not. 
+            mintFungible(token, treasuryRel, op.amount(), false, accountStore, tokenStore, tokenRelStore);
+        } else {
+            // get the config needed for validation
+            final var tokensConfig = context.configuration().getConfigData(TokensConfig.class);
+            final var maxAllowedMints = tokensConfig.nftsMaxAllowedMints();
+            final var nftStore = context.writableStore(WritableNftStore.class);
+            // validate resources exist for minting nft
+            final var meta = op.metadata();
+            validateTrue(
+                    nftStore.sizeOfState() + meta.size() < maxAllowedMints, MAX_NFTS_IN_PRICE_REGIME_HAVE_BEEN_MINTED);
+            // mint nft
+            final var mintedSerials = mintNonFungible(
+                    token,
+                    treasuryRel,
+                    meta,
+                    context.consensusNow(),
+                    accountStore,
+                    tokenStore,
+                    tokenRelStore,
+                    nftStore);
+            final var recordBuilder = context.recordBuilder(TokenMintRecordBuilder.class);
+
+            recordBuilder.serialNumbers(mintedSerials);
+            // TODO: Need to build transfer ownership from list to transfer NFT to treasury
+            // This should probably be done in finalize method on token service which constructs the
+            // transfer list looking at state
+        }
+    }
+
+    /**
+     * Validates the semantics of the token mint transaction that involve state or config.
+     * @param context - the handle context of the token mint transaction
+     */
+    private void validateSemantics(final HandleContext context) {
+        requireNonNull(context);
+        final var op = context.body().tokenMintOrThrow();
+        validator.validate(op.amount(), op.metadata());
+    }
+
+    /**
+     * Minting nfts creates new instances of the given non-fungible token. Increments the
+     * serial number of the given base unique token, and increments total owned nfts of the
+     * non-fungible token.
+     *
+     * @param token - the token for which to mint nfts
+     * @param treasuryRel - the treasury relation of the token
+     * @param metadata - the metadata of the nft to be minted
+     * @param consensusTime - the consensus time of the transaction
+     * @param accountStore - the account store
+     * @param tokenStore - the token store
+     * @param tokenRelStore - the token relation store
+     * @param nftStore - the nft store
+     */
+    private List<Long> mintNonFungible(
+            final Token token,
+            @NonNull final TokenRelation treasuryRel,
+            @NonNull final List<Bytes> metadata,
+            @NonNull final Instant consensusTime,
+            @NonNull final WritableAccountStore accountStore,
+            @NonNull final WritableTokenStore tokenStore,
+            @NonNull final WritableTokenRelationStore tokenRelStore,
+            @NonNull final WritableNftStore nftStore) {
+        final var metadataCount = metadata.size();
+        validateFalse(metadata.isEmpty(), INVALID_TOKEN_MINT_METADATA);
+
+        // validate token number from treasury relation
+        final var tokenId = asToken(treasuryRel.tokenNumber());
+        validateTrue(treasuryRel.tokenNumber() == token.tokenNumber(), FAIL_INVALID);
+
+        // get the treasury account
+        final var treasuryAccount = accountStore.get(asAccount(treasuryRel.accountNumber()));
+        validateTrue(treasuryAccount != null, INVALID_TREASURY_ACCOUNT_FOR_TOKEN);
+
+        // get the latest serial number minted for the token
+        var currentSerialNumber = token.lastUsedSerialNumber();
+        validateTrue((currentSerialNumber + metadataCount) <= MAX_NUM_ALLOWED, SERIAL_NUMBER_LIMIT_REACHED);
+
+        // Change the supply on token
+        changeSupply(token, treasuryRel, metadataCount, FAIL_INVALID, accountStore, tokenStore, tokenRelStore);
+
+        final var mintedSerials = new ArrayList<Long>(metadata.size());
+
+        // for each serial number minted increment serial numbers and create new unique token
+        for (final var meta : metadata) {
+            currentSerialNumber++;
+            // The default sentinel account is used (0.0.0) to
represent unique tokens owned by the treasury + final var uniqueToken = + buildNewlyMintedNft(treasuryAccount, consensusTime, tokenId, meta, currentSerialNumber); + nftStore.put(uniqueToken); + // all minted serials should be added to the receipt + mintedSerials.add(currentSerialNumber); + } + // Update last used serial number and number of owned nfts and put the updated token and treasury + // into the store + final var copyToken = token.copyBuilder(); + final var copyTreasury = treasuryAccount.copyBuilder(); + // Update Token and treasury + copyToken.lastUsedSerialNumber(currentSerialNumber); + copyTreasury.numberOwnedNfts(treasuryAccount.numberOwnedNfts() + metadataCount); + + tokenStore.put(copyToken.build()); + accountStore.put(copyTreasury.build()); + + return mintedSerials; + } + + /** + * Builds a new unique token when minting a non-fungible token. + * @param treasuryAccount - the treasury account + * @param consensusTime - the consensus time of the transaction + * @param tokenId - the token id + * @param meta - the metadata of the nft + * @param currentSerialNumber - the current serial number of the nft + * @return - the newly built nft + */ + @NonNull + private Nft buildNewlyMintedNft( + @NonNull final Account treasuryAccount, + @NonNull final Instant consensusTime, + @NonNull final TokenID tokenId, + @NonNull final Bytes meta, + final long currentSerialNumber) { + return Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(tokenId.tokenNum()) + .serialNumber(currentSerialNumber) + .build()) + .ownerNumber(0L) + .mintTime(Timestamp.newBuilder() + .seconds(consensusTime.getEpochSecond()) + .nanos(consensusTime.getNano()) + .build()) + .metadata(meta) + .build(); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenRevokeKycFromAccountHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenRevokeKycFromAccountHandler.java index ce7ba380f139..58622f05d1a2 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenRevokeKycFromAccountHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenRevokeKycFromAccountHandler.java @@ -119,8 +119,8 @@ private TokenRelation validateSemantics( @NonNull final WritableTokenRelationStore tokenRelStore) throws HandleException { final var tokenRel = tokenRelStore.getForModify(accountId, tokenId); - validateTrue(tokenRel.isPresent(), INVALID_TOKEN_ID); + validateTrue(tokenRel != null, INVALID_TOKEN_ID); - return tokenRel.get(); + return tokenRel; } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenUnfreezeAccountHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenUnfreezeAccountHandler.java index ede2d5451fcc..8579d882b959 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenUnfreezeAccountHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenUnfreezeAccountHandler.java @@ -125,9 +125,9 @@ private TokenRelation validateSemantics( // Check that the token is associated to the account final var tokenRel = tokenRelStore.getForModify(accountId, tokenId); - validateTrue(tokenRel.isPresent(), 
TOKEN_NOT_ASSOCIATED_TO_ACCOUNT);
+        validateTrue(tokenRel != null, TOKEN_NOT_ASSOCIATED_TO_ACCOUNT);
 
         // Return the token relation
-        return tokenRel.get();
+        return tokenRel;
     }
 }
diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/records/TokenMintRecordBuilder.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/records/TokenMintRecordBuilder.java
new file mode 100644
index 000000000000..c797d73c7e84
--- /dev/null
+++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/records/TokenMintRecordBuilder.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2023 Hedera Hashgraph, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.hedera.node.app.service.token.impl.records;
+
+import edu.umd.cs.findbugs.annotations.NonNull;
+import java.util.List;
+
+/**
+ * A {@code RecordBuilder} specialization for tracking the side effects of a {@code TokenMint}
+ * transaction.
+ */
+public interface TokenMintRecordBuilder {
+    /**
+     * Tracks the serial numbers minted when a {@code TokenMint} is performed on a non-fungible
+     * token type. These serial numbers are externalized in the receipt of the transaction record
+     * built by this builder.
+     *
+     * @param serialNumbers the list of new serial numbers minted
+     * @return this builder
+     */
+    @NonNull
+    TokenMintRecordBuilder serialNumbers(@NonNull List<Long> serialNumbers);
+}
diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenRelListCalculator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenRelListCalculator.java
index aceefec40079..66dc3f40062e 100644
--- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenRelListCalculator.java
+++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenRelListCalculator.java
@@ -216,9 +216,8 @@ private TokenRelation getInPriorityOrder(
 
         // Finally, if we haven't found the token rel already, we resort to the token relation store to retrieve it (if
         // it exists)
-        return tokenRelStore
-                .get(accountId, TokenID.newBuilder().tokenNum(tokenNumToLookup).build())
-                .orElse(null);
+        return tokenRelStore.get(
+                accountId, TokenID.newBuilder().tokenNum(tokenNumToLookup).build());
     }
 
     /**
@@ -283,13 +282,9 @@ private long calculateHeadTokenAfterDeletions(
         do {
             currentWalkedTokenRel = updatedTokenRels.containsKey(currentTokenNum)
                     ?
updatedTokenRels.get(currentTokenNum) - : tokenRelStore - .get( - accountId, - TokenID.newBuilder() - .tokenNum(currentTokenNum) - .build()) - .orElse(null); + : tokenRelStore.get( + accountId, + TokenID.newBuilder().tokenNum(currentTokenNum).build()); if (currentWalkedTokenRel != null) { if (!tokenRelsToDeleteByTokenId.containsKey(currentWalkedTokenRel.tokenNumber())) { // we found the first existing token rel that is not in the list of token rels to delete diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/ApproveAllowanceValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/ApproveAllowanceValidator.java index eda67b7db02d..b58355217704 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/ApproveAllowanceValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/ApproveAllowanceValidator.java @@ -212,6 +212,6 @@ private void validateTokenBasics( && owner.accountNumber() == spender.accountNumOrThrow(), SPENDER_ACCOUNT_SAME_AS_OWNER); final var relation = tokenRelStore.get(ownerId, tokenId); - validateTrue(relation.isPresent(), TOKEN_NOT_ASSOCIATED_TO_ACCOUNT); + validateTrue(relation != null, TOKEN_NOT_ASSOCIATED_TO_ACCOUNT); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CustomFeesValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CustomFeesValidator.java index a2fe2d140454..ab922e74a94e 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CustomFeesValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CustomFeesValidator.java @@ -119,7 +119,7 @@ public void validateForFeeScheduleUpdate( final var tokenId = TokenID.newBuilder().tokenNum(token.tokenNumber()).build(); final var relation = tokenRelationStore.get(collectorId, tokenId); - validateTrue(relation.isPresent(), TOKEN_NOT_ASSOCIATED_TO_FEE_COLLECTOR); + validateTrue(relation != null, TOKEN_NOT_ASSOCIATED_TO_FEE_COLLECTOR); } case ROYALTY_FEE -> { // royalty fee can be only applied to non-fungible unique tokens @@ -157,8 +157,7 @@ private void validateExplicitTokenDenomination( final var denomToken = tokenStore.get(tokenNum); validateTrue(denomToken != null, INVALID_TOKEN_ID_IN_CUSTOM_FEES); validateTrue(isFungibleCommon(denomToken.tokenType()), CUSTOM_FEE_DENOMINATION_MUST_BE_FUNGIBLE_COMMON); - validateTrue( - tokenRelationStore.get(feeCollectorNum, tokenNum).isPresent(), TOKEN_NOT_ASSOCIATED_TO_FEE_COLLECTOR); + validateTrue(tokenRelationStore.get(feeCollectorNum, tokenNum) != null, TOKEN_NOT_ASSOCIATED_TO_FEE_COLLECTOR); } /** diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/DeleteAllowanceValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/DeleteAllowanceValidator.java index 33af4c3752d1..44319236244a 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/DeleteAllowanceValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/DeleteAllowanceValidator.java 
@@ -104,7 +104,7 @@ private void validateNftDeleteAllowances( .accountNum(effectiveOwner.accountNumber()) .build(), TokenID.newBuilder().tokenNum(token.tokenNumber()).build()); - validateTrue(relation.isPresent(), TOKEN_NOT_ASSOCIATED_TO_ACCOUNT); + validateTrue(relation != null, TOKEN_NOT_ASSOCIATED_TO_ACCOUNT); validateDeleteSerialNums(serialNums, tokenId, nftStore); } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java new file mode 100644 index 000000000000..89019eda52a9 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java @@ -0,0 +1,91 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.validators; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.*; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static java.util.Objects.requireNonNull; + +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.TokensConfig; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.List; +import javax.inject.Inject; + +/** + * This class contains validations to be done in handle for Token Mint and Token Burn operations + */ +public class TokenSupplyChangeOpsValidator { + private final ConfigProvider configProvider; + + @Inject + public TokenSupplyChangeOpsValidator(@NonNull final ConfigProvider configProvider) { + this.configProvider = requireNonNull(configProvider); + } + + /** + * Validate the token operations mint/wipe/burn given the attributes of the transaction. + * + * @param fungibleCount The number of fungible common token to mint/wipe/burn. + * @param metaDataList either metadata of the nfts being minted or serialNumber list of the + * burn/wipe operations. + */ + public void validate(final long fungibleCount, final List metaDataList) { + final var nftCount = metaDataList.size(); + final var tokensConfig = configProvider.getConfiguration().getConfigData(TokensConfig.class); + + // Get needed configurations + final var maxNftMintBatchSize = tokensConfig.nftsMaxBatchSizeMint(); + final var nftsAreEnabled = tokensConfig.nftsAreEnabled(); + final var maxNftMetadataBytes = tokensConfig.nftsMaxMetadataBytes(); + // validate nft count and fungible count are valid + validateCounts(nftCount, fungibleCount, nftsAreEnabled, maxNftMintBatchSize); + // validate metadata length if only nft count is set + if (fungibleCount <= 0 && nftCount > 0) { + validateMetaData(metaDataList, maxNftMetadataBytes); + } + } + + /** + * Validate the fungible amount and metadata size for token operations mint/burn. 
+ * @param nftCount The number of nfts to mint/burn. + * @param fungibleCount The amount of fungible common token to mint/burn. + * @param nftsAreEnabled Whether nfts are enabled based on config. + * @param maxBatchSize The max batch size for nft mint based on config. + */ + private void validateCounts( + final int nftCount, final long fungibleCount, final boolean nftsAreEnabled, final long maxBatchSize) { + if (nftCount > 0) { + validateTrue(nftsAreEnabled, NOT_SUPPORTED); + } + if (fungibleCount <= 0 && nftCount > 0) { + validateTrue(nftCount <= maxBatchSize, BATCH_SIZE_LIMIT_EXCEEDED); + } + } + + /** + * Validate the metadata size for token operations mint. + * @param metaDataList The metadata list of the nfts to mint. + * @param maxNftMetadataBytes The max metadata size for nft mint based on config. + */ + private void validateMetaData(final List metaDataList, final int maxNftMetadataBytes) { + for (var bytes : metaDataList) { + validateTrue(bytes.toByteArray().length <= maxNftMetadataBytes, METADATA_TOO_LONG); + } + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableTokenRelationStoreImplTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableTokenRelationStoreImplTest.java index d20ec47aa211..28fa59cf5e5a 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableTokenRelationStoreImplTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableTokenRelationStoreImplTest.java @@ -75,7 +75,7 @@ void testGet() { given(tokenRelState.get(notNull())).willReturn(tokenRelation); final var result = subject.get(ACCOUNT_20_ID, TOKEN_10_ID); - Assertions.assertThat(result.orElseThrow()).isEqualTo(tokenRelation); + Assertions.assertThat(result).isEqualTo(tokenRelation); } @Test @@ -84,7 +84,7 @@ void testGetEmpty() { final var result = subject.get(ACCOUNT_20_ID, TokenID.newBuilder().tokenNum(-1L).build()); - Assertions.assertThat(result).isEmpty(); + Assertions.assertThat(result).isNull(); } @Test diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableTokenRelationStoreTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableTokenRelationStoreTest.java index 6e6ded7e0ecc..cf68bcddd0b9 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableTokenRelationStoreTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableTokenRelationStoreTest.java @@ -95,7 +95,7 @@ void testGet() { given(tokenRelState.get(EntityNumPair.fromLongs(ACCOUNT_20, TOKEN_10))).willReturn(tokenRelation); final var result = subject.get(ACCOUNT_20_ID, TOKEN_10_ID); - Assertions.assertThat(result.orElseThrow()).isEqualTo(tokenRelation); + Assertions.assertThat(result).isEqualTo(tokenRelation); } @Test @@ -104,7 +104,7 @@ void testGetEmpty() { final var result = subject.get(ACCOUNT_20_ID, TokenID.newBuilder().tokenNum(-1L).build()); - Assertions.assertThat(result).isEmpty(); + Assertions.assertThat(result).isNull(); } @Test @@ -114,7 +114,7 @@ void testGetForModify() { .willReturn(tokenRelation); final var result = subject.getForModify(ACCOUNT_20_ID, TOKEN_10_ID); - Assertions.assertThat(result.orElseThrow()).isEqualTo(tokenRelation); + 
Assertions.assertThat(result).isEqualTo(tokenRelation); } @Test @@ -124,7 +124,7 @@ void testGetForModifyEmpty() { final var result = subject.getForModify(AccountID.newBuilder().accountNum(-2L).build(), TOKEN_10_ID); - Assertions.assertThat(result).isEmpty(); + Assertions.assertThat(result).isNull(); } @Test diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAssociateToAccountHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAssociateToAccountHandlerTest.java index 1fb2dea4ccc0..4c21fe06bdca 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAssociateToAccountHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAssociateToAccountHandlerTest.java @@ -298,21 +298,16 @@ void tokensAssociateToAccountWithNoTokenRels() { final var headToken = TokenID.newBuilder() .tokenNum(writableAccountStore.getAccountById(newAcctId).headTokenNumber()) .build(); - final var headTokenRel = - writableTokenRelStore.get(newAcctId, headToken).get(); + final var headTokenRel = writableTokenRelStore.get(newAcctId, headToken); Assertions.assertThat(headTokenRel.frozen()).isFalse(); Assertions.assertThat(headTokenRel.kycGranted()).isFalse(); Assertions.assertThat(headTokenRel.previousToken()).isNotPositive(); Assertions.assertThat(headTokenRel.tokenNumber()) .isEqualTo(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY.getTokenNum()); Assertions.assertThat(headTokenRel.nextToken()).isEqualTo(KNOWN_TOKEN_WITH_WIPE.getTokenNum()); - final var nextToHeadTokenRel = writableTokenRelStore - .get( - newAcctId, - TokenID.newBuilder() - .tokenNum(headTokenRel.nextToken()) - .build()) - .get(); + final var nextToHeadTokenRel = writableTokenRelStore.get( + newAcctId, + TokenID.newBuilder().tokenNum(headTokenRel.nextToken()).build()); Assertions.assertThat(nextToHeadTokenRel.frozen()).isFalse(); Assertions.assertThat(nextToHeadTokenRel.kycGranted()).isFalse(); Assertions.assertThat(nextToHeadTokenRel.previousToken()) @@ -367,8 +362,7 @@ void tokensAssociateToAccountWithExistingTokenRels() { final var headTokenId = TokenID.newBuilder() .tokenNum(writableAccountStore.getAccountById(newAcctId).headTokenNumber()) .build(); - final var headTokenRel = - writableTokenRelStore.get(newAcctId, headTokenId).get(); + final var headTokenRel = writableTokenRelStore.get(newAcctId, headTokenId); Assertions.assertThat(headTokenRel.previousToken()).isNotPositive(); Assertions.assertThat(headTokenRel.tokenNumber()).isEqualTo(KNOWN_TOKEN_WITH_FREEZE.getTokenNum()); Assertions.assertThat(headTokenRel.nextToken()).isEqualTo(KNOWN_TOKEN_WITH_KYC.getTokenNum()); @@ -376,13 +370,9 @@ void tokensAssociateToAccountWithExistingTokenRels() { Assertions.assertThat(headTokenRel.kycGranted()).isFalse(); Assertions.assertThat(headTokenRel.automaticAssociation()).isFalse(); - final var nextToHeadTokenRel = writableTokenRelStore - .get( - newAcctId, - TokenID.newBuilder() - .tokenNum(headTokenRel.nextToken()) - .build()) - .get(); + final var nextToHeadTokenRel = writableTokenRelStore.get( + newAcctId, + TokenID.newBuilder().tokenNum(headTokenRel.nextToken()).build()); Assertions.assertThat(nextToHeadTokenRel.previousToken()).isEqualTo(KNOWN_TOKEN_WITH_FREEZE.getTokenNum()); Assertions.assertThat(nextToHeadTokenRel.tokenNumber()).isEqualTo(KNOWN_TOKEN_WITH_KYC.getTokenNum()); 
Assertions.assertThat(nextToHeadTokenRel.nextToken()).isEqualTo(KNOWN_TOKEN_WITH_WIPE.getTokenNum()); @@ -392,13 +382,11 @@ void tokensAssociateToAccountWithExistingTokenRels() { Assertions.assertThat(nextToHeadTokenRel.kycGranted()).isFalse(); Assertions.assertThat(nextToHeadTokenRel.automaticAssociation()).isFalse(); - final var thirdTokenRel = writableTokenRelStore - .get( - newAcctId, - TokenID.newBuilder() - .tokenNum(nextToHeadTokenRel.nextToken()) - .build()) - .get(); + final var thirdTokenRel = writableTokenRelStore.get( + newAcctId, + TokenID.newBuilder() + .tokenNum(nextToHeadTokenRel.nextToken()) + .build()); Assertions.assertThat(thirdTokenRel.previousToken()).isEqualTo(KNOWN_TOKEN_WITH_KYC.getTokenNum()); Assertions.assertThat(thirdTokenRel.tokenNumber()).isEqualTo(KNOWN_TOKEN_WITH_WIPE.getTokenNum()); Assertions.assertThat(thirdTokenRel.nextToken()).isEqualTo(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY.getTokenNum()); @@ -406,13 +394,9 @@ void tokensAssociateToAccountWithExistingTokenRels() { Assertions.assertThat(thirdTokenRel.kycGranted()).isFalse(); Assertions.assertThat(thirdTokenRel.automaticAssociation()).isFalse(); - final var fourthTokenRel = writableTokenRelStore - .get( - newAcctId, - TokenID.newBuilder() - .tokenNum(thirdTokenRel.nextToken()) - .build()) - .get(); + final var fourthTokenRel = writableTokenRelStore.get( + newAcctId, + TokenID.newBuilder().tokenNum(thirdTokenRel.nextToken()).build()); Assertions.assertThat(fourthTokenRel.previousToken()).isEqualTo(KNOWN_TOKEN_WITH_WIPE.getTokenNum()); Assertions.assertThat(fourthTokenRel.tokenNumber()) .isEqualTo(KNOWN_TOKEN_WITH_FEE_SCHEDULE_KEY.getTokenNum()); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java index 50ff3ad39bcb..bbd58d77a4d0 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java @@ -375,8 +375,7 @@ void tokenRelForDeletedTokenIsRemoved() { // Since the account had no positive balances, its number of positive balances should still be zero Assertions.assertThat(savedAcct.numberPositiveBalances()).isZero(); // Verify that the token rel was removed - final var supposedlyDeletedTokenRel = - writableTokenRelStore.get(ACCOUNT_1339, TOKEN_555_ID).orElse(null); + final var supposedlyDeletedTokenRel = writableTokenRelStore.get(ACCOUNT_1339, TOKEN_555_ID); Assertions.assertThat(supposedlyDeletedTokenRel).isNull(); } @@ -420,8 +419,7 @@ void tokenRelForNonexistingTokenIsRemoved() { // Since the account had no positive balances, its number of positive balances should still be zero Assertions.assertThat(savedAcct.numberPositiveBalances()).isZero(); // Verify that the token rel was removed - final var supposedlyDeletedTokenRel = - writableTokenRelStore.get(ACCOUNT_1339, TOKEN_555_ID).orElse(null); + final var supposedlyDeletedTokenRel = writableTokenRelStore.get(ACCOUNT_1339, TOKEN_555_ID); Assertions.assertThat(supposedlyDeletedTokenRel).isNull(); } @@ -498,13 +496,11 @@ void tokenRelAndTreasuryTokenRelAreUpdatedForFungible() { 
Assertions.assertThat(treasuryAcct.headTokenNumber()).isEqualTo(TOKEN_555_ID.tokenNum()); // Verify that the token rel with account 1339 was removed - final var supposedlyDeletedTokenRel = - writableTokenRelStore.get(ACCOUNT_1339, TOKEN_555_ID).orElse(null); + final var supposedlyDeletedTokenRel = writableTokenRelStore.get(ACCOUNT_1339, TOKEN_555_ID); Assertions.assertThat(supposedlyDeletedTokenRel).isNull(); // Verify that the token rel with the treasury account was updated - final var treasuryTokenRel = - writableTokenRelStore.get(ACCOUNT_2020, TOKEN_555_ID).orElse(null); + final var treasuryTokenRel = writableTokenRelStore.get(ACCOUNT_2020, TOKEN_555_ID); Assertions.assertThat(treasuryTokenRel).isNotNull(); // Verify that the treasury balance is now equal to its supply Assertions.assertThat(treasuryTokenRel.balance()).isEqualTo(totalSupply); @@ -583,14 +579,11 @@ void multipleTokenRelsAreRemoved() { Assertions.assertThat(savedAcct.numberPositiveBalances()).isZero(); // Verify that the token rels were removed - final var token444Rel = - writableTokenRelStore.get(ACCOUNT_1339, token444Id).orElse(null); + final var token444Rel = writableTokenRelStore.get(ACCOUNT_1339, token444Id); Assertions.assertThat(token444Rel).isNull(); - final var token555Rel = - writableTokenRelStore.get(ACCOUNT_1339, TOKEN_555_ID).orElse(null); + final var token555Rel = writableTokenRelStore.get(ACCOUNT_1339, TOKEN_555_ID); Assertions.assertThat(token555Rel).isNull(); - final var token666Rel = - writableTokenRelStore.get(ACCOUNT_1339, TOKEN_666_ID).orElse(null); + final var token666Rel = writableTokenRelStore.get(ACCOUNT_1339, TOKEN_666_ID); Assertions.assertThat(token666Rel).isNull(); } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenFreezeAccountHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenFreezeAccountHandlerTest.java index a91a645e146c..fea9514b60e5 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenFreezeAccountHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenFreezeAccountHandlerTest.java @@ -56,7 +56,6 @@ import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; -import java.util.Optional; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; @@ -228,7 +227,7 @@ void tokenRelNotFound() throws HandleException { given(readableAccountStore.getAccountById(ACCOUNT_13257)) .willReturn( Account.newBuilder().accountNumber(accountNumber).build()); - given(tokenRelStore.getForModify(ACCOUNT_13257, token)).willReturn(Optional.empty()); + given(tokenRelStore.getForModify(ACCOUNT_13257, token)).willReturn(null); final var txn = newFreezeTxn(token); given(context.body()).willReturn(txn); @@ -247,10 +246,10 @@ void tokenRelFreezeSuccessful() { .willReturn( Account.newBuilder().accountNumber(accountNumber).build()); given(tokenRelStore.getForModify(ACCOUNT_13257, token)) - .willReturn(Optional.of(TokenRelation.newBuilder() + .willReturn(TokenRelation.newBuilder() .tokenNumber(token.tokenNum()) .accountNumber(accountNumber) - .build())); + .build()); final var txn = newFreezeTxn(token); given(context.body()).willReturn(txn); diff --git 
a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGrantKycToAccountHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGrantKycToAccountHandlerTest.java index e859f462d0fa..ff915b4fd845 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGrantKycToAccountHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGrantKycToAccountHandlerTest.java @@ -55,7 +55,6 @@ import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; import java.util.Collections; -import java.util.Optional; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; @@ -204,7 +203,7 @@ void nullAccountIdThrowsException() { @Test @DisplayName("When getForModify returns empty, should not put or commit") void emptyGetForModifyShouldNotPersist() { - given(tokenRelStore.getForModify(notNull(), notNull())).willReturn(Optional.empty()); + given(tokenRelStore.getForModify(notNull(), notNull())).willReturn(null); final var txnBody = newTxnBody(true, true); given(handleContext.body()).willReturn(txnBody); @@ -220,7 +219,7 @@ void emptyGetForModifyShouldNotPersist() { void kycGrantedAndPersisted() { final var stateTokenRel = newTokenRelationBuilder().kycGranted(false).build(); - given(tokenRelStore.getForModify(payerId, tokenId)).willReturn(Optional.of(stateTokenRel)); + given(tokenRelStore.getForModify(payerId, tokenId)).willReturn(stateTokenRel); final var txnBody = newTxnBody(true, true); given(handleContext.body()).willReturn(txnBody); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerParityTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerParityTest.java index 55bb9f767b97..2eecb284bd10 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerParityTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerParityTest.java @@ -30,12 +30,29 @@ import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.handlers.TokenMintHandler; import com.hedera.node.app.service.token.impl.test.handlers.util.ParityTestBase; +import com.hedera.node.app.service.token.impl.validators.TokenSupplyChangeOpsValidator; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.config.ConfigProvider; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +@ExtendWith(MockitoExtension.class) class TokenMintHandlerParityTest extends ParityTestBase { - private final TokenMintHandler subject = new TokenMintHandler(); + @Mock + private ConfigProvider configProvider; + + private TokenSupplyChangeOpsValidator validator; + private TokenMintHandler subject; + + @BeforeEach + void setup() { + validator = new TokenSupplyChangeOpsValidator(configProvider); + 
subject = new TokenMintHandler(validator); + } @Test void tokenMintWithSupplyKeyedTokenScenario() throws PreCheckException { diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerTest.java new file mode 100644 index 000000000000..8d3e5f627bf8 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerTest.java @@ -0,0 +1,246 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.*; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatNoException; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy; +import static org.mockito.BDDMockito.given; + +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TransactionID; +import com.hedera.hapi.node.token.TokenMintTransactionBody; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.config.VersionedConfigImpl; +import com.hedera.node.app.records.SingleTransactionRecordBuilder; +import com.hedera.node.app.service.token.ReadableTokenStore; +import com.hedera.node.app.service.token.impl.handlers.TokenMintHandler; +import com.hedera.node.app.service.token.impl.records.TokenMintRecordBuilder; +import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoTokenHandlerTestBase; +import com.hedera.node.app.service.token.impl.validators.TokenSupplyChangeOpsValidator; +import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import java.time.Instant; +import java.util.List; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; + +class TokenMintHandlerTest extends CryptoTokenHandlerTestBase { + @Mock(strictness = Mock.Strictness.LENIENT) + private ConfigProvider configProvider; + + @Mock(strictness = Mock.Strictness.LENIENT) + private HandleContext handleContext; + + private final Bytes metadata1 = Bytes.wrap("memo".getBytes()); + private final Bytes metadata2 = Bytes.wrap("memo2".getBytes()); + private final Instant consensusNow = Instant.ofEpochSecond(1_234_567L); + private SingleTransactionRecordBuilder recordBuilder; + private 
TokenMintHandler subject; + + @BeforeEach + public void setUp() { + super.setUp(); + refreshWritableStores(); + givenStoresAndConfig(configProvider, handleContext); + subject = new TokenMintHandler(new TokenSupplyChangeOpsValidator(configProvider)); + recordBuilder = new SingleTransactionRecordBuilder(consensusNow); + } + + @Test + void rejectsNftMintsWhenNftsNotEnabled() { + givenMintTxn(nonFungibleTokenId, List.of(metadata1, metadata2), null); + configuration = new HederaTestConfigBuilder() + .withValue("tokens.nfts.areEnabled", false) + .getOrCreateConfig(); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + given(handleContext.configuration()).willReturn(configuration); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(NOT_SUPPORTED)); + } + + @Test + void acceptsValidFungibleTokenMintTxn() { + givenMintTxn(fungibleTokenId, null, 10L); + + assertThat(writableTokenRelStore.get(treasuryId, fungibleTokenId).balance()) + .isEqualTo(1000L); + assertThat(writableAccountStore.get(treasuryId).tinybarBalance()).isEqualTo(10000L); + assertThat(writableAccountStore.get(treasuryId).numberPositiveBalances()) + .isEqualTo(2); + assertThat(writableTokenStore.get(fungibleTokenId).totalSupply()).isEqualTo(1000L); + + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + + // treasury relation balance will increase + assertThat(writableTokenRelStore.get(treasuryId, fungibleTokenId).balance()) + .isEqualTo(1010L); + // tinybar balance should not get affected + assertThat(writableAccountStore.get(treasuryId).tinybarBalance()).isEqualTo(10000L); + + // since there are positive balances for this token relation already, it will not be increased. 
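// (Illustrative sketch, not from the patch; the values are the ones asserted in this test.)
// A fungible mint credits both the treasury token relation and the token's total supply by the
// minted amount, while the treasury's tinybar balance is untouched:
final long startingRelBalance = 1_000L;   // treasury <-> fungible-token relation before handle()
final long startingTotalSupply = 1_000L;  // fungible token total supply before handle()
final long mintAmount = 10L;              // amount in the synthetic TokenMint body
final long expectedRelBalance = startingRelBalance + mintAmount;   // 1_010L
final long expectedTotalSupply = startingTotalSupply + mintAmount; // 1_010L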
+ assertThat(writableAccountStore.get(treasuryId).numberPositiveBalances()) + .isEqualTo(2); + // supply of fungible token increases + assertThat(writableTokenStore.get(fungibleTokenId).totalSupply()).isEqualTo(1010L); + } + + @Test + void acceptsValidNonFungibleTokenMintTxn() { + givenMintTxn(nonFungibleTokenId, List.of(metadata1, metadata2), null); + + assertThat(writableTokenRelStore.get(treasuryId, nonFungibleTokenId).balance()) + .isEqualTo(1000L); + assertThat(writableAccountStore.get(treasuryId).tinybarBalance()).isEqualTo(10000L); + assertThat(writableAccountStore.get(treasuryId).numberOwnedNfts()).isEqualTo(2); + assertThat(writableTokenStore.get(nonFungibleTokenId).totalSupply()).isEqualTo(1000L); + assertThat(recordBuilder.serialNumbers()).isNull(); + + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + + // treasury relation balance will increase by metadata list size + assertThat(writableTokenRelStore.get(treasuryId, nonFungibleTokenId).balance()) + .isEqualTo(1002L); + // tinybar balance should not get affected + assertThat(writableAccountStore.get(treasuryId).tinybarBalance()).isEqualTo(10000L); + + // number of owned NFTs should increase + assertThat(writableAccountStore.get(treasuryId).numberOwnedNfts()).isEqualTo(4); + // treasury relation supply will not increase since its not fungible token change + assertThat(writableTokenStore.get(nonFungibleTokenId).totalSupply()).isEqualTo(1000L); + assertThat(recordBuilder.serialNumbers()).isEqualTo(List.of(1L, 2L)); + } + + @Test + void failsOnMissingToken() { + givenMintTxn(TokenID.newBuilder().tokenNum(100000L).build(), List.of(metadata1, metadata2), null); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_TOKEN_ID)); + } + + @Test + void failsOnNegativeAmount() { + givenMintTxn(fungibleTokenId, null, -2L); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_TOKEN_MINT_AMOUNT)); + } + + @Test + void allowsZeroAmount() { + givenMintTxn(fungibleTokenId, null, 0L); + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + } + + @Test + void rejectsBothAMountAndMetadataFields() throws PreCheckException { + final var txn = givenMintTxn(fungibleTokenId, List.of(metadata1), 2L); + final var context = new FakePreHandleContext(readableAccountStore, txn); + assertThatThrownBy(() -> subject.preHandle(context)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_TRANSACTION_BODY)); + } + + @Test + void allowsTxnBodyWithNoProps() throws PreCheckException { + final var txn = givenMintTxn(fungibleTokenId, null, null); + refreshReadableStores(); + final var context = new FakePreHandleContext(readableAccountStore, txn); + context.registerStore(ReadableTokenStore.class, readableTokenStore); + + assertThatNoException().isThrownBy(() -> subject.preHandle(context)); + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + } + + @Test + void propagatesErrorOnBadMetadata() { + givenMintTxn(nonFungibleTokenId, List.of(Bytes.wrap("test".getBytes())), null); + + configuration = new HederaTestConfigBuilder() + .withValue("tokens.nfts.maxMetadataBytes", 1) + .getOrCreateConfig(); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + given(handleContext.configuration()).willReturn(configuration); + + assertThatThrownBy(() -> subject.handle(handleContext)) + 
.isInstanceOf(HandleException.class) + .has(responseCode(METADATA_TOO_LONG)); + } + + @Test + void propagatesErrorOnMaxBatchSizeReached() { + givenMintTxn(nonFungibleTokenId, List.of(metadata1, metadata2), null); + + configuration = new HederaTestConfigBuilder() + .withValue("tokens.nfts.maxBatchSizeMint", 1) + .getOrCreateConfig(); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + given(handleContext.configuration()).willReturn(configuration); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(BATCH_SIZE_LIMIT_EXCEEDED)); + } + + @Test + void validatesMintingResourcesLimit() { + givenMintTxn(nonFungibleTokenId, List.of(Bytes.wrap("test".getBytes()), Bytes.wrap("test1".getBytes())), null); + + configuration = new HederaTestConfigBuilder() + .withValue("tokens.nfts.maxAllowedMints", 1) + .getOrCreateConfig(); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + given(handleContext.configuration()).willReturn(configuration); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(MAX_NFTS_IN_PRICE_REGIME_HAVE_BEEN_MINTED)); + } + + private TransactionBody givenMintTxn(final TokenID tokenId, final List metadata, final Long amount) { + final var transactionID = + TransactionID.newBuilder().accountID(payerId).transactionValidStart(consensusTimestamp); + final var builder = TokenMintTransactionBody.newBuilder().token(tokenId); + if (metadata != null) { + builder.metadata(metadata); + } + if (amount != null) { + builder.amount(amount); + } + final var txn = TransactionBody.newBuilder() + .transactionID(transactionID) + .tokenMint(builder.build()) + .build(); + + given(handleContext.body()).willReturn(txn); + given(handleContext.consensusNow()).willReturn(Instant.ofEpochSecond(1_234_567L)); + given(handleContext.recordBuilder(TokenMintRecordBuilder.class)).willReturn(recordBuilder); + + return txn; + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenRevokeKycFromAccountHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenRevokeKycFromAccountHandlerTest.java index 3f89c1febb1a..b23b8987ffac 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenRevokeKycFromAccountHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenRevokeKycFromAccountHandlerTest.java @@ -60,7 +60,6 @@ import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; -import java.util.Optional; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; @@ -197,7 +196,7 @@ void nullTokenRevokeKycThrowsException() { @Test @DisplayName("When getForModify returns empty, should not put or commit") void emptyGetForModifyShouldNotPersist() { - given(tokenRelStore.getForModify(notNull(), notNull())).willReturn(Optional.empty()); + given(tokenRelStore.getForModify(notNull(), notNull())).willReturn(null); final var txnBody = newTxnBody(); given(handleContext.body()).willReturn(txnBody); @@ -217,7 +216,7 @@ void kycRevokedAndPersisted() { 
.accountNumber(ACCOUNT_100.accountNumOrThrow()) .kycGranted(true) .build(); - given(tokenRelStore.getForModify(ACCOUNT_100, TOKEN_10)).willReturn(Optional.of(stateTokenRel)); + given(tokenRelStore.getForModify(ACCOUNT_100, TOKEN_10)).willReturn(stateTokenRel); final var txnBody = newTxnBody(); given(handleContext.body()).willReturn(txnBody); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUnfreezeAccountHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUnfreezeAccountHandlerTest.java index 28803a2c2f68..af3e40b3324a 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUnfreezeAccountHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUnfreezeAccountHandlerTest.java @@ -61,7 +61,6 @@ import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; -import java.util.Optional; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; @@ -222,7 +221,7 @@ void tokenRelNotFound() throws HandleException { given(accountStore.getAccountById(ACCOUNT_13257)) .willReturn( Account.newBuilder().accountNumber(accountNumber).build()); - given(tokenRelStore.getForModify(ACCOUNT_13257, token)).willReturn(Optional.empty()); + given(tokenRelStore.getForModify(ACCOUNT_13257, token)).willReturn(null); final var txn = newUnfreezeTxn(token); given(context.body()).willReturn(txn); @@ -241,10 +240,10 @@ void tokenRelUnfreezeSuccessful() { .willReturn( Account.newBuilder().accountNumber(accountNumber).build()); given(tokenRelStore.getForModify(ACCOUNT_13257, token)) - .willReturn(Optional.of(TokenRelation.newBuilder() + .willReturn(TokenRelation.newBuilder() .tokenNumber(token.tokenNum()) .accountNumber(accountNumber) - .build())); + .build()); final var txn = newUnfreezeTxn(token); given(context.body()).willReturn(txn); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java index 5347efbe71cb..9d262482098a 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java @@ -96,6 +96,7 @@ public class CryptoTokenHandlerTestBase extends StateBuilderUtil { protected final Key feeScheduleKey = A_COMPLEX_KEY; protected final Key supplyKey = A_COMPLEX_KEY; protected final Key freezeKey = A_COMPLEX_KEY; + protected final Key treasuryKey = C_COMPLEX_KEY; /* ---------- Account IDs */ protected final AccountID payerId = AccountID.newBuilder().accountNum(3).build(); @@ -108,7 +109,7 @@ public class CryptoTokenHandlerTestBase extends StateBuilderUtil { protected final AccountID ownerId = AccountID.newBuilder().accountNum(123456).build(); protected final AccountID treasuryId = - AccountID.newBuilder().accountNum(100).build(); + AccountID.newBuilder().accountNum(1000000).build(); protected final 
AccountID autoRenewId = AccountID.newBuilder().accountNum(4).build(); protected final AccountID spenderId = AccountID.newBuilder().accountNum(12345).build(); @@ -140,6 +141,11 @@ public class CryptoTokenHandlerTestBase extends StateBuilderUtil { EntityNumPair.fromLongs(ownerId.accountNum(), fungibleTokenNum.longValue()); protected final EntityNumPair ownerNFTPair = EntityNumPair.fromLongs(ownerId.accountNum(), nonFungibleTokenNum.longValue()); + + protected final EntityNumPair treasuryFTPair = + EntityNumPair.fromLongs(treasuryId.accountNum(), fungibleTokenNum.longValue()); + protected final EntityNumPair treasuryNFTPair = + EntityNumPair.fromLongs(treasuryId.accountNum(), nonFungibleTokenNum.longValue()); protected final UniqueTokenId uniqueTokenIdSl1 = UniqueTokenId.newBuilder() .tokenTypeNumber(nonFungibleTokenId.tokenNum()) .serialNumber(1L) @@ -194,7 +200,7 @@ public class CryptoTokenHandlerTestBase extends StateBuilderUtil { .build(); protected List customFees = List.of(withFixedFee(fixedFee), withFractionalFee(fractionalFee)); - /* ---------- Misc */ + /* ---------- Misc ---------- */ protected final Timestamp consensusTimestamp = Timestamp.newBuilder().seconds(1_234_567L).build(); protected final String tokenName = "test token"; @@ -203,7 +209,7 @@ public class CryptoTokenHandlerTestBase extends StateBuilderUtil { protected final long expirationTime = 1_234_567L; protected final long autoRenewSecs = 100L; protected static final long payerBalance = 10_000L; - /* ---------- States */ + /* ---------- States ---------- */ protected MapReadableKVState readableAliases; protected MapReadableKVState readableAccounts; protected MapWritableKVState writableAliases; @@ -226,27 +232,32 @@ public class CryptoTokenHandlerTestBase extends StateBuilderUtil { protected WritableTokenRelationStore writableTokenRelStore; protected ReadableNftStore readableNftStore; protected WritableNftStore writableNftStore; - /* ---------- Tokens */ + /* ---------- Tokens ---------- */ protected Token fungibleToken; protected Token nonFungibleToken; protected Nft nftSl1; protected Nft nftSl2; - /* ---------- Token Relations */ + /* ---------- Token Relations ---------- */ protected TokenRelation fungibleTokenRelation; protected TokenRelation nonFungibleTokenRelation; protected TokenRelation ownerFTRelation; protected TokenRelation ownerNFTRelation; - /* ---------- Accounts */ + protected TokenRelation treasuryFTRelation; + protected TokenRelation treasuryNFTRelation; + + /* ---------- Accounts ---------- */ protected Account account; protected Account deleteAccount; protected Account transferAccount; protected Account ownerAccount; protected Account spenderAccount; protected Account delegatingSpenderAccount; + protected Account treasuryAccount; private Map accountsMap; - private Map aliasesMap; + private Map aliasesMap; private Map tokensMap; + private Map tokenRelsMap; @Mock protected ReadableStates readableStates; @@ -274,12 +285,21 @@ private void setUpAllEntities() { accountsMap.put(ownerId, ownerAccount); accountsMap.put(delegatingSpenderId, delegatingSpenderAccount); accountsMap.put(spenderId, spenderAccount); + accountsMap.put(treasuryId, treasuryAccount); tokensMap = new HashMap<>(); tokensMap.put(fungibleTokenNum, fungibleToken); tokensMap.put(nonFungibleTokenNum, nonFungibleToken); aliasesMap = new HashMap<>(); + + tokenRelsMap = new HashMap<>(); + tokenRelsMap.put(fungiblePair, fungibleTokenRelation); + tokenRelsMap.put(nonFungiblePair, nonFungibleTokenRelation); + tokenRelsMap.put(ownerFTPair, 
ownerFTRelation); + tokenRelsMap.put(ownerNFTPair, ownerNFTRelation); + tokenRelsMap.put(treasuryFTPair, treasuryFTRelation); + tokenRelsMap.put(treasuryNFTPair, treasuryNFTRelation); } protected void basicMetaAssertions(final PreHandleContext context, final int keysSize) { @@ -343,21 +363,13 @@ private void givenTokensInWritableStore() { } private void givenReadableTokenRelsStore() { - readableTokenRelState = emptyReadableTokenRelsStateBuilder() - .value(fungiblePair, fungibleTokenRelation) - .value(nonFungiblePair, nonFungibleTokenRelation) - .value(ownerFTPair, ownerFTRelation) - .value(ownerNFTPair, ownerNFTRelation) - .build(); + readableTokenRelState = readableTokenRelState(); given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates); } private void givenWritableTokenRelsStore() { - writableTokenRelState = emptyWritableTokenRelsStateBuilder() - .value(fungiblePair, fungibleTokenRelation) - .value(nonFungiblePair, nonFungibleTokenRelation) - .build(); + writableTokenRelState = writableTokenRelState(); given(writableStates.get(TOKEN_RELS)).willReturn(writableTokenRelState); writableTokenRelStore = new WritableTokenRelationStore(writableStates); } @@ -398,6 +410,22 @@ protected MapReadableKVState readableAccountState() { return builder.build(); } + private MapWritableKVState writableTokenRelState() { + final var builder = emptyWritableTokenRelsStateBuilder(); + for (final var entry : tokenRelsMap.entrySet()) { + builder.value(entry.getKey(), entry.getValue()); + } + return builder.build(); + } + + private MapReadableKVState readableTokenRelState() { + final var builder = emptyReadableTokenRelsStateBuilder(); + for (final var entry : tokenRelsMap.entrySet()) { + builder.value(entry.getKey(), entry.getValue()); + } + return builder.build(); + } + @NonNull protected MapWritableKVState writableAliasesState() { return emptyWritableAliasStateBuilder() @@ -443,6 +471,14 @@ private void givenValidTokenRelations() { .copyBuilder() .accountNumber(ownerId.accountNum()) .build(); + treasuryFTRelation = givenFungibleTokenRelation() + .copyBuilder() + .accountNumber(treasuryId.accountNum()) + .build(); + treasuryNFTRelation = givenNonFungibleTokenRelation() + .copyBuilder() + .accountNumber(treasuryId.accountNum()) + .build(); } private void givenValidTokens() { @@ -481,6 +517,11 @@ private void givenValidAccounts() { .copyBuilder() .accountNumber(delegatingSpenderId.accountNum()) .build(); + treasuryAccount = givenValidAccount() + .copyBuilder() + .accountNumber(treasuryId.accountNum()) + .key(treasuryKey) + .build(); } protected Token givenValidFungibleToken() { diff --git a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableTokenRelationStore.java b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableTokenRelationStore.java index 225bf6fefd7c..0e8f4e00d98b 100644 --- a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableTokenRelationStore.java +++ b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableTokenRelationStore.java @@ -20,7 +20,7 @@ import com.hedera.hapi.node.base.TokenID; import com.hedera.hapi.node.state.token.TokenRelation; import edu.umd.cs.findbugs.annotations.NonNull; -import java.util.Optional; +import edu.umd.cs.findbugs.annotations.Nullable; /** * Provides read-only methods for getting underlying data for working with TokenRelations. 
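With this change the relation is returned directly, or null, rather than wrapped in an Optional, which is what the test updates above reflect. A minimal caller-side sketch of the new contract (the helper name and the chosen exception are illustrative only, not part of this change):

    // Assumes only the types already imported by this interface (AccountID, TokenID, TokenRelation).
    static TokenRelation relOrFail(
            final ReadableTokenRelationStore store, final AccountID accountId, final TokenID tokenId) {
        final TokenRelation rel = store.get(accountId, tokenId); // previously Optional<TokenRelation>
        if (rel == null) {
            // callers that used .orElse(null) / .orElseThrow(...) now branch on null instead
            throw new IllegalStateException("No token relation for " + accountId + " and " + tokenId);
        }
        return rel;
    }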
@@ -29,12 +29,14 @@ */ public interface ReadableTokenRelationStore { /** - * Returns the {@link TokenRelation} with the given IDs. If no such token relation exists, returns {@code Optional.empty()} + * Returns the {@link TokenRelation} with the given IDs. If no such token relation exists, + * returns {@code null} * * @param accountId - the id of the account in the token-relation to be retrieved * @param tokenId - the id of the token in the token-relation to be retrieved */ - Optional get(@NonNull final AccountID accountId, @NonNull final TokenID tokenId); + @Nullable + TokenRelation get(@NonNull final AccountID accountId, @NonNull final TokenID tokenId); /** * Returns the number of tokens in the state. From 7fd1b5b5bb45979c410a06fdb822967e008abe36 Mon Sep 17 00:00:00 2001 From: Kim Rader Date: Mon, 12 Jun 2023 07:37:14 -0700 Subject: [PATCH 09/70] Unsupported token queries (#7040) Signed-off-by: Kim Rader Signed-off-by: Lev Povolotsky Co-authored-by: Lev Povolotsky --- .../TokenGetAccountNftInfosHandler.java | 9 +- .../impl/handlers/TokenGetNftInfoHandler.java | 2 +- .../handlers/TokenGetNftInfosHandler.java | 8 +- .../TokenGetAccountNftInfosHandlerTest.java | 86 +++++++++++++++++++ .../handlers/TokenGetNftInfosHandlerTest.java | 85 ++++++++++++++++++ 5 files changed, 185 insertions(+), 5 deletions(-) create mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetAccountNftInfosHandlerTest.java create mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetNftInfosHandlerTest.java diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetAccountNftInfosHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetAccountNftInfosHandler.java index 60fdb5aaa7ab..4b5036ae8a82 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetAccountNftInfosHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetAccountNftInfosHandler.java @@ -16,6 +16,7 @@ package com.hedera.node.app.service.token.impl.handlers; +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.HederaFunctionality; @@ -34,6 +35,9 @@ /** * This class contains all workflow-related functionality regarding {@link * HederaFunctionality#TOKEN_GET_ACCOUNT_NFT_INFOS}. + *

<p>
+ * This token service call has been deprecated. Because protobufs promise backwards compatibility, + * we cannot remove it. However, it should not be used. */ @Singleton public class TokenGetAccountNftInfosHandler extends FreeQueryHandler { @@ -58,13 +62,14 @@ public Response createEmptyResponse(@NonNull final ResponseHeader header) { @Override public void validate(@NonNull final QueryContext context) throws PreCheckException { requireNonNull(context); - throw new UnsupportedOperationException("Not implemented"); + throw new PreCheckException(NOT_SUPPORTED); } @Override public Response findResponse(@NonNull final QueryContext context, @NonNull final ResponseHeader header) { + // this code should never be executed, as validate() should fail before we get here requireNonNull(context); requireNonNull(header); - throw new UnsupportedOperationException("Not implemented"); + throw new UnsupportedOperationException(NOT_SUPPORTED.toString()); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfoHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfoHandler.java index 048f5e15b72f..575d10771822 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfoHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfoHandler.java @@ -45,7 +45,7 @@ public TokenGetNftInfoHandler() { @Override public QueryHeader extractHeader(@NonNull final Query query) { requireNonNull(query); - return query.tokenGetInfoOrThrow().header(); + return query.tokenGetNftInfoOrThrow().header(); } @Override diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfosHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfosHandler.java index 930f148d1129..da73ec318943 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfosHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfosHandler.java @@ -16,6 +16,7 @@ package com.hedera.node.app.service.token.impl.handlers; +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.HederaFunctionality; @@ -34,6 +35,9 @@ /** * This class contains all workflow-related functionality regarding {@link * HederaFunctionality#TOKEN_GET_NFT_INFOS}. + *

<p>
+ * This token service call has been deprecated. Because protobufs promise backwards compatibility, + * we cannot remove it. However, it should not be used. */ @Singleton public class TokenGetNftInfosHandler extends FreeQueryHandler { @@ -58,13 +62,13 @@ public Response createEmptyResponse(@NonNull final ResponseHeader header) { @Override public void validate(@NonNull final QueryContext context) throws PreCheckException { requireNonNull(context); - throw new UnsupportedOperationException("Not implemented"); + throw new PreCheckException(NOT_SUPPORTED); } @Override public Response findResponse(@NonNull final QueryContext context, @NonNull final ResponseHeader header) { requireNonNull(context); requireNonNull(header); - throw new UnsupportedOperationException("Not implemented"); + throw new UnsupportedOperationException(NOT_SUPPORTED.toString()); } } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetAccountNftInfosHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetAccountNftInfosHandlerTest.java new file mode 100644 index 000000000000..0ef546531994 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetAccountNftInfosHandlerTest.java @@ -0,0 +1,86 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.hedera.hapi.node.base.QueryHeader; +import com.hedera.hapi.node.base.ResponseHeader; +import com.hedera.hapi.node.token.TokenGetAccountNftInfosQuery; +import com.hedera.hapi.node.token.TokenGetAccountNftInfosResponse; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.node.app.service.token.impl.handlers.TokenGetAccountNftInfosHandler; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.app.spi.workflows.QueryContext; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class TokenGetAccountNftInfosHandlerTest { + @Mock + private QueryContext context; + + private TokenGetAccountNftInfosHandler subject; + + @BeforeEach + void setUp() { + subject = new TokenGetAccountNftInfosHandler(); + } + + @Test + void extractsHeader() { + final var data = TokenGetAccountNftInfosQuery.newBuilder() + .header(QueryHeader.newBuilder().build()) + .build(); + final var query = Query.newBuilder().tokenGetAccountNftInfos(data).build(); + final var header = subject.extractHeader(query); + final var op = query.tokenGetAccountNftInfosOrThrow(); + assertThat(op.header()).isEqualTo(header); + } + + @Test + void createsEmptyResponse() { + final var responseHeader = ResponseHeader.newBuilder().build(); + final var response = subject.createEmptyResponse(responseHeader); + final var expectedResponse = Response.newBuilder() + .tokenGetAccountNftInfos( + TokenGetAccountNftInfosResponse.newBuilder().header(responseHeader)) + .build(); + assertThat(expectedResponse).isEqualTo(response); + } + + @Test + void validateThrowsPreCheck() { + assertThatThrownBy(() -> subject.validate(context)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(NOT_SUPPORTED)); + } + + @Test + void findResponseThrowsUnsupported() { + final var responseHeader = ResponseHeader.newBuilder().build(); + assertThatThrownBy(() -> subject.findResponse(context, responseHeader)) + .isInstanceOf(UnsupportedOperationException.class); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetNftInfosHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetNftInfosHandlerTest.java new file mode 100644 index 000000000000..3e42ea8325c1 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetNftInfosHandlerTest.java @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.hedera.hapi.node.base.QueryHeader; +import com.hedera.hapi.node.base.ResponseHeader; +import com.hedera.hapi.node.token.TokenGetNftInfosQuery; +import com.hedera.hapi.node.token.TokenGetNftInfosResponse; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.node.app.service.token.impl.handlers.TokenGetNftInfosHandler; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.app.spi.workflows.QueryContext; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class TokenGetNftInfosHandlerTest { + @Mock + private QueryContext context; + + private TokenGetNftInfosHandler subject; + + @BeforeEach + void setUp() { + subject = new TokenGetNftInfosHandler(); + } + + @Test + void extractsHeader() { + final var data = TokenGetNftInfosQuery.newBuilder() + .header(QueryHeader.newBuilder().build()) + .build(); + final var query = Query.newBuilder().tokenGetNftInfos(data).build(); + final var header = subject.extractHeader(query); + final var op = query.tokenGetNftInfosOrThrow(); + assertThat(op.header()).isEqualTo(header); + } + + @Test + void createsEmptyResponse() { + final var responseHeader = ResponseHeader.newBuilder().build(); + final var response = subject.createEmptyResponse(responseHeader); + final var expectedResponse = Response.newBuilder() + .tokenGetNftInfos(TokenGetNftInfosResponse.newBuilder().header(responseHeader)) + .build(); + assertThat(expectedResponse).isEqualTo(response); + } + + @Test + void validateThrowsPreCheck() { + assertThatThrownBy(() -> subject.validate(context)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(NOT_SUPPORTED)); + } + + @Test + void findResponseThrowsUnsupported() { + final var responseHeader = ResponseHeader.newBuilder().build(); + assertThatThrownBy(() -> subject.findResponse(context, responseHeader)) + .isInstanceOf(UnsupportedOperationException.class); + } +} From 666b607127eb7a77204b6b9482cde0e7342d0ca8 Mon Sep 17 00:00:00 2001 From: David Bakin <117694041+david-bakin-sl@users.noreply.github.com> Date: Mon, 12 Jun 2023 10:35:50 -0700 Subject: [PATCH 10/70] Retire old security model for smart contracts by changing HAPI signature-check block limit to LOW (#6960) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Set `contracts.maxNumWithHapiSigsSuccess` to `0` to disable old security model. - Ensure (appropriate) unit tests check both low and high limits. 
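For the low/high coverage mentioned in the bullet above, the unit tests are parameterized with JUnit 5's @EnumSource. A minimal sketch of the pattern used further down in this patch; the element type of the Consumer (the tests' dynamic-properties mock) and the test-method name are assumptions for illustration:

    private enum WithHapiBlockLimit { LOW, HIGH }

    // LOW mirrors the new default (contracts.maxNumWithHapiSigsAccess=0); HIGH mirrors the old behavior.
    private static final Map<WithHapiBlockLimit, Consumer<GlobalDynamicProperties>> setHapiBlockLimitGivens = Map.of(
            WithHapiBlockLimit.LOW,
            props -> given(props.maxNumWithHapiSigsAccess()).willReturn(0L),
            WithHapiBlockLimit.HIGH,
            props -> {
                given(props.maxNumWithHapiSigsAccess()).willReturn(Long.MAX_VALUE);
                given(props.systemContractsWithTopLevelSigsAccess()).willReturn(Set.of(CryptoTransfer));
            });

    @ParameterizedTest
    @EnumSource
    void someTransferScenario(final WithHapiBlockLimit limit) {
        setHapiBlockLimitGivens.get(limit).accept(dynamicProperties);
        // ... rest of the existing test body, unchanged ...
    }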
* In fact the unit tests don't test this feature at all, just have to be corrected for mockability. - "Fix" BDD tests by ... making them pass for the V1 security model - ⮕ Does **not** have new BDD tests for the V2 security model to _replace_ the ones that now really only work for the V1 security model - There are some of those created when the V2 security model was created but there needs to be a more comprehensive set - ⮕ Does **not** confirm that _negative_ tests in the same affected suites (or elsewhere for that matter) return failure (that is, the transaction returns failure so the test returns success) _for the right reason_. - I.e., some of them may now be failing due to a V2 security model violation and _not_ because of the reason they're _supposed_ to fail Addresses #6767 Signed-off-by: David Bakin <117694041+david-bakin-sl@users.noreply.github.com> Signed-off-by: Stoyan Panayotov Co-authored-by: Stoyan Panayotov --- .../src/test/resources/bootstrap.properties | 2 +- .../node/config/data/ContractsConfig.java | 2 +- .../src/main/resources/bootstrap.properties | 2 +- .../properties/BootstrapPropertiesTest.java | 2 +- .../precompile/ERC20PrecompilesTest.java | 45 +- .../precompile/ERC721PrecompilesTest.java | 36 +- .../precompile/TransferPrecompilesTest.java | 28 +- .../src/test/resources/bootstrap.properties | 2 +- .../resources/bootstrap/standard.properties | 2 +- .../src/eet/java/EndToEndTests.java | 1 - .../src/itest/java/AllIntegrationTests.java | 11 +- .../src/itest/java/ConcurrentSuites.java | 3 - .../src/itest/java/SequentialSuites.java | 16 +- .../bdd/spec/transactions/TxnUtils.java | 4 +- .../services/bdd/suites/SuiteRunner.java | 4 - .../services/bdd/suites/contract/Utils.java | 85 ++ .../contract/hapi/ContractCallSuite.java | 326 ----- .../ContractCallV1SecurityModelSuite.java | 435 +++++++ .../contract/hapi/ContractCreateSuite.java | 60 +- .../ContractCreateV1SecurityModelSuite.java | 117 ++ .../opcodes/Create2OperationSuite.java | 32 - .../Create2OperationV1SecurityModelSuite.java | 100 ++ .../contract/opcodes/SelfDestructSuite.java | 2 +- .../precompile/AssociatePrecompileSuite.java | 160 +-- ...sociatePrecompileV1SecurityModelSuite.java | 240 ++++ .../precompile/ContractBurnHTSSuite.java | 323 +---- .../ContractBurnHTSV1SecurityModelSuite.java | 434 +++++++ .../contract/precompile/ContractHTSSuite.java | 777 +----------- .../ContractHTSV1SecurityModelSuite.java | 913 ++++++++++++++ .../precompile/ContractKeysHTSSuite.java | 203 +--- .../ContractKeysHTSV1SecurityModelSuite.java | 322 +++++ .../ContractKeysStillWorkAsExpectedSuite.java | 2 +- .../precompile/ContractMintHTSSuite.java | 467 +------ .../ContractMintHTSV1SecurityModelSuite.java | 518 ++++++++ .../precompile/CreatePrecompileSuite.java | 543 +-------- .../CreatePrecompileV1SecurityModelSuite.java | 662 ++++++++++ .../precompile/CryptoTransferHTSSuite.java | 716 ----------- ...CryptoTransferHTSV1SecurityModelSuite.java | 858 +++++++++++++ .../precompile/DelegatePrecompileSuite.java | 7 +- .../DeleteTokenPrecompileSuite.java | 149 +-- ...teTokenPrecompileV1SecurityModelSuite.java | 209 ++++ .../precompile/DissociatePrecompileSuite.java | 348 +----- ...sociatePrecompileV1SecurityModelSuite.java | 397 ++++++ .../precompile/ERCPrecompileSuite.java | 90 -- .../ERCPrecompileV1SecurityModelSuite.java | 181 +++ .../FreezeUnfreezeTokenPrecompileSuite.java | 268 +---- ...zeTokenPrecompileV1SecurityModelSuite.java | 365 ++++++ .../precompile/GrantRevokeKycSuite.java | 97 +- .../GrantRevokeKycV1SecurityModelSuite.java | 
188 +++ .../LazyCreateThroughPrecompileSuite.java | 879 +------------- ...ThroughPrecompileV1SecurityModelSuite.java | 1069 +++++++++++++++++ ...SPrecompileTestsV1SecurityModelSuite.java} | 21 +- ...useUnpauseTokenAccountPrecompileSuite.java | 257 +--- ...AccountPrecompileV1SecurityModelSuite.java | 362 ++++++ .../contract/precompile/SigningReqsSuite.java | 305 +---- .../SigningReqsV1SecurityModelSuite.java | 405 +++++++ .../precompile/TokenExpiryInfoSuite.java | 225 +--- .../TokenExpiryInfoV1SecurityModelSuite.java | 316 +++++ .../precompile/TokenInfoHTSSuite.java | 451 +------ .../TokenInfoHTSV1SecurityModelSuite.java | 796 ++++++++++++ .../TokenUpdatePrecompileSuite.java | 907 +------------- ...nUpdatePrecompileV1SecurityModelSuite.java | 1021 ++++++++++++++++ ...SigsCanBeToggledByPrecompileTypeSuite.java | 92 +- .../precompile/V1SecurityModelOverrides.java | 27 + .../WipeTokenAccountPrecompileSuite.java | 299 +---- ...AccountPrecompileV1SecurityModelSuite.java | 359 ++++++ .../traceability/TraceabilitySuite.java | 2 +- .../bdd/suites/ethereum/EthereumSuite.java | 388 +----- .../EthereumV1SecurityModelSuite.java | 503 ++++++++ .../suites/leaky/LeakyContractTestsSuite.java | 40 +- .../leaky/LeakySecurityModelV1Suite.java | 108 ++ .../PrecompileMintThrottlingCheck.java | 2 +- .../suites/token/TokenAssociationSpecs.java | 48 +- .../TokenAssociationV1SecurityModelSpecs.java | 120 ++ .../src/main/resource/bootstrap.properties | 2 +- .../src/test/java/EndToEndPackageRunner.java | 8 +- 76 files changed, 11363 insertions(+), 8403 deletions(-) create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCallV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCreateV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/opcodes/Create2OperationV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/AssociatePrecompileV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractBurnHTSV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractHTSV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractKeysHTSV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractMintHTSV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CreatePrecompileV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CryptoTransferHTSV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DeleteTokenPrecompileV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DissociatePrecompileV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ERCPrecompileV1SecurityModelSuite.java create mode 100644 
hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/FreezeUnfreezeTokenPrecompileV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/GrantRevokeKycV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/LazyCreateThroughPrecompileV1SecurityModelSuite.java rename hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/{MixedHTSPrecompileTestsSuite.java => MixedHTSPrecompileTestsV1SecurityModelSuite.java} (91%) create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/PauseUnpauseTokenAccountPrecompileV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/SigningReqsV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenExpiryInfoV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenInfoHTSV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenUpdatePrecompileV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/V1SecurityModelOverrides.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/WipeTokenAccountPrecompileV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/EthereumV1SecurityModelSuite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/leaky/LeakySecurityModelV1Suite.java create mode 100644 hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenAssociationV1SecurityModelSpecs.java diff --git a/hedera-node/hedera-app/src/test/resources/bootstrap.properties b/hedera-node/hedera-app/src/test/resources/bootstrap.properties index 26a77f913097..678f2fe5a253 100644 --- a/hedera-node/hedera-app/src/test/resources/bootstrap.properties +++ b/hedera-node/hedera-app/src/test/resources/bootstrap.properties @@ -61,7 +61,7 @@ balances.compressOnCreation=true cache.records.ttl=180 contracts.allowAutoAssociations=false contracts.allowSystemUseOfHapiSigs=TokenAssociateToAccount,TokenDissociateFromAccount,TokenFreezeAccount,TokenUnfreezeAccount,TokenGrantKycToAccount,TokenRevokeKycFromAccount,TokenAccountWipe,TokenBurn,TokenDelete,TokenMint,TokenUnpause,TokenPause,TokenCreate,TokenUpdate,ContractCall,CryptoTransfer -contracts.maxNumWithHapiSigsAccess=10_000_000 +contracts.maxNumWithHapiSigsAccess=0 contracts.withSpecialHapiSigsAccess= contracts.allowCreate2=true contracts.chainId=295 diff --git a/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/ContractsConfig.java b/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/ContractsConfig.java index 1ab6d7bd1f13..77d51bf80192 100644 --- a/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/ContractsConfig.java +++ b/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/ContractsConfig.java @@ -35,7 +35,7 @@ public record ContractsConfig( @ConfigProperty(defaultValue = "false") boolean allowAutoAssociations, 
// @ConfigProperty(defaultValue = // "TokenAssociateToAccount,TokenDissociateFromAccount,TokenFreezeAccount,TokenUnfreezeAccount,TokenGrantKycToAccount,TokenRevokeKycFromAccount,TokenAccountWipe,TokenBurn,TokenDelete,TokenMint,TokenUnpause,TokenPause,TokenCreate,TokenUpdate,ContractCall,CryptoTransfer") Set allowSystemUseOfHapiSigs, - @ConfigProperty(defaultValue = "10000000") long maxNumWithHapiSigsAccess, + @ConfigProperty(defaultValue = "0") long maxNumWithHapiSigsAccess, // @ConfigProperty(defaultValue = "") Set

withSpecialHapiSigsAccess, @ConfigProperty(defaultValue = "false") boolean enforceCreationThrottle, @ConfigProperty(defaultValue = "15000000") long maxGasPerSec, diff --git a/hedera-node/hedera-mono-service/src/main/resources/bootstrap.properties b/hedera-node/hedera-mono-service/src/main/resources/bootstrap.properties index 616b837ceadc..c6eaaf471ed8 100644 --- a/hedera-node/hedera-mono-service/src/main/resources/bootstrap.properties +++ b/hedera-node/hedera-mono-service/src/main/resources/bootstrap.properties @@ -61,7 +61,7 @@ balances.compressOnCreation=true cache.records.ttl=180 contracts.allowAutoAssociations=false contracts.allowSystemUseOfHapiSigs=TokenAssociateToAccount,TokenDissociateFromAccount,TokenFreezeAccount,TokenUnfreezeAccount,TokenGrantKycToAccount,TokenRevokeKycFromAccount,TokenAccountWipe,TokenBurn,TokenDelete,TokenMint,TokenUnpause,TokenPause,TokenCreate,TokenUpdate,ContractCall,CryptoTransfer -contracts.maxNumWithHapiSigsAccess=10_000_000 +contracts.maxNumWithHapiSigsAccess=0 contracts.withSpecialHapiSigsAccess= contracts.allowCreate2=true contracts.chainId=295 diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/properties/BootstrapPropertiesTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/properties/BootstrapPropertiesTest.java index 59f2a43df5f9..0b0780b4b314 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/properties/BootstrapPropertiesTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/properties/BootstrapPropertiesTest.java @@ -344,7 +344,7 @@ class BootstrapPropertiesTest { entry(AUTO_RENEW_GRANT_FREE_RENEWALS, false), entry(CONTRACTS_ALLOW_CREATE2, true), entry(CONTRACTS_ALLOW_AUTO_ASSOCIATIONS, false), - entry(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, 10_000_000L), + entry(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, 0L), entry(CONTRACTS_WITH_SPECIAL_HAPI_SIGS_ACCESS, Set.
of()), entry( CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/ERC20PrecompilesTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/ERC20PrecompilesTest.java index 15157c76db39..3d08b72401b7 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/ERC20PrecompilesTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/ERC20PrecompilesTest.java @@ -165,6 +165,7 @@ import java.util.Optional; import java.util.Set; import java.util.TreeMap; +import java.util.function.Consumer; import org.apache.commons.lang3.tuple.Pair; import org.apache.tuweni.bytes.Bytes; import org.hyperledger.besu.datatypes.Address; @@ -177,6 +178,8 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.MockedStatic; @@ -326,6 +329,22 @@ class ERC20PrecompilesTest { private static final int CENTS_RATE = 12; private static final int HBAR_RATE = 1; + private enum WithHapiBlockLimit { + LOW, + HIGH + }; + + private static final Map> setHapiBlockLimitGivens = Map.of( + WithHapiBlockLimit.LOW, + props -> { + given(props.maxNumWithHapiSigsAccess()).willReturn(0L); + }, + WithHapiBlockLimit.HIGH, + props -> { + given(props.maxNumWithHapiSigsAccess()).willReturn(Long.MAX_VALUE); + given(props.systemContractsWithTopLevelSigsAccess()).willReturn(Set.of(CryptoTransfer)); + }); + private HTSPrecompiledContract subject; private MockedStatic entityIdUtils; private MockedStatic ercTransferPrecompile; @@ -1280,15 +1299,14 @@ void transferFrom() throws InvalidProtocolBufferException { verify(worldUpdater).manageInProgressRecord(recordsHistorian, mockRecordBuilder, mockSynthBodyBuilder); } - @Test - void transferFromHapiFungible() throws InvalidProtocolBufferException { + @ParameterizedTest + @EnumSource + void transferFromHapiFungible(final WithHapiBlockLimit limit) throws InvalidProtocolBufferException { final var pretendArguments = Bytes.of(Integers.toBytes(ABI_ID_TRANSFER_FROM)); givenMinimalFrameContext(Bytes.EMPTY); givenLedgers(); givenPricingUtilsContext(); - given(dynamicProperties.maxNumWithHapiSigsAccess()).willReturn(Long.MAX_VALUE); - given(dynamicProperties.systemContractsWithTopLevelSigsAccess()).willReturn(Set.of(CryptoTransfer)); - + setHapiBlockLimitGivens.get(limit).accept(dynamicProperties); given(frame.getContractAddress()).willReturn(contractAddr); given(syntheticTxnFactory.createCryptoTransfer(Collections.singletonList(TOKEN_TRANSFER_FROM_WRAPPER))) .willReturn(mockSynthBodyBuilder); @@ -1354,11 +1372,11 @@ void transferFromHapiFungible() throws InvalidProtocolBufferException { .build()); } - @Test - void transferFromNFTHapi() throws InvalidProtocolBufferException { + @ParameterizedTest + @EnumSource + void transferFromNFTHapi(final WithHapiBlockLimit limit) throws InvalidProtocolBufferException { final var pretendArguments = Bytes.of(Integers.toBytes(ABI_ID_TRANSFER_FROM_NFT)); - given(dynamicProperties.maxNumWithHapiSigsAccess()).willReturn(Long.MAX_VALUE); - given(dynamicProperties.systemContractsWithTopLevelSigsAccess()).willReturn(Set.of(CryptoTransfer)); + 
setHapiBlockLimitGivens.get(limit).accept(dynamicProperties); givenMinimalFrameContext(Bytes.EMPTY); givenLedgers(); givenPricingUtilsContext(); @@ -1485,12 +1503,13 @@ void transferFails() throws InvalidProtocolBufferException { assertEquals(invalidFullPrefix, result); } - @Test - void onlyFallsBackToApprovalWithoutTopLevelSigs() throws InvalidProtocolBufferException { + @ParameterizedTest + @EnumSource + void onlyFallsBackToApprovalWithoutTopLevelSigs(final WithHapiBlockLimit limit) + throws InvalidProtocolBufferException { final Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_TRANSFER)); final Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments); - given(dynamicProperties.maxNumWithHapiSigsAccess()).willReturn(Long.MAX_VALUE); - given(dynamicProperties.systemContractsWithTopLevelSigsAccess()).willReturn(Set.of(CryptoTransfer)); + setHapiBlockLimitGivens.get(limit).accept(dynamicProperties); givenLedgers(); givenPricingUtilsContext(); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/ERC721PrecompilesTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/ERC721PrecompilesTest.java index 680e8353d7ff..54e88628039e 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/ERC721PrecompilesTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/ERC721PrecompilesTest.java @@ -160,9 +160,11 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.TreeSet; +import java.util.function.Consumer; import org.apache.commons.lang3.tuple.Pair; import org.apache.tuweni.bytes.Bytes; import org.hyperledger.besu.datatypes.Address; @@ -174,6 +176,8 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; import org.mockito.Mock; import org.mockito.MockedStatic; import org.mockito.Mockito; @@ -316,6 +320,22 @@ class ERC721PrecompilesTest { private static final int CENTS_RATE = 12; private static final int HBAR_RATE = 1; + private enum WithHapiBlockLimit { + LOW, + HIGH + }; + + private static final Map> setHapiBlockLimitGivens = Map.of( + WithHapiBlockLimit.LOW, + props -> { + given(props.maxNumWithHapiSigsAccess()).willReturn(0L); + }, + WithHapiBlockLimit.HIGH, + props -> { + given(props.maxNumWithHapiSigsAccess()).willReturn(Long.MAX_VALUE); + given(props.systemContractsWithTopLevelSigsAccess()).willReturn(Set.of(CryptoTransfer)); + }); + private HTSPrecompiledContract subject; private MockedStatic entityIdUtils; private MockedStatic isApprovedForAllPrecompile; @@ -1280,12 +1300,12 @@ void ownerOfRevertsWithMissingNft() { assertEquals(missingNftResult, result); } - @Test - void transferFrom() throws InvalidProtocolBufferException { + @ParameterizedTest + @EnumSource + void transferFrom(final WithHapiBlockLimit limit) throws InvalidProtocolBufferException { final Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_TRANSFER_FROM)); final Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments); - given(dynamicProperties.maxNumWithHapiSigsAccess()).willReturn(Long.MAX_VALUE); - 
given(dynamicProperties.systemContractsWithTopLevelSigsAccess()).willReturn(Set.of(CryptoTransfer)); + setHapiBlockLimitGivens.get(limit).accept(dynamicProperties); givenLedgers(); givenPricingUtilsContext(); @@ -1362,14 +1382,14 @@ void transferFrom() throws InvalidProtocolBufferException { verify(frame).addLog(log); } - @Test - void transferFromFailsForInvalidSig() throws InvalidProtocolBufferException { + @ParameterizedTest + @EnumSource + void transferFromFailsForInvalidSig(final WithHapiBlockLimit limit) throws InvalidProtocolBufferException { final Bytes nestedPretendArguments = Bytes.of(Integers.toBytes(ABI_ID_ERC_TRANSFER_FROM)); final Bytes pretendArguments = givenMinimalFrameContext(nestedPretendArguments); givenLedgers(); - given(dynamicProperties.maxNumWithHapiSigsAccess()).willReturn(Long.MAX_VALUE); + setHapiBlockLimitGivens.get(limit).accept(dynamicProperties); givenPricingUtilsContext(); - given(dynamicProperties.systemContractsWithTopLevelSigsAccess()).willReturn(Set.of(CryptoTransfer)); given(frame.getContractAddress()).willReturn(contractAddr); given(syntheticTxnFactory.createCryptoTransfer(Collections.singletonList(nftTransferList))) diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/TransferPrecompilesTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/TransferPrecompilesTest.java index 2eecf88563be..1c7e6176b709 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/TransferPrecompilesTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/TransferPrecompilesTest.java @@ -170,8 +170,10 @@ import java.util.Deque; import java.util.Iterator; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.function.Consumer; import java.util.function.Predicate; import java.util.function.UnaryOperator; import org.apache.commons.lang3.tuple.Pair; @@ -187,6 +189,8 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.MockedStatic; @@ -349,6 +353,22 @@ class TransferPrecompilesTest { private static final Bytes TRANSFER_NFTS_INPUT = Bytes.fromHexString( "0x2c4ba191000000000000000000000000000000000000000000000000000000000000047a000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000001400000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000047700000000000000000000000000000000000000000000000000000000000004770000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000047c000000000000000000000000000000000000000000000010000000000000047c0000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000007b00000000000000000000000000000000000000000000000000000000000000ea"); + private enum WithHapiBlockLimit { + LOW, + HIGH + }; + + private static final Map> setHapiBlockLimitGivens = Map.of( + 
WithHapiBlockLimit.LOW, + props -> { + given(props.maxNumWithHapiSigsAccess()).willReturn(0L); + }, + WithHapiBlockLimit.HIGH, + props -> { + given(props.maxNumWithHapiSigsAccess()).willReturn(Long.MAX_VALUE); + given(props.systemContractsWithTopLevelSigsAccess()).willReturn(Set.of(CryptoTransfer)); + }); + private HTSPrecompiledContract subject; private MockedStatic transferPrecompile; final Predicate accoundIdExists = acc -> true; @@ -1720,15 +1740,15 @@ void hbarNFTTransferHappyPathWorks() throws InvalidProtocolBufferException { verify(worldUpdater).manageInProgressRecord(recordsHistorian, mockRecordBuilder, mockSynthBodyBuilder); } - @Test - void transferFailsAndCatchesProperly() throws InvalidProtocolBufferException { + @ParameterizedTest + @EnumSource + void transferFailsAndCatchesProperly(final WithHapiBlockLimit limit) throws InvalidProtocolBufferException { final Bytes pretendArguments = Bytes.of(Integers.toBytes(ABI_ID_TRANSFER_TOKEN)); givenMinimalFrameContext(); givenLedgers(); givenPricingUtilsContext(); - given(dynamicProperties.maxNumWithHapiSigsAccess()).willReturn(Long.MAX_VALUE); - given(dynamicProperties.systemContractsWithTopLevelSigsAccess()).willReturn(Set.of(CryptoTransfer)); + setHapiBlockLimitGivens.get(limit).accept(dynamicProperties); given(infrastructureFactory.newSideEffects()).willReturn(sideEffects); given(infrastructureFactory.newImpliedTransfersMarshal(any())).willReturn(impliedTransfersMarshal); given(worldUpdater.permissivelyUnaliased(any())) diff --git a/hedera-node/hedera-mono-service/src/test/resources/bootstrap.properties b/hedera-node/hedera-mono-service/src/test/resources/bootstrap.properties index bab000848734..2e9a5427934d 100644 --- a/hedera-node/hedera-mono-service/src/test/resources/bootstrap.properties +++ b/hedera-node/hedera-mono-service/src/test/resources/bootstrap.properties @@ -61,7 +61,7 @@ balances.compressOnCreation=true cache.records.ttl=180 contracts.allowAutoAssociations=false contracts.allowSystemUseOfHapiSigs=TokenAssociateToAccount,TokenDissociateFromAccount,TokenFreezeAccount,TokenUnfreezeAccount,TokenGrantKycToAccount,TokenRevokeKycFromAccount,TokenAccountWipe,TokenBurn,TokenDelete,TokenMint,TokenUnpause,TokenPause,TokenCreate,TokenUpdate,ContractCall,CryptoTransfer -contracts.maxNumWithHapiSigsAccess=10_000_000 +contracts.maxNumWithHapiSigsAccess=0 contracts.withSpecialHapiSigsAccess= contracts.allowCreate2=true contracts.chainId=295 diff --git a/hedera-node/hedera-mono-service/src/test/resources/bootstrap/standard.properties b/hedera-node/hedera-mono-service/src/test/resources/bootstrap/standard.properties index 000d3a06edeb..0b673a91ac8a 100644 --- a/hedera-node/hedera-mono-service/src/test/resources/bootstrap/standard.properties +++ b/hedera-node/hedera-mono-service/src/test/resources/bootstrap/standard.properties @@ -61,7 +61,7 @@ balances.compressOnCreation=true cache.records.ttl=180 contracts.allowAutoAssociations=false contracts.allowSystemUseOfHapiSigs=TokenAssociateToAccount,TokenDissociateFromAccount,TokenFreezeAccount,TokenUnfreezeAccount,TokenGrantKycToAccount,TokenRevokeKycFromAccount,TokenAccountWipe,TokenBurn,TokenDelete,TokenMint,TokenUnpause,TokenPause,TokenCreate,TokenUpdate,ContractCall,CryptoTransfer -contracts.maxNumWithHapiSigsAccess=10_000_000 +contracts.maxNumWithHapiSigsAccess=0 contracts.withSpecialHapiSigsAccess= contracts.allowCreate2=true contracts.chainId=295 diff --git a/hedera-node/test-clients/src/eet/java/EndToEndTests.java b/hedera-node/test-clients/src/eet/java/EndToEndTests.java index 
c6402e3b9ebe..4c0b32f5e09f 100644 --- a/hedera-node/test-clients/src/eet/java/EndToEndTests.java +++ b/hedera-node/test-clients/src/eet/java/EndToEndTests.java @@ -135,7 +135,6 @@ Collection contractPrecompile2() { new DynamicContainer[] { // extractSpecsFromSuite(CryptoTransferHTSSuite::new), // extractSpecsFromSuite(DelegatePrecompileSuite::new), - // extractSpecsFromSuite(DissociatePrecompileSuite::new), // extractSpecsFromSuite(DynamicGasCostSuite::new), // extractSpecsFromSuite(MixedHTSPrecompileTestsSuite::new) }); diff --git a/hedera-node/test-clients/src/itest/java/AllIntegrationTests.java b/hedera-node/test-clients/src/itest/java/AllIntegrationTests.java index 3e4fa2d010fd..37bd3258d6d8 100644 --- a/hedera-node/test-clients/src/itest/java/AllIntegrationTests.java +++ b/hedera-node/test-clients/src/itest/java/AllIntegrationTests.java @@ -48,11 +48,20 @@ class AllIntegrationTests extends IntegrationTestBase { private static final String TEST_CONTAINER_NODE0_STREAMS = "build/network/itest/records/node_0"; + @Tag("integration") + @Order(0) + @TestFactory + Collection globalPrerequisiteSpecsBySuite() { + return Arrays.stream(SequentialSuites.globalPrerequisiteSuites()) + .map(this::extractSpecsFromSuite) + .toList(); + } + @Tag("integration") @Order(1) @TestFactory Collection sequentialSpecsBySuite() { - return Arrays.stream(SequentialSuites.all()) + return Arrays.stream(SequentialSuites.sequentialSuites()) .map(this::extractSpecsFromSuite) .toList(); } diff --git a/hedera-node/test-clients/src/itest/java/ConcurrentSuites.java b/hedera-node/test-clients/src/itest/java/ConcurrentSuites.java index 7851292b4f84..c82ba70a313a 100644 --- a/hedera-node/test-clients/src/itest/java/ConcurrentSuites.java +++ b/hedera-node/test-clients/src/itest/java/ConcurrentSuites.java @@ -60,7 +60,6 @@ import com.hedera.services.bdd.suites.contract.precompile.FreezeUnfreezeTokenPrecompileSuite; import com.hedera.services.bdd.suites.contract.precompile.GrantRevokeKycSuite; import com.hedera.services.bdd.suites.contract.precompile.LazyCreateThroughPrecompileSuite; -import com.hedera.services.bdd.suites.contract.precompile.MixedHTSPrecompileTestsSuite; import com.hedera.services.bdd.suites.contract.precompile.PauseUnpauseTokenAccountPrecompileSuite; import com.hedera.services.bdd.suites.contract.precompile.PrngPrecompileSuite; import com.hedera.services.bdd.suites.contract.precompile.SigningReqsSuite; @@ -179,7 +178,6 @@ static Supplier[] all() { FreezeUnfreezeTokenPrecompileSuite::new, GrantRevokeKycSuite::new, LazyCreateThroughPrecompileSuite::new, - MixedHTSPrecompileTestsSuite::new, PauseUnpauseTokenAccountPrecompileSuite::new, PrngPrecompileSuite::new, TokenAndTypeCheckSuite::new, @@ -234,7 +232,6 @@ static Supplier[] ethereumSuites() { DefaultTokenStatusSuite::new, DelegatePrecompileSuite::new, DeleteTokenPrecompileSuite::new, - DissociatePrecompileSuite::new, CreatePrecompileSuite::new, ERCPrecompileSuite::new, FreezeUnfreezeTokenPrecompileSuite::new, diff --git a/hedera-node/test-clients/src/itest/java/SequentialSuites.java b/hedera-node/test-clients/src/itest/java/SequentialSuites.java index 2f25cbcc309a..20590aa1c15c 100644 --- a/hedera-node/test-clients/src/itest/java/SequentialSuites.java +++ b/hedera-node/test-clients/src/itest/java/SequentialSuites.java @@ -21,21 +21,31 @@ import com.hedera.services.bdd.suites.leaky.FeatureFlagSuite; import com.hedera.services.bdd.suites.leaky.LeakyContractTestsSuite; import com.hedera.services.bdd.suites.leaky.LeakyCryptoTestsSuite; +import 
com.hedera.services.bdd.suites.leaky.LeakySecurityModelV1Suite; import com.hedera.services.bdd.suites.regression.TargetNetworkPrep; import com.hedera.services.bdd.suites.throttling.PrivilegedOpsSuite; import java.util.function.Supplier; +import org.apache.commons.lang3.ArrayUtils; public class SequentialSuites { - @SuppressWarnings("unchecked") static Supplier[] all() { + return ArrayUtils.addAll(globalPrerequisiteSuites(), sequentialSuites()); + } + + @SuppressWarnings("unchecked") + static Supplier[] globalPrerequisiteSuites() { + return (Supplier[]) new Supplier[] {TargetNetworkPrep::new, FeatureFlagSuite::new}; + } + + @SuppressWarnings("unchecked") + static Supplier[] sequentialSuites() { return (Supplier[]) new Supplier[] { - TargetNetworkPrep::new, - FeatureFlagSuite::new, SpecialAccountsAreExempted::new, PrivilegedOpsSuite::new, TraceabilitySuite::new, LeakyContractTestsSuite::new, LeakyCryptoTestsSuite::new, + LeakySecurityModelV1Suite::new, Create2OperationSuite::new, }; } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/TxnUtils.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/TxnUtils.java index 12f43489f803..9c6554cb7fea 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/TxnUtils.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/TxnUtils.java @@ -613,8 +613,8 @@ public static String toReadableString(final Transaction grpcTransaction) throws } public static String bytecodePath(final String contractName) { - // Quick fix for https://github.com/hashgraph/hedera-services/issues/6821, a better solution will be provided - // when the issue is resolved + // TODO: Quick fix for https://github.com/hashgraph/hedera-services/issues/6821, a better solution + // will be provided when the issue is resolved return Utils.getResourcePath(contractName, ".bin"); } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/SuiteRunner.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/SuiteRunner.java index 9251cb6482d2..f6d722ba4b40 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/SuiteRunner.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/SuiteRunner.java @@ -64,8 +64,6 @@ import com.hedera.services.bdd.suites.contract.precompile.ContractMintHTSSuite; import com.hedera.services.bdd.suites.contract.precompile.CryptoTransferHTSSuite; import com.hedera.services.bdd.suites.contract.precompile.DelegatePrecompileSuite; -import com.hedera.services.bdd.suites.contract.precompile.DissociatePrecompileSuite; -import com.hedera.services.bdd.suites.contract.precompile.MixedHTSPrecompileTestsSuite; import com.hedera.services.bdd.suites.contract.records.LogsSuite; import com.hedera.services.bdd.suites.contract.records.RecordsSuite; import com.hedera.services.bdd.suites.crypto.AutoAccountCreationSuite; @@ -437,8 +435,6 @@ public class SuiteRunner { put("ContractMintHTSSuite", aof(ContractMintHTSSuite::new)); put("CryptoTransferHTSSuite", aof(CryptoTransferHTSSuite::new)); put("DelegatePrecompileSuite", aof(DelegatePrecompileSuite::new)); - put("DissociatePrecompileSuite", aof(DissociatePrecompileSuite::new)); - put("MixedHTSPrecompileTestsSuite", aof(MixedHTSPrecompileTestsSuite::new)); /* Functional tests - AUTORENEW */ put("AccountAutoRenewalSuite", aof(AccountAutoRenewalSuite::new)); /* Functional tests - MIXED (record 
emphasis) */ diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/Utils.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/Utils.java index 0b783710901a..57c24240e677 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/Utils.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/Utils.java @@ -20,25 +20,34 @@ import static com.hedera.services.bdd.spec.HapiPropertySource.asDotDelimitedLongArray; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.assertionsHold; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; import static com.hedera.services.bdd.suites.contract.Utils.FunctionType.CONSTRUCTOR; +import static com.hederahashgraph.api.proto.java.HederaFunctionality.ContractCall; +import static com.hederahashgraph.api.proto.java.SubType.DEFAULT; import static com.swirlds.common.utility.CommonUtils.hex; import static com.swirlds.common.utility.CommonUtils.unhex; import static java.lang.System.arraycopy; import static org.apache.commons.lang3.StringUtils.EMPTY; import static org.junit.jupiter.api.Assertions.assertEquals; +import com.esaulpaugh.headlong.abi.Address; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; import com.google.protobuf.ByteString; +import com.hedera.node.app.hapi.fees.pricing.AssetsLoader; import com.hedera.services.bdd.spec.HapiPropertySource; +import com.hedera.services.bdd.spec.HapiSpec; import com.hedera.services.bdd.spec.HapiSpecOperation; import com.hedera.services.bdd.spec.transactions.TxnUtils; +import com.hedera.services.bdd.spec.utilops.CustomSpecAssert; import com.hederahashgraph.api.proto.java.AccountAmount; import com.hederahashgraph.api.proto.java.AccountID; import com.hederahashgraph.api.proto.java.ContractID; +import com.hederahashgraph.api.proto.java.HederaFunctionality; import com.hederahashgraph.api.proto.java.Key; import com.hederahashgraph.api.proto.java.NftTransfer; +import com.hederahashgraph.api.proto.java.SubType; import com.hederahashgraph.api.proto.java.Timestamp; import com.hederahashgraph.api.proto.java.TokenID; import com.swirlds.common.utility.CommonUtils; @@ -47,12 +56,16 @@ import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; +import java.math.BigDecimal; +import java.math.BigInteger; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.time.Instant; +import java.util.Collections; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.IntStream; +import java.util.stream.LongStream; import org.apache.commons.io.FileUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -60,12 +73,14 @@ import org.apache.tuweni.bytes.Bytes32; import org.bouncycastle.util.encoders.Hex; import org.hyperledger.besu.crypto.Hash; +import org.jetbrains.annotations.NotNull; import org.json.JSONArray; import org.json.JSONObject; import org.json.JSONTokener; public class Utils { public static final String RESOURCE_PATH = "src/main/resource/contract/contracts/%1$s/%1$s%2$s"; + public static final String UNIQUE_CLASSPATH_RESOURCE_TPL = "contract/contracts/%s/%s"; private static final Logger log = LogManager.getLogger(Utils.class); private static 
final String JSON_EXTENSION = ".json"; @@ -323,4 +338,74 @@ public static HapiSpecOperation captureChildCreate2MetaFor( public static Instant asInstant(final Timestamp timestamp) { return Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos()); } + + public static Address[] nCopiesOfSender(final int n, final Address mirrorAddr) { + return Collections.nCopies(n, mirrorAddr).toArray(Address[]::new); + } + + public static Address[] nNonMirrorAddressFrom(final int n, final long m) { + return LongStream.range(m, m + n).mapToObj(Utils::nonMirrorAddrWith).toArray(Address[]::new); + } + + public static Address headlongFromHexed(final String addr) { + return Address.wrap(Address.toChecksumAddress("0x" + addr)); + } + + public static Address mirrorAddrWith(final long num) { + return Address.wrap( + Address.toChecksumAddress(new BigInteger(1, HapiPropertySource.asSolidityAddress(0, 0, num)))); + } + + public static Address nonMirrorAddrWith(final long num) { + return nonMirrorAddrWith(666, num); + } + + public static Address nonMirrorAddrWith(final long seed, final long num) { + return Address.wrap(Address.toChecksumAddress( + new BigInteger(1, HapiPropertySource.asSolidityAddress((int) seed, seed, num)))); + } + + public static long expectedPrecompileGasFor( + final HapiSpec spec, final HederaFunctionality function, final SubType type) { + final var gasThousandthsOfTinycentPrice = spec.fees() + .getCurrentOpFeeData() + .get(ContractCall) + .get(DEFAULT) + .getServicedata() + .getGas(); + final var assetsLoader = new AssetsLoader(); + final BigDecimal hapiUsdPrice; + try { + hapiUsdPrice = assetsLoader.loadCanonicalPrices().get(function).get(type); + } catch (final IOException e) { + throw new UncheckedIOException(e); + } + final var precompileTinycentPrice = hapiUsdPrice + .multiply(BigDecimal.valueOf(1.2)) + .multiply(BigDecimal.valueOf(100 * 100_000_000L)) + .longValueExact(); + return (precompileTinycentPrice * 1000 / gasThousandthsOfTinycentPrice); + } + + @NotNull + public static String getNestedContractAddress(final String outerContract, final HapiSpec spec) { + return HapiPropertySource.asHexedSolidityAddress(spec.registry().getContractId(outerContract)); + } + + @NotNull + @SuppressWarnings("java:S5960") + public static CustomSpecAssert assertTxnRecordHasNoTraceabilityEnrichedContractFnResult( + final String nestedTransferTxn) { + return assertionsHold((spec, log) -> { + final var subOp = getTxnRecord(nestedTransferTxn); + allRunFor(spec, subOp); + + final var rcd = subOp.getResponseRecord(); + + final var contractCallResult = rcd.getContractCallResult(); + assertEquals(0L, contractCallResult.getGas(), "Result not expected to externalize gas"); + assertEquals(0L, contractCallResult.getAmount(), "Result not expected to externalize amount"); + assertEquals(ByteString.EMPTY, contractCallResult.getFunctionParameters()); + }); + } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCallSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCallSuite.java index bf111e2741b0..2b4383f46855 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCallSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCallSuite.java @@ -35,12 +35,9 @@ import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; import static 
com.hedera.services.bdd.spec.queries.QueryVerbs.getContractInfo; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractRecords; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenNftInfo; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; import static com.hedera.services.bdd.spec.transactions.TxnUtils.asId; import static com.hedera.services.bdd.spec.transactions.TxnUtils.literalInitcodeFor; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.burnToken; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCallWithFunctionAbi; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; @@ -49,15 +46,11 @@ import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoUpdate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.fileCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenUpdate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; import static com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoTransfer.tinyBarsFromAccountToAlias; -import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.assertionsHold; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.balanceSnapshot; @@ -66,15 +59,12 @@ import static com.hedera.services.bdd.spec.utilops.UtilVerbs.logIt; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyListNamed; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sleepFor; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.updateLargeFile; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; import static com.hedera.services.bdd.suites.contract.Utils.FunctionType.FUNCTION; import static com.hedera.services.bdd.suites.contract.Utils.asAddress; import static com.hedera.services.bdd.suites.contract.Utils.asToken; import static com.hedera.services.bdd.suites.contract.Utils.captureChildCreate2MetaFor; -import static com.hedera.services.bdd.suites.contract.Utils.extractByteCode; import static com.hedera.services.bdd.suites.contract.Utils.getABIFor; import static com.hedera.services.bdd.suites.contract.Utils.getABIForContract; import static com.hedera.services.bdd.suites.utils.contracts.SimpleBytesResult.bigIntResult; @@ -91,7 +81,6 @@ import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.OBTAINER_SAME_CONTRACT_ID; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.OK; import static 
com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; -import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; import static com.swirlds.common.utility.CommonUtils.unhex; import com.esaulpaugh.headlong.abi.ABIType; @@ -104,9 +93,7 @@ import com.hedera.services.bdd.spec.HapiPropertySource; import com.hedera.services.bdd.spec.HapiSpec; import com.hedera.services.bdd.spec.HapiSpecSetup; -import com.hedera.services.bdd.spec.keys.KeyShape; import com.hedera.services.bdd.spec.keys.SigControl; -import com.hedera.services.bdd.spec.transactions.contract.HapiContractCreate; import com.hedera.services.bdd.spec.transactions.token.TokenMovement; import com.hedera.services.bdd.spec.utilops.CustomSpecAssert; import com.hedera.services.bdd.suites.HapiSuite; @@ -115,7 +102,6 @@ import com.hederahashgraph.api.proto.java.ResponseCodeEnum; import com.hederahashgraph.api.proto.java.Timestamp; import com.hederahashgraph.api.proto.java.TokenID; -import com.hederahashgraph.api.proto.java.TokenSupplyType; import com.hederahashgraph.api.proto.java.TokenType; import com.swirlds.common.utility.CommonUtils; import java.math.BigInteger; @@ -173,7 +159,6 @@ public class ContractCallSuite extends HapiSuite { private static final String FAIL_INVALID_INITIAL_BALANCE = "failInvalidInitialBalance"; private static final String SUCCESS_WITH_ZERO_INITIAL_BALANCE = "successWithZeroInitialBalance"; private static final String KILL_ME = "killMe"; - private static final String CONTRACT_CALLER = "contractCaller"; private static final String RECEIVABLE_SIG_REQ_ACCOUNT = "receivableSigReqAccount"; private static final String RECEIVABLE_SIG_REQ_ACCOUNT_INFO = "receivableSigReqAccountInfo"; private static final String TRANSFER_TO_ADDRESS = "transferToAddress"; @@ -193,8 +178,6 @@ public class ContractCallSuite extends HapiSuite { private static final String RECEIVER_1_INFO = "receiver1Info"; private static final String RECEIVER_2_INFO = "receiver2Info"; private static final String RECEIVER_3_INFO = "receiver3Info"; - public static final String STATE_MUTABILITY_NONPAYABLE_TYPE_FUNCTION = - " \"stateMutability\": \"nonpayable\", \"type\": \"function\" }"; public static final String DELEGATE_CALL_SPECIFIC = "delegateCallSpecific"; public static void main(String... 
args) { @@ -220,7 +203,6 @@ public List getSpecsInSuite() { smartContractInlineAssemblyCheck(), ocToken(), erc721TokenUriAndHtsNftInfoTreatNonUtf8BytesDifferently(), - contractTransferToSigReqAccountWithKeySucceeds(), minChargeIsTXGasUsedByContractCall(), hscsEvm005TransferOfHBarsWorksBetweenContracts(), hscsEvm006ContractHBarTransferToAccount(), @@ -247,9 +229,6 @@ public List getSpecsInSuite() { whitelistingAliasedContract(), cannotUseMirrorAddressOfAliasedContractInPrecompileMethod(), exchangeRatePrecompileWorks(), - canMintAndTransferInSameContractOperation(), - workingHoursDemo(), - lpFarmSimulation(), nestedContractCannotOverSendValue(), depositMoreThanBalanceFailsGracefully(), lowLevelEcrecCallBehavior(), @@ -747,142 +726,6 @@ private HapiSpec bitcarbonTestStillPasses() { .gas(1_000_000))); } - private HapiSpec workingHoursDemo() { - final var gasToOffer = 4_000_000; - final var contract = "WorkingHours"; - final var ticketToken = "ticketToken"; - final var adminKey = "admin"; - final var treasury = "treasury"; - final var newSupplyKey = "newSupplyKey"; - - final var ticketTaking = "ticketTaking"; - final var ticketWorking = "ticketWorking"; - final var mint = "minting"; - final var burn = "burning"; - final var preMints = List.of(ByteString.copyFromUtf8("HELLO"), ByteString.copyFromUtf8("GOODBYE")); - - final AtomicLong ticketSerialNo = new AtomicLong(); - - return defaultHapiSpec("WorkingHoursDemo") - .given( - newKeyNamed(adminKey), - cryptoCreate(treasury), - // we need a new user, expiry to 1 Jan 2100 costs 11M gas for token - // associate - tokenCreate(ticketToken) - .treasury(treasury) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .initialSupply(0L) - .supplyType(TokenSupplyType.INFINITE) - .adminKey(adminKey) - .supplyKey(adminKey), - mintToken(ticketToken, preMints).via(mint), - burnToken(ticketToken, List.of(1L)).via(burn), - uploadInitCode(contract)) - .when( - withOpContext((spec, opLog) -> { - final var registry = spec.registry(); - final var tokenId = registry.getTokenID(ticketToken); - final var treasuryId = registry.getAccountID(treasury); - final var creation = contractCreate( - contract, - asHeadlongAddress(asAddress(tokenId)), - asHeadlongAddress(asAddress(treasuryId))) - .gas(gasToOffer); - allRunFor(spec, creation); - }), - newKeyNamed(newSupplyKey).shape(KeyShape.CONTRACT.signedWith(contract)), - tokenUpdate(ticketToken).supplyKey(newSupplyKey)) - .then( - /* Take a ticket */ - contractCall(contract, "takeTicket") - .alsoSigningWithFullPrefix(DEFAULT_CONTRACT_SENDER, treasury) - .gas(4_000_000) - .via(ticketTaking) - .exposingResultTo(result -> { - LOG.info("Explicit mint result is {}", result); - ticketSerialNo.set(((Long) result[0])); - }), - getTxnRecord(ticketTaking), - getAccountBalance(DEFAULT_CONTRACT_SENDER).logged().hasTokenBalance(ticketToken, 1L), - /* Our ticket number is 3 (b/c of the two pre-mints), so we must call - * work twice before the contract will actually accept our ticket. 
*/ - sourcing(() -> contractCall(contract, "workTicket", ticketSerialNo.get()) - .gas(2_000_000) - .alsoSigningWithFullPrefix(DEFAULT_CONTRACT_SENDER)), - getAccountBalance(DEFAULT_CONTRACT_SENDER).hasTokenBalance(ticketToken, 1L), - sourcing(() -> contractCall(contract, "workTicket", ticketSerialNo.get()) - .gas(2_000_000) - .alsoSigningWithFullPrefix(DEFAULT_CONTRACT_SENDER) - .via(ticketWorking)), - getAccountBalance(DEFAULT_CONTRACT_SENDER).hasTokenBalance(ticketToken, 0L), - getTokenInfo(ticketToken).hasTotalSupply(1L), - /* Review the history */ - getTxnRecord(ticketTaking).andAllChildRecords().logged(), - getTxnRecord(ticketWorking).andAllChildRecords().logged()); - } - - private HapiSpec canMintAndTransferInSameContractOperation() { - final AtomicReference tokenMirrorAddr = new AtomicReference<>(); - final AtomicReference aCivilianMirrorAddr = new AtomicReference<>(); - final var nfToken = "nfToken"; - final var multiKey = "multiKey"; - final var aCivilian = "aCivilian"; - final var treasuryContract = "SomeERC721Scenarios"; - final var mintAndTransferTxn = "mintAndTransferTxn"; - final var mintAndTransferAndBurnTxn = "mintAndTransferAndBurnTxn"; - - return defaultHapiSpec("CanMintAndTransferInSameContractOperation") - .given( - newKeyNamed(multiKey), - cryptoCreate(aCivilian) - .exposingCreatedIdTo(id -> aCivilianMirrorAddr.set(asHexedSolidityAddress(id))), - uploadInitCode(treasuryContract), - contractCreate(treasuryContract).adminKey(multiKey), - tokenCreate(nfToken) - .supplyKey(multiKey) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(treasuryContract) - .initialSupply(0) - .exposingCreatedIdTo(idLit -> - tokenMirrorAddr.set(asHexedSolidityAddress(HapiPropertySource.asToken(idLit)))), - mintToken( - nfToken, - List.of( - // 1 - ByteString.copyFromUtf8("A penny for"), - // 2 - ByteString.copyFromUtf8("the Old Guy"))), - tokenAssociate(aCivilian, nfToken), - cryptoTransfer(movingUnique(nfToken, 2L).between(treasuryContract, aCivilian))) - .when(sourcing(() -> contractCall( - treasuryContract, - "nonSequiturMintAndTransfer", - asHeadlongAddress(tokenMirrorAddr.get()), - asHeadlongAddress(aCivilianMirrorAddr.get())) - .via(mintAndTransferTxn) - .gas(4_000_000) - .alsoSigningWithFullPrefix(multiKey))) - .then( - getTokenInfo(nfToken).hasTotalSupply(4L), - getTokenNftInfo(nfToken, 3L) - .hasSerialNum(3L) - .hasAccountID(aCivilian) - .hasMetadata(ByteString.copyFrom(new byte[] {(byte) 0xee})), - getTokenNftInfo(nfToken, 4L) - .hasSerialNum(4L) - .hasAccountID(aCivilian) - .hasMetadata(ByteString.copyFrom(new byte[] {(byte) 0xff})), - sourcing(() -> contractCall( - treasuryContract, - "nonSequiturMintAndTransferAndBurn", - asHeadlongAddress(tokenMirrorAddr.get()), - asHeadlongAddress(aCivilianMirrorAddr.get())) - .via(mintAndTransferAndBurnTxn) - .gas(4_000_000) - .alsoSigningWithFullPrefix(multiKey, aCivilian))); - } - private HapiSpec exchangeRatePrecompileWorks() { final var valueToTinycentCall = "recoverUsd"; final var rateAware = "ExchangeRatePrecompile"; @@ -1626,50 +1469,6 @@ HapiSpec payTestSelfDestructCall() { getAccountBalance(RECEIVER).hasTinyBars(2_000L)); } - private HapiSpec contractTransferToSigReqAccountWithKeySucceeds() { - return defaultHapiSpec("ContractTransferToSigReqAccountWithKeySucceeds") - .given( - cryptoCreate(CONTRACT_CALLER).balance(1_000_000_000_000L), - cryptoCreate(RECEIVABLE_SIG_REQ_ACCOUNT) - .balance(1_000_000_000_000L) - .receiverSigRequired(true), - getAccountInfo(CONTRACT_CALLER).savingSnapshot("contractCallerInfo"), - 
getAccountInfo(RECEIVABLE_SIG_REQ_ACCOUNT).savingSnapshot(RECEIVABLE_SIG_REQ_ACCOUNT_INFO), - uploadInitCode(TRANSFERRING_CONTRACT)) - .when(contractCreate(TRANSFERRING_CONTRACT).gas(300_000L).balance(5000L)) - .then(withOpContext((spec, opLog) -> { - final var accountAddress = spec.registry() - .getAccountInfo(RECEIVABLE_SIG_REQ_ACCOUNT_INFO) - .getContractAccountID(); - final var receivableAccountKey = spec.registry() - .getAccountInfo(RECEIVABLE_SIG_REQ_ACCOUNT_INFO) - .getKey(); - final var contractCallerKey = - spec.registry().getAccountInfo("contractCallerInfo").getKey(); - spec.registry().saveKey("receivableKey", receivableAccountKey); - spec.registry().saveKey("contractCallerKey", contractCallerKey); - /* if any of the keys are missing, INVALID_SIGNATURE is returned */ - final var call = contractCall( - TRANSFERRING_CONTRACT, - TRANSFER_TO_ADDRESS, - asHeadlongAddress(accountAddress), - BigInteger.ONE) - .payingWith(CONTRACT_CALLER) - .gas(300_000) - .alsoSigningWithFullPrefix("receivableKey"); - /* calling with the receivableSigReqAccount should pass without adding keys */ - final var callWithReceivable = contractCall( - TRANSFERRING_CONTRACT, - TRANSFER_TO_ADDRESS, - asHeadlongAddress(accountAddress), - BigInteger.ONE) - .payingWith(RECEIVABLE_SIG_REQ_ACCOUNT) - .gas(300_000) - .hasKnownStatus(SUCCESS); - allRunFor(spec, call, callWithReceivable); - })); - } - private HapiSpec contractTransferToSigReqAccountWithoutKeyFails() { return defaultHapiSpec("ContractTransferToSigReqAccountWithoutKeyFails") .given( @@ -2274,131 +2073,6 @@ private HapiSpec transferZeroHbars() { getAccountBalance(RECEIVER).hasTinyBars(10_000L)); } - private HapiSpec lpFarmSimulation() { - final var adminKey = "adminKey"; - final var gasToOffer = 4_000_000; - final var farmInitcodeLoc = "src/main/resource/contract/bytecodes/farmInitcode.bin"; - final var consAbi = "{ \"inputs\": [ { \"internalType\": \"address\", \"name\": \"_devaddr\", \"type\":" - + " \"address\" }, { \"internalType\": \"address\", \"name\": \"_rentPayer\"," - + " \"type\": \"address\" }, { \"internalType\": \"uint256\", \"name\":" - + " \"_saucePerSecond\", \"type\": \"uint256\" }, { \"internalType\":" - + " \"uint256\", \"name\": \"_hbarPerSecond\", \"type\": \"uint256\" }, {" - + " \"internalType\": \"uint256\", \"name\": \"_maxSauceSupply\", \"type\":" - + " \"uint256\" }, { \"internalType\": \"uint256\", \"name\":" - + " \"_depositFeeTinyCents\", \"type\": \"uint256\" } ], \"stateMutability\":" - + " \"nonpayable\", \"type\": \"constructor\" }"; - final var addPoolAbi = "{ \"inputs\": [ { \"internalType\": \"uint256\", \"name\": \"_allocPoint\"," - + " \"type\": \"uint256\" }, { \"internalType\": \"address\", \"name\":" - + " \"_lpToken\", \"type\": \"address\" } ], \"name\": \"add\"," - + " \"outputs\": [], \"stateMutability\": \"nonpayable\", \"type\":" - + " \"function\" }"; - final var depositAbi = "{ \"inputs\": [ { \"internalType\": \"uint256\", \"name\": \"_pid\", \"type\":" - + " \"uint256\" }, { \"internalType\": \"uint256\", \"name\": \"_amount\"," - + " \"type\": \"uint256\" } ], \"name\": \"deposit\", \"outputs\": []," - + " \"stateMutability\": \"payable\", \"type\": \"function\" }"; - final var withdrawAbi = "{ \"inputs\": [ { \"internalType\": \"uint256\", \"name\": \"_pid\", \"type\":" - + " \"uint256\" }, { \"internalType\": \"uint256\", \"name\": \"_amount\"," - + " \"type\": \"uint256\" } ], \"name\": \"withdraw\", \"outputs\": []," - + STATE_MUTABILITY_NONPAYABLE_TYPE_FUNCTION; - final var setSauceAbi = "{ 
\"inputs\": [ { \"internalType\": \"address\", \"name\": \"_sauce\", \"type\":" - + " \"address\" } ], \"name\": \"setSauceAddress\", \"outputs\": []," - + STATE_MUTABILITY_NONPAYABLE_TYPE_FUNCTION; - final var transferAbi = "{ \"inputs\": [ { \"internalType\": \"address\", \"name\": \"newOwner\", \"type\":" - + " \"address\" } ], \"name\": \"transferOwnership\", \"outputs\": []," - + STATE_MUTABILITY_NONPAYABLE_TYPE_FUNCTION; - final var initcode = "farmInitcode"; - final var farm = "farm"; - final var dev = "dev"; - final var lp = "lp"; - final var sauce = "sauce"; - final var rentPayer = "rentPayer"; - final AtomicReference devAddr = new AtomicReference<>(); - final AtomicReference ownerAddr = new AtomicReference<>(); - final AtomicReference sauceAddr = new AtomicReference<>(); - final AtomicReference lpTokenAddr = new AtomicReference<>(); - final AtomicReference rentPayerAddr = new AtomicReference<>(); - - return defaultHapiSpec("lpFarmSimulation") - .given( - newKeyNamed(adminKey), - fileCreate(initcode), - cryptoCreate(OWNER) - .balance(ONE_MILLION_HBARS) - .exposingCreatedIdTo(id -> ownerAddr.set(asHexedSolidityAddress(id))), - cryptoCreate(dev) - .balance(ONE_MILLION_HBARS) - .exposingCreatedIdTo(id -> devAddr.set(asHexedSolidityAddress(id))), - cryptoCreate(rentPayer) - .balance(ONE_MILLION_HBARS) - .exposingCreatedIdTo(id -> rentPayerAddr.set(asHexedSolidityAddress(id))), - updateLargeFile(GENESIS, initcode, extractByteCode(farmInitcodeLoc)), - sourcing(() -> new HapiContractCreate( - farm, - consAbi, - asHeadlongAddress(devAddr.get()), - asHeadlongAddress(rentPayerAddr.get()), - BigInteger.valueOf(4804540L), - BigInteger.valueOf(10000L), - BigInteger.valueOf(1000000000000000L), - BigInteger.valueOf(2500000000L)) - .bytecode(initcode)), - tokenCreate(sauce) - .supplyType(TokenSupplyType.FINITE) - .initialSupply(300_000_000) - .maxSupply(1_000_000_000) - .treasury(farm) - .adminKey(adminKey) - .supplyKey(adminKey) - .exposingCreatedIdTo(idLit -> - sauceAddr.set(asHexedSolidityAddress(HapiPropertySource.asToken(idLit)))), - tokenCreate(lp) - .treasury(dev) - .initialSupply(1_000_000_000) - .exposingCreatedIdTo(idLit -> - lpTokenAddr.set(asHexedSolidityAddress(HapiPropertySource.asToken(idLit)))), - tokenAssociate(dev, sauce), - sourcing( - () -> contractCallWithFunctionAbi(farm, setSauceAbi, asHeadlongAddress(sauceAddr.get())) - .gas(gasToOffer) - .refusingEthConversion()), - sourcing( - () -> contractCallWithFunctionAbi(farm, transferAbi, asHeadlongAddress(ownerAddr.get())) - .gas(gasToOffer) - .refusingEthConversion())) - .when( - sourcing(() -> contractCallWithFunctionAbi( - farm, - addPoolAbi, - BigInteger.valueOf(2392L), - asHeadlongAddress(lpTokenAddr.get())) - .via("add") - .payingWith(OWNER) - .gas(gasToOffer) - .refusingEthConversion()), - newKeyNamed("contractControl").shape(KeyShape.CONTRACT.signedWith(farm)), - tokenUpdate(sauce).supplyKey("contractControl"), - sourcing(() -> contractCallWithFunctionAbi( - farm, depositAbi, BigInteger.ZERO, BigInteger.valueOf(100_000)) - .sending(ONE_HUNDRED_HBARS) - .payingWith(dev) - .gas(gasToOffer) - .refusingEthConversion()), - sleepFor(1000), - sourcing(() -> contractCallWithFunctionAbi( - farm, depositAbi, BigInteger.ZERO, BigInteger.valueOf(100_000)) - .sending(ONE_HUNDRED_HBARS) - .payingWith(dev) - .gas(gasToOffer) - .via("second") - .refusingEthConversion()), - getTxnRecord("second").andAllChildRecords().logged()) - .then(sourcing(() -> contractCallWithFunctionAbi( - farm, withdrawAbi, BigInteger.ZERO, 
BigInteger.valueOf(200_000)) - .payingWith(dev) - .gas(gasToOffer) - .refusingEthConversion())); - } - private HapiSpec consTimeManagementWorksWithRevertedInternalCreations() { final var contract = "ConsTimeRepro"; final var failingCall = "FailingCall"; diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCallV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCallV1SecurityModelSuite.java new file mode 100644 index 000000000000..a2217c59f1a9 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCallV1SecurityModelSuite.java @@ -0,0 +1,435 @@ +/* + * Copyright (C) 2020-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.services.bdd.suites.contract.hapi; + +import static com.hedera.services.bdd.spec.HapiPropertySource.asHexedSolidityAddress; +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenNftInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.burnToken; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCallWithFunctionAbi; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.fileCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenUpdate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overridingTwo; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sleepFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; +import static 
com.hedera.services.bdd.spec.utilops.UtilVerbs.updateLargeFile; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.extractByteCode; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; +import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; + +import com.google.protobuf.ByteString; +import com.hedera.services.bdd.spec.HapiPropertySource; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.keys.KeyShape; +import com.hedera.services.bdd.spec.transactions.contract.HapiContractCreate; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hederahashgraph.api.proto.java.TokenSupplyType; +import com.hederahashgraph.api.proto.java.TokenType; +import java.math.BigInteger; +import java.util.List; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +public class ContractCallV1SecurityModelSuite extends HapiSuite { + + private static final Logger LOG = LogManager.getLogger(ContractCallV1SecurityModelSuite.class); + + public static final String PAY_RECEIVABLE_CONTRACT = "PayReceivable"; + public static final String TRANSFERRING_CONTRACT = "Transferring"; + private static final String OWNER = "owner"; + private static final String CONTRACT_CALLER = "contractCaller"; + private static final String RECEIVABLE_SIG_REQ_ACCOUNT = "receivableSigReqAccount"; + private static final String RECEIVABLE_SIG_REQ_ACCOUNT_INFO = "receivableSigReqAccountInfo"; + private static final String TRANSFER_TO_ADDRESS = "transferToAddress"; + public static final String STATE_MUTABILITY_NONPAYABLE_TYPE_FUNCTION = + " \"stateMutability\": \"nonpayable\", \"type\": \"function\" }"; + + public static void main(String... 
args) { + new ContractCallV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + public List getSpecsInSuite() { + return List.of( + contractTransferToSigReqAccountWithKeySucceeds(), + canMintAndTransferInSameContractOperation(), + workingHoursDemo(), + lpFarmSimulation()); + } + + private HapiSpec workingHoursDemo() { + final var gasToOffer = 4_000_000; + final var contract = "WorkingHours"; + final var ticketToken = "ticketToken"; + final var adminKey = "admin"; + final var treasury = "treasury"; + final var newSupplyKey = "newSupplyKey"; + + final var ticketTaking = "ticketTaking"; + final var ticketWorking = "ticketWorking"; + final var mint = "minting"; + final var burn = "burning"; + final var preMints = List.of(ByteString.copyFromUtf8("HELLO"), ByteString.copyFromUtf8("GOODBYE")); + + final AtomicLong ticketSerialNo = new AtomicLong(); + + return propertyPreservingHapiSpec("WorkingHoursDemo") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenBurn,TokenCreate,TokenMint,TokenUpdate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(adminKey), + cryptoCreate(treasury), + // we need a new user, expiry to 1 Jan 2100 costs 11M gas for token + // associate + tokenCreate(ticketToken) + .treasury(treasury) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .initialSupply(0L) + .supplyType(TokenSupplyType.INFINITE) + .adminKey(adminKey) + .supplyKey(adminKey), + mintToken(ticketToken, preMints).via(mint), + burnToken(ticketToken, List.of(1L)).via(burn), + uploadInitCode(contract)) + .when( + withOpContext((spec, opLog) -> { + final var registry = spec.registry(); + final var tokenId = registry.getTokenID(ticketToken); + final var treasuryId = registry.getAccountID(treasury); + final var creation = contractCreate( + contract, + asHeadlongAddress(asAddress(tokenId)), + asHeadlongAddress(asAddress(treasuryId))) + .gas(gasToOffer); + allRunFor(spec, creation); + }), + newKeyNamed(newSupplyKey).shape(KeyShape.CONTRACT.signedWith(contract)), + tokenUpdate(ticketToken).supplyKey(newSupplyKey)) + .then( + /* Take a ticket */ + contractCall(contract, "takeTicket") + .alsoSigningWithFullPrefix(DEFAULT_CONTRACT_SENDER, treasury) + .gas(4_000_000) + .via(ticketTaking) + .exposingResultTo(result -> { + LOG.info("Explicit mint result is {}", result); + ticketSerialNo.set(((Long) result[0])); + }), + getTxnRecord(ticketTaking), + getAccountBalance(DEFAULT_CONTRACT_SENDER).logged().hasTokenBalance(ticketToken, 1L), + /* Our ticket number is 3 (b/c of the two pre-mints), so we must call + * work twice before the contract will actually accept our ticket. 
*/ + sourcing(() -> contractCall(contract, "workTicket", ticketSerialNo.get()) + .gas(2_000_000) + .alsoSigningWithFullPrefix(DEFAULT_CONTRACT_SENDER)), + getAccountBalance(DEFAULT_CONTRACT_SENDER).hasTokenBalance(ticketToken, 1L), + sourcing(() -> contractCall(contract, "workTicket", ticketSerialNo.get()) + .gas(2_000_000) + .alsoSigningWithFullPrefix(DEFAULT_CONTRACT_SENDER) + .via(ticketWorking)), + getAccountBalance(DEFAULT_CONTRACT_SENDER).hasTokenBalance(ticketToken, 0L), + getTokenInfo(ticketToken).hasTotalSupply(1L), + /* Review the history */ + getTxnRecord(ticketTaking).andAllChildRecords().logged(), + getTxnRecord(ticketWorking).andAllChildRecords().logged()); + } + + private HapiSpec canMintAndTransferInSameContractOperation() { + final AtomicReference tokenMirrorAddr = new AtomicReference<>(); + final AtomicReference aCivilianMirrorAddr = new AtomicReference<>(); + final var nfToken = "nfToken"; + final var multiKey = "multiKey"; + final var aCivilian = "aCivilian"; + final var treasuryContract = "SomeERC721Scenarios"; + final var mintAndTransferTxn = "mintAndTransferTxn"; + final var mintAndTransferAndBurnTxn = "mintAndTransferAndBurnTxn"; + + return propertyPreservingHapiSpec("CanMintAndTransferInSameContractOperation") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenMint", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(multiKey), + cryptoCreate(aCivilian) + .exposingCreatedIdTo(id -> aCivilianMirrorAddr.set(asHexedSolidityAddress(id))), + uploadInitCode(treasuryContract), + contractCreate(treasuryContract).adminKey(multiKey), + tokenCreate(nfToken) + .supplyKey(multiKey) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(treasuryContract) + .initialSupply(0) + .exposingCreatedIdTo(idLit -> + tokenMirrorAddr.set(asHexedSolidityAddress(HapiPropertySource.asToken(idLit)))), + mintToken( + nfToken, + List.of( + // 1 + ByteString.copyFromUtf8("A penny for"), + // 2 + ByteString.copyFromUtf8("the Old Guy"))), + tokenAssociate(aCivilian, nfToken), + cryptoTransfer(movingUnique(nfToken, 2L).between(treasuryContract, aCivilian))) + .when(sourcing(() -> contractCall( + treasuryContract, + "nonSequiturMintAndTransfer", + asHeadlongAddress(tokenMirrorAddr.get()), + asHeadlongAddress(aCivilianMirrorAddr.get())) + .via(mintAndTransferTxn) + .gas(4_000_000) + .alsoSigningWithFullPrefix(multiKey))) + .then( + getTokenInfo(nfToken).hasTotalSupply(4L), + getTokenNftInfo(nfToken, 3L) + .hasSerialNum(3L) + .hasAccountID(aCivilian) + .hasMetadata(ByteString.copyFrom(new byte[] {(byte) 0xee})), + getTokenNftInfo(nfToken, 4L) + .hasSerialNum(4L) + .hasAccountID(aCivilian) + .hasMetadata(ByteString.copyFrom(new byte[] {(byte) 0xff})), + sourcing(() -> contractCall( + treasuryContract, + "nonSequiturMintAndTransferAndBurn", + asHeadlongAddress(tokenMirrorAddr.get()), + asHeadlongAddress(aCivilianMirrorAddr.get())) + .via(mintAndTransferAndBurnTxn) + .gas(4_000_000) + .alsoSigningWithFullPrefix(multiKey, aCivilian))); + } + + private HapiSpec contractTransferToSigReqAccountWithKeySucceeds() { + return propertyPreservingHapiSpec("ContractTransferToSigReqAccountWithKeySucceeds") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + 
"ContractCall,CryptoTransfer", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(CONTRACT_CALLER).balance(1_000_000_000_000L), + cryptoCreate(RECEIVABLE_SIG_REQ_ACCOUNT) + .balance(1_000_000_000_000L) + .receiverSigRequired(true), + getAccountInfo(CONTRACT_CALLER).savingSnapshot("contractCallerInfo"), + getAccountInfo(RECEIVABLE_SIG_REQ_ACCOUNT).savingSnapshot(RECEIVABLE_SIG_REQ_ACCOUNT_INFO), + uploadInitCode(TRANSFERRING_CONTRACT)) + .when(contractCreate(TRANSFERRING_CONTRACT).gas(300_000L).balance(5000L)) + .then(withOpContext((spec, opLog) -> { + final var accountAddress = spec.registry() + .getAccountInfo(RECEIVABLE_SIG_REQ_ACCOUNT_INFO) + .getContractAccountID(); + final var receivableAccountKey = spec.registry() + .getAccountInfo(RECEIVABLE_SIG_REQ_ACCOUNT_INFO) + .getKey(); + final var contractCallerKey = + spec.registry().getAccountInfo("contractCallerInfo").getKey(); + spec.registry().saveKey("receivableKey", receivableAccountKey); + spec.registry().saveKey("contractCallerKey", contractCallerKey); + /* if any of the keys are missing, INVALID_SIGNATURE is returned */ + final var call = contractCall( + TRANSFERRING_CONTRACT, + TRANSFER_TO_ADDRESS, + asHeadlongAddress(accountAddress), + BigInteger.ONE) + .payingWith(CONTRACT_CALLER) + .gas(300_000) + .alsoSigningWithFullPrefix("receivableKey"); + /* calling with the receivableSigReqAccount should pass without adding keys */ + final var callWithReceivable = contractCall( + TRANSFERRING_CONTRACT, + TRANSFER_TO_ADDRESS, + asHeadlongAddress(accountAddress), + BigInteger.ONE) + .payingWith(RECEIVABLE_SIG_REQ_ACCOUNT) + .gas(300_000) + .hasKnownStatus(SUCCESS); + allRunFor(spec, call, callWithReceivable); + })); + } + + private HapiSpec lpFarmSimulation() { + final var adminKey = "adminKey"; + final var gasToOffer = 4_000_000; + final var farmInitcodeLoc = "src/main/resource/contract/bytecodes/farmInitcode.bin"; + final var consAbi = "{ \"inputs\": [ { \"internalType\": \"address\", \"name\": \"_devaddr\", \"type\":" + + " \"address\" }, { \"internalType\": \"address\", \"name\": \"_rentPayer\"," + + " \"type\": \"address\" }, { \"internalType\": \"uint256\", \"name\":" + + " \"_saucePerSecond\", \"type\": \"uint256\" }, { \"internalType\":" + + " \"uint256\", \"name\": \"_hbarPerSecond\", \"type\": \"uint256\" }, {" + + " \"internalType\": \"uint256\", \"name\": \"_maxSauceSupply\", \"type\":" + + " \"uint256\" }, { \"internalType\": \"uint256\", \"name\":" + + " \"_depositFeeTinyCents\", \"type\": \"uint256\" } ], \"stateMutability\":" + + " \"nonpayable\", \"type\": \"constructor\" }"; + final var addPoolAbi = "{ \"inputs\": [ { \"internalType\": \"uint256\", \"name\": \"_allocPoint\"," + + " \"type\": \"uint256\" }, { \"internalType\": \"address\", \"name\":" + + " \"_lpToken\", \"type\": \"address\" } ], \"name\": \"add\"," + + " \"outputs\": [], \"stateMutability\": \"nonpayable\", \"type\":" + + " \"function\" }"; + final var depositAbi = "{ \"inputs\": [ { \"internalType\": \"uint256\", \"name\": \"_pid\", \"type\":" + + " \"uint256\" }, { \"internalType\": \"uint256\", \"name\": \"_amount\"," + + " \"type\": \"uint256\" } ], \"name\": \"deposit\", \"outputs\": []," + + " \"stateMutability\": \"payable\", \"type\": \"function\" }"; + final var withdrawAbi = "{ \"inputs\": [ { \"internalType\": \"uint256\", \"name\": \"_pid\", \"type\":" + + " \"uint256\" }, { \"internalType\": \"uint256\", \"name\": \"_amount\"," + + " \"type\": \"uint256\" } ], \"name\": \"withdraw\", 
\"outputs\": []," + + STATE_MUTABILITY_NONPAYABLE_TYPE_FUNCTION; + final var setSauceAbi = "{ \"inputs\": [ { \"internalType\": \"address\", \"name\": \"_sauce\", \"type\":" + + " \"address\" } ], \"name\": \"setSauceAddress\", \"outputs\": []," + + STATE_MUTABILITY_NONPAYABLE_TYPE_FUNCTION; + final var transferAbi = "{ \"inputs\": [ { \"internalType\": \"address\", \"name\": \"newOwner\", \"type\":" + + " \"address\" } ], \"name\": \"transferOwnership\", \"outputs\": []," + + STATE_MUTABILITY_NONPAYABLE_TYPE_FUNCTION; + final var initcode = "farmInitcode"; + final var farm = "farm"; + final var dev = "dev"; + final var lp = "lp"; + final var sauce = "sauce"; + final var rentPayer = "rentPayer"; + final AtomicReference devAddr = new AtomicReference<>(); + final AtomicReference ownerAddr = new AtomicReference<>(); + final AtomicReference sauceAddr = new AtomicReference<>(); + final AtomicReference lpTokenAddr = new AtomicReference<>(); + final AtomicReference rentPayerAddr = new AtomicReference<>(); + + return propertyPreservingHapiSpec("lpFarmSimulation") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenMint,TokenUpdate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(adminKey), + fileCreate(initcode), + cryptoCreate(OWNER) + .balance(ONE_MILLION_HBARS) + .exposingCreatedIdTo(id -> ownerAddr.set(asHexedSolidityAddress(id))), + cryptoCreate(dev) + .balance(ONE_MILLION_HBARS) + .exposingCreatedIdTo(id -> devAddr.set(asHexedSolidityAddress(id))), + cryptoCreate(rentPayer) + .balance(ONE_MILLION_HBARS) + .exposingCreatedIdTo(id -> rentPayerAddr.set(asHexedSolidityAddress(id))), + updateLargeFile(GENESIS, initcode, extractByteCode(farmInitcodeLoc)), + sourcing(() -> new HapiContractCreate( + farm, + consAbi, + asHeadlongAddress(devAddr.get()), + asHeadlongAddress(rentPayerAddr.get()), + BigInteger.valueOf(4804540L), + BigInteger.valueOf(10000L), + BigInteger.valueOf(1000000000000000L), + BigInteger.valueOf(2500000000L)) + .bytecode(initcode)), + tokenCreate(sauce) + .supplyType(TokenSupplyType.FINITE) + .initialSupply(300_000_000) + .maxSupply(1_000_000_000) + .treasury(farm) + .adminKey(adminKey) + .supplyKey(adminKey) + .exposingCreatedIdTo(idLit -> + sauceAddr.set(asHexedSolidityAddress(HapiPropertySource.asToken(idLit)))), + tokenCreate(lp) + .treasury(dev) + .initialSupply(1_000_000_000) + .exposingCreatedIdTo(idLit -> + lpTokenAddr.set(asHexedSolidityAddress(HapiPropertySource.asToken(idLit)))), + tokenAssociate(dev, sauce), + sourcing( + () -> contractCallWithFunctionAbi(farm, setSauceAbi, asHeadlongAddress(sauceAddr.get())) + .gas(gasToOffer) + .refusingEthConversion()), + sourcing( + () -> contractCallWithFunctionAbi(farm, transferAbi, asHeadlongAddress(ownerAddr.get())) + .gas(gasToOffer) + .refusingEthConversion())) + .when( + sourcing(() -> contractCallWithFunctionAbi( + farm, + addPoolAbi, + BigInteger.valueOf(2392L), + asHeadlongAddress(lpTokenAddr.get())) + .via("add") + .payingWith(OWNER) + .gas(gasToOffer) + .refusingEthConversion()), + newKeyNamed("contractControl").shape(KeyShape.CONTRACT.signedWith(farm)), + tokenUpdate(sauce).supplyKey("contractControl"), + sourcing(() -> contractCallWithFunctionAbi( + farm, depositAbi, BigInteger.ZERO, BigInteger.valueOf(100_000)) + .sending(ONE_HUNDRED_HBARS) + .payingWith(dev) + .gas(gasToOffer) + 
.refusingEthConversion()), + sleepFor(1000), + sourcing(() -> contractCallWithFunctionAbi( + farm, depositAbi, BigInteger.ZERO, BigInteger.valueOf(100_000)) + .sending(ONE_HUNDRED_HBARS) + .payingWith(dev) + .gas(gasToOffer) + .via("second") + .refusingEthConversion()), + getTxnRecord("second").andAllChildRecords().logged()) + .then(sourcing(() -> contractCallWithFunctionAbi( + farm, withdrawAbi, BigInteger.ZERO, BigInteger.valueOf(200_000)) + .payingWith(dev) + .gas(gasToOffer) + .refusingEthConversion())); + } + + @Override + protected Logger getResultsLogger() { + return LOG; + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCreateSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCreateSuite.java index 80cbde02577f..79d3d758b82f 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCreateSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCreateSuite.java @@ -35,7 +35,6 @@ import static com.hedera.services.bdd.spec.keys.SigControl.ON; import static com.hedera.services.bdd.spec.queries.QueryVerbs.contractCallLocal; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractInfo; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; import static com.hedera.services.bdd.spec.transactions.TxnUtils.bytecodePath; @@ -118,7 +117,6 @@ public List getSpecsInSuite() { childCreationsHaveExpectedKeysWithOmittedAdminKey(), cannotCreateTooLargeContract(), revertedTryExtCallHasNoSideEffects(), - receiverSigReqTransferRecipientMustSignWithFullPubKeyPrefix(), cannotSendToNonExistentAccount(), delegateContractIdRequiredForTransferInDelegateCall(), vanillaSuccess(), @@ -416,57 +414,6 @@ private HapiSpec delegateContractIdRequiredForTransferInDelegateCall() { getAccountBalance(beneficiary).hasTinyBars(3 * (totalToSend / 2))); } - private HapiSpec receiverSigReqTransferRecipientMustSignWithFullPubKeyPrefix() { - final var sendInternalAndDelegateContract = "SendInternalAndDelegate"; - final var justSendContract = "JustSend"; - final var beneficiary = "civilian"; - final var balanceToDistribute = 1_000L; - - final AtomicLong justSendContractNum = new AtomicLong(); - final AtomicLong beneficiaryAccountNum = new AtomicLong(); - - return defaultHapiSpec("ReceiverSigReqTransferRecipientMustSignWithFullPubKeyPrefix") - .given( - cryptoCreate(beneficiary) - .balance(0L) - .receiverSigRequired(true) - .exposingCreatedIdTo(id -> beneficiaryAccountNum.set(id.getAccountNum())), - uploadInitCode(sendInternalAndDelegateContract, justSendContract)) - .when( - contractCreate(justSendContract).gas(300_000L).exposingNumTo(justSendContractNum::set), - contractCreate(sendInternalAndDelegateContract) - .gas(300_000L) - .balance(balanceToDistribute)) - .then( - /* Sending requires receiver signature */ - sourcing(() -> contractCall( - sendInternalAndDelegateContract, - "sendRepeatedlyTo", - BigInteger.valueOf(justSendContractNum.get()), - BigInteger.valueOf(beneficiaryAccountNum.get()), - BigInteger.valueOf(balanceToDistribute / 2)) - .hasKnownStatus(INVALID_SIGNATURE)), - /* But it's not enough to just sign using an incomplete prefix */ - sourcing(() -> contractCall( - sendInternalAndDelegateContract, - 
"sendRepeatedlyTo", - BigInteger.valueOf(justSendContractNum.get()), - BigInteger.valueOf(beneficiaryAccountNum.get()), - BigInteger.valueOf(balanceToDistribute / 2)) - .signedBy(DEFAULT_PAYER, beneficiary) - .hasKnownStatus(INVALID_SIGNATURE)), - /* We have to specify the full prefix so the sig can be verified async */ - getAccountInfo(beneficiary).logged(), - sourcing(() -> contractCall( - sendInternalAndDelegateContract, - "sendRepeatedlyTo", - BigInteger.valueOf(justSendContractNum.get()), - BigInteger.valueOf(beneficiaryAccountNum.get()), - BigInteger.valueOf(balanceToDistribute / 2)) - .alsoSigningWithFullPrefix(beneficiary)), - getAccountBalance(beneficiary).logged()); - } - private HapiSpec cannotCreateTooLargeContract() { ByteString contents; try { @@ -559,11 +506,8 @@ HapiSpec vanillaSuccess() { return defaultHapiSpec("VanillaSuccess") .given( uploadInitCode(contract), - contractCreate(contract).adminKey(THRESHOLD).maxAutomaticTokenAssociations(10), - getContractInfo(contract) - .has(contractWith().maxAutoAssociations(10)) - .logged() - .saveToRegistry(PARENT_INFO)) + contractCreate(contract).adminKey(THRESHOLD), + getContractInfo(contract).saveToRegistry(PARENT_INFO)) .when( contractCall(contract, "create").gas(1_000_000L).via("createChildTxn"), contractCall(contract, "getIndirect").gas(1_000_000L).via("getChildResultTxn"), diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCreateV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCreateV1SecurityModelSuite.java new file mode 100644 index 000000000000..ebdc5fc55428 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/hapi/ContractCreateV1SecurityModelSuite.java @@ -0,0 +1,117 @@ +/* + * Copyright (C) 2020-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.suites.contract.hapi; + +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overriding; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE; + +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.suites.HapiSuite; +import java.math.BigInteger; +import java.util.List; +import java.util.concurrent.atomic.AtomicLong; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +@SuppressWarnings("java:S1192") // "string literal should not be duplicated" - this rule makes test suites worse +public class ContractCreateV1SecurityModelSuite extends HapiSuite { + private static final Logger log = LogManager.getLogger(ContractCreateV1SecurityModelSuite.class); + + public static final String EMPTY_CONSTRUCTOR_CONTRACT = "EmptyConstructor"; + + public static void main(String... 
args) { + new ContractCreateV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public List getSpecsInSuite() { + return List.of(receiverSigReqTransferRecipientMustSignWithFullPubKeyPrefix()); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + private HapiSpec receiverSigReqTransferRecipientMustSignWithFullPubKeyPrefix() { + final var sendInternalAndDelegateContract = "SendInternalAndDelegate"; + final var justSendContract = "JustSend"; + final var beneficiary = "civilian"; + final var balanceToDistribute = 1_000L; + + final AtomicLong justSendContractNum = new AtomicLong(); + final AtomicLong beneficiaryAccountNum = new AtomicLong(); + + return propertyPreservingHapiSpec("ReceiverSigReqTransferRecipientMustSignWithFullPubKeyPrefix") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(beneficiary) + .balance(0L) + .receiverSigRequired(true) + .exposingCreatedIdTo(id -> beneficiaryAccountNum.set(id.getAccountNum())), + uploadInitCode(sendInternalAndDelegateContract, justSendContract)) + .when( + contractCreate(justSendContract).gas(300_000L).exposingNumTo(justSendContractNum::set), + contractCreate(sendInternalAndDelegateContract) + .gas(300_000L) + .balance(balanceToDistribute)) + .then( + /* Sending requires receiver signature */ + sourcing(() -> contractCall( + sendInternalAndDelegateContract, + "sendRepeatedlyTo", + BigInteger.valueOf(justSendContractNum.get()), + BigInteger.valueOf(beneficiaryAccountNum.get()), + BigInteger.valueOf(balanceToDistribute / 2)) + .hasKnownStatus(INVALID_SIGNATURE)), + /* But it's not enough to just sign using an incomplete prefix */ + sourcing(() -> contractCall( + sendInternalAndDelegateContract, + "sendRepeatedlyTo", + BigInteger.valueOf(justSendContractNum.get()), + BigInteger.valueOf(beneficiaryAccountNum.get()), + BigInteger.valueOf(balanceToDistribute / 2)) + .signedBy(DEFAULT_PAYER, beneficiary) + .hasKnownStatus(INVALID_SIGNATURE)), + /* We have to specify the full prefix so the sig can be verified async */ + getAccountInfo(beneficiary).logged(), + sourcing(() -> contractCall( + sendInternalAndDelegateContract, + "sendRepeatedlyTo", + BigInteger.valueOf(justSendContractNum.get()), + BigInteger.valueOf(beneficiaryAccountNum.get()), + BigInteger.valueOf(balanceToDistribute / 2)) + .alsoSigningWithFullPrefix(beneficiary)), + getAccountBalance(beneficiary).logged()); + } + + @Override + protected Logger getResultsLogger() { + return log; + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/opcodes/Create2OperationSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/opcodes/Create2OperationSuite.java index 7306c70b6d52..fa81d881acac 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/opcodes/Create2OperationSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/opcodes/Create2OperationSuite.java @@ -30,7 +30,6 @@ import static com.hedera.services.bdd.spec.assertions.ContractInfoAsserts.contractWith; import static com.hedera.services.bdd.spec.assertions.ContractLogAsserts.logWith; import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; -import static com.hedera.services.bdd.spec.keys.TrieSigMapGenerator.uniqueWithFullPrefixesFor; import static 
com.hedera.services.bdd.spec.queries.QueryVerbs.contractCallLocal; import static com.hedera.services.bdd.spec.queries.QueryVerbs.contractCallLocalWithFunctionAbi; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; @@ -181,7 +180,6 @@ public List getSpecsInSuite() { allLogOpcodesResolveExpectedContractId(), eip1014AliasIsPriorityInErcOwnerPrecompile(), canAssociateInConstructor(), - childInheritanceOfAdminKeyAuthorizesParentAssociationInConstructor(), /* --- HIP 583 --- */ canMergeCreate2ChildWithHollowAccount(), canMergeCreate2MultipleCreatesWithHollowAccount()); @@ -780,36 +778,6 @@ private HapiSpec eip1014AliasIsPriorityInErcOwnerPrecompile() { .withOwner(unhex(userAliasAddr.get()))))))); } - private HapiSpec childInheritanceOfAdminKeyAuthorizesParentAssociationInConstructor() { - final var ft = "fungibleToken"; - final var multiKey = SWISS; - final var creationAndAssociation = "creationAndAssociation"; - final var immediateChildAssoc = "ImmediateChildAssociation"; - - final AtomicReference tokenMirrorAddr = new AtomicReference<>(); - final AtomicReference childMirrorAddr = new AtomicReference<>(); - - return propertyPreservingHapiSpec("childInheritanceOfAdminKeyAuthorizesParentAssociationInConstructor") - .preserving("contracts.maxNumWithHapiSigsAccess") - .given( - overriding("contracts.maxNumWithHapiSigsAccess", "10_000_000"), - newKeyNamed(multiKey), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(ft) - .exposingCreatedIdTo(id -> - tokenMirrorAddr.set(hex(asSolidityAddress(HapiPropertySource.asToken(id)))))) - .when(uploadInitCode(immediateChildAssoc), sourcing(() -> contractCreate( - immediateChildAssoc, asHeadlongAddress(tokenMirrorAddr.get())) - .gas(2_000_000) - .adminKey(multiKey) - .payingWith(GENESIS) - .sigMapPrefixes(uniqueWithFullPrefixesFor(GENESIS, multiKey)) - .signedBy(GENESIS, multiKey) - .exposingNumTo(n -> childMirrorAddr.set("0.0." + (n + 1))) - .via(creationAndAssociation))) - .then(sourcing(() -> getContractInfo(childMirrorAddr.get()).logged())); - } - @SuppressWarnings("java:S5669") private HapiSpec canUseAliasesInPrecompilesAndContractKeys() { final var creation2 = CREATE_2_TXN; diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/opcodes/Create2OperationV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/opcodes/Create2OperationV1SecurityModelSuite.java new file mode 100644 index 000000000000..f9eb230bed58 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/opcodes/Create2OperationV1SecurityModelSuite.java @@ -0,0 +1,100 @@ +/* + * Copyright (C) 2022-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.suites.contract.opcodes; + +import static com.hedera.services.bdd.spec.HapiPropertySource.asSolidityAddress; +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.keys.TrieSigMapGenerator.uniqueWithFullPrefixesFor; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractInfo; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overridingTwo; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.swirlds.common.utility.CommonUtils.hex; + +import com.hedera.services.bdd.spec.HapiPropertySource; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.suites.HapiSuite; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +public class Create2OperationV1SecurityModelSuite extends HapiSuite { + + private static final Logger LOG = LogManager.getLogger(Create2OperationV1SecurityModelSuite.class); + private static final String SWISS = "swiss"; + private static final String CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS = "contracts.allowSystemUseOfHapiSigs"; + + public static void main(String... 
args) { + new Create2OperationV1SecurityModelSuite().runSuiteSync(); + } + + @Override + protected Logger getResultsLogger() { + return LOG; + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + public List getSpecsInSuite() { + return List.of(childInheritanceOfAdminKeyAuthorizesParentAssociationInConstructor()); + } + + private HapiSpec childInheritanceOfAdminKeyAuthorizesParentAssociationInConstructor() { + final var ft = "fungibleToken"; + final var multiKey = SWISS; + final var creationAndAssociation = "creationAndAssociation"; + final var immediateChildAssoc = "ImmediateChildAssociation"; + + final AtomicReference tokenMirrorAddr = new AtomicReference<>(); + final AtomicReference childMirrorAddr = new AtomicReference<>(); + + return propertyPreservingHapiSpec("childInheritanceOfAdminKeyAuthorizesParentAssociationInConstructor") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "TokenAssociateToAccount", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + "10_000_000"), + newKeyNamed(multiKey), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(ft) + .exposingCreatedIdTo(id -> + tokenMirrorAddr.set(hex(asSolidityAddress(HapiPropertySource.asToken(id)))))) + .when(uploadInitCode(immediateChildAssoc), sourcing(() -> contractCreate( + immediateChildAssoc, asHeadlongAddress(tokenMirrorAddr.get())) + .gas(2_000_000) + .adminKey(multiKey) + .payingWith(GENESIS) + .sigMapPrefixes(uniqueWithFullPrefixesFor(GENESIS, multiKey)) + .signedBy(GENESIS, multiKey) + .exposingNumTo(n -> childMirrorAddr.set("0.0." + (n + 1))) + .via(creationAndAssociation))) + .then(sourcing(() -> getContractInfo(childMirrorAddr.get()).logged())); + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/opcodes/SelfDestructSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/opcodes/SelfDestructSuite.java index 65be78030227..afc83d71ac3d 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/opcodes/SelfDestructSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/opcodes/SelfDestructSuite.java @@ -28,7 +28,7 @@ import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; -import static com.hedera.services.bdd.suites.contract.precompile.LazyCreateThroughPrecompileSuite.mirrorAddrWith; +import static com.hedera.services.bdd.suites.contract.Utils.mirrorAddrWith; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.ACCOUNT_DELETED; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/AssociatePrecompileSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/AssociatePrecompileSuite.java index c7f78fae4f57..879692b34316 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/AssociatePrecompileSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/AssociatePrecompileSuite.java @@ -16,7 +16,6 @@ package 
com.hedera.services.bdd.suites.contract.precompile; -import static com.hedera.services.bdd.spec.HapiPropertySource.asDotDelimitedLongArray; import static com.hedera.services.bdd.spec.HapiPropertySource.idAsHeadlongAddress; import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; @@ -34,7 +33,6 @@ import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoUpdate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; -import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.emptyChildRecordsCheck; @@ -42,16 +40,12 @@ import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.asToken; import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_TOKEN_ID; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; -import static com.hederahashgraph.api.proto.java.TokenFreezeStatus.FreezeNotApplicable; -import static com.hederahashgraph.api.proto.java.TokenFreezeStatus.Frozen; -import static com.hederahashgraph.api.proto.java.TokenFreezeStatus.Unfrozen; -import static com.hederahashgraph.api.proto.java.TokenKycStatus.KycNotApplicable; -import static com.hederahashgraph.api.proto.java.TokenKycStatus.Revoked; import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -61,34 +55,24 @@ import com.hedera.services.bdd.spec.keys.KeyShape; import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; import com.hedera.services.bdd.suites.HapiSuite; -import com.hedera.services.bdd.suites.token.TokenAssociationSpecs; import com.hederahashgraph.api.proto.java.AccountID; -import com.hederahashgraph.api.proto.java.ResponseCodeEnum; import com.hederahashgraph.api.proto.java.TokenID; import java.util.List; import java.util.concurrent.atomic.AtomicReference; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.jetbrains.annotations.NotNull; public class AssociatePrecompileSuite extends HapiSuite { private static final Logger log = LogManager.getLogger(AssociatePrecompileSuite.class); private static final long GAS_TO_OFFER = 4_000_000L; - private static final long TOTAL_SUPPLY = 1_000; private static final KeyShape DELEGATE_CONTRACT_KEY_SHAPE = KeyShape.threshOf(1, SIMPLE, DELEGATE_CONTRACT); private static final String TOKEN_TREASURY = "treasury"; - private static final String OUTER_CONTRACT = "NestedAssociateDissociate"; private static final String INNER_CONTRACT = "AssociateDissociate"; public static final String THE_CONTRACT = "AssociateDissociate"; private static final 
String THE_GRACEFULLY_FAILING_CONTRACT = "GracefullyFailing"; private static final String ACCOUNT = "anybody"; - private static final String FROZEN_TOKEN = "Frozen token"; - private static final String UNFROZEN_TOKEN = "Unfrozen token"; - private static final String KYC_TOKEN = "KYC token"; private static final String DELEGATE_KEY = "Delegate key"; - private static final String FREEZE_KEY = "Freeze key"; - private static final String KYC_KEY = "KYC key"; private static final byte[] ACCOUNT_ADDRESS = asAddress(AccountID.newBuilder().build()); private static final byte[] TOKEN_ADDRESS = asAddress(TokenID.newBuilder().build()); @@ -121,10 +105,7 @@ List negativeSpecs() { } List positiveSpecs() { - return List.of( - nestedAssociateWorksAsExpected(), - multipleAssociatePrecompileWithSignatureWorksForFungible(), - associateWithMissingEvmAddressHasSaneTxnAndRecord()); + return List.of(associateWithMissingEvmAddressHasSaneTxnAndRecord()); } /* -- HSCS-PREC-27 from HTS Precompile Test Plan -- */ @@ -279,128 +260,6 @@ private HapiSpec invalidlyFormattedAbiCallGracefullyFailsWithMultipleContractCal getAccountInfo(ACCOUNT).hasToken(relationshipWith(VANILLA_TOKEN))); } - /* -- HSCS-PREC-006 from HTS Precompile Test Plan -- */ - private HapiSpec multipleAssociatePrecompileWithSignatureWorksForFungible() { - final AtomicReference accountID = new AtomicReference<>(); - final AtomicReference frozenTokenID = new AtomicReference<>(); - final AtomicReference unfrozenTokenID = new AtomicReference<>(); - final AtomicReference kycTokenID = new AtomicReference<>(); - final AtomicReference vanillaTokenID = new AtomicReference<>(); - - return defaultHapiSpec("multipleAssociatePrecompileWithSignatureWorksForFungible") - .given( - newKeyNamed(FREEZE_KEY), - newKeyNamed(KYC_KEY), - cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS).exposingCreatedIdTo(accountID::set), - cryptoCreate(TOKEN_TREASURY).balance(0L), - tokenCreate(FROZEN_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .initialSupply(TOTAL_SUPPLY) - .freezeKey(FREEZE_KEY) - .freezeDefault(true) - .exposingCreatedIdTo(id -> frozenTokenID.set(asToken(id))), - tokenCreate(UNFROZEN_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .freezeKey(FREEZE_KEY) - .freezeDefault(false) - .exposingCreatedIdTo(id -> unfrozenTokenID.set(asToken(id))), - tokenCreate(KYC_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .kycKey(KYC_KEY) - .exposingCreatedIdTo(id -> kycTokenID.set(asToken(id))), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - uploadInitCode(THE_CONTRACT), - contractCreate(THE_CONTRACT)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - THE_CONTRACT, - "tokensAssociate", - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), - new Address[] { - HapiParserUtil.asHeadlongAddress(asAddress(frozenTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(unfrozenTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(kycTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())) - }) - .alsoSigningWithFullPrefix(ACCOUNT) - .via("MultipleTokensAssociationsTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(ResponseCodeEnum.SUCCESS)))) - .then( - childRecordsCheck( - "MultipleTokensAssociationsTxn", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS)))), - 
getAccountInfo(ACCOUNT) - .hasToken(relationshipWith(FROZEN_TOKEN) - .kyc(KycNotApplicable) - .freeze(Frozen)) - .hasToken(relationshipWith(UNFROZEN_TOKEN) - .kyc(KycNotApplicable) - .freeze(Unfrozen)) - .hasToken( - relationshipWith(KYC_TOKEN).kyc(Revoked).freeze(FreezeNotApplicable)) - .hasToken(relationshipWith(TokenAssociationSpecs.VANILLA_TOKEN) - .kyc(KycNotApplicable) - .freeze(FreezeNotApplicable))); - } - - /* -- HSCS-PREC-010 from HTS Precompile Test Plan -- */ - private HapiSpec nestedAssociateWorksAsExpected() { - final AtomicReference accountID = new AtomicReference<>(); - final AtomicReference vanillaTokenID = new AtomicReference<>(); - - return defaultHapiSpec("nestedAssociateWorksAsExpected") - .given( - cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS).exposingCreatedIdTo(accountID::set), - cryptoCreate(TOKEN_TREASURY).balance(0L), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - uploadInitCode(INNER_CONTRACT, OUTER_CONTRACT), - contractCreate(INNER_CONTRACT)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - OUTER_CONTRACT, asHeadlongAddress(getNestedContractAddress(INNER_CONTRACT, spec))), - contractCall( - OUTER_CONTRACT, - "associateDissociateContractCall", - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get()))) - .alsoSigningWithFullPrefix(ACCOUNT) - .via("nestedAssociateTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(ResponseCodeEnum.SUCCESS)))) - .then( - childRecordsCheck( - "nestedAssociateTxn", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS)))), - getAccountInfo(ACCOUNT).hasNoTokenRelationship(VANILLA_TOKEN)); - } - private HapiSpec associateWithMissingEvmAddressHasSaneTxnAndRecord() { final AtomicReference
tokenAddress = new AtomicReference<>(); final var missingAddress = @@ -451,19 +310,4 @@ private HapiSpec invalidSingleAbiCallConsumesAllProvidedGas() { protected Logger getResultsLogger() { return log; } - - /* --- Helpers --- */ - private static TokenID asToken(String v) { - long[] nativeParts = asDotDelimitedLongArray(v); - return TokenID.newBuilder() - .setShardNum(nativeParts[0]) - .setRealmNum(nativeParts[1]) - .setTokenNum(nativeParts[2]) - .build(); - } - - @NotNull - public static String getNestedContractAddress(final String outerContract, final HapiSpec spec) { - return HapiPropertySource.asHexedSolidityAddress(spec.registry().getContractId(outerContract)); - } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/AssociatePrecompileV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/AssociatePrecompileV1SecurityModelSuite.java new file mode 100644 index 000000000000..f6cfe83cb05d --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/AssociatePrecompileV1SecurityModelSuite.java @@ -0,0 +1,240 @@ +/* + * Copyright (C) 2021-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; +import static com.hedera.services.bdd.spec.queries.crypto.ExpectedTokenRel.relationshipWith; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overridingTwo; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.asToken; +import static com.hedera.services.bdd.suites.contract.Utils.getNestedContractAddress; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; +import static com.hederahashgraph.api.proto.java.TokenFreezeStatus.FreezeNotApplicable; +import static com.hederahashgraph.api.proto.java.TokenFreezeStatus.Frozen; +import static com.hederahashgraph.api.proto.java.TokenFreezeStatus.Unfrozen; +import static com.hederahashgraph.api.proto.java.TokenKycStatus.KycNotApplicable; +import static com.hederahashgraph.api.proto.java.TokenKycStatus.Revoked; +import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; + +import com.esaulpaugh.headlong.abi.Address; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hedera.services.bdd.suites.token.TokenAssociationSpecs; +import com.hederahashgraph.api.proto.java.AccountID; +import com.hederahashgraph.api.proto.java.ResponseCodeEnum; +import com.hederahashgraph.api.proto.java.TokenID; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +public class AssociatePrecompileV1SecurityModelSuite extends HapiSuite { + private static final Logger log = 
LogManager.getLogger(AssociatePrecompileV1SecurityModelSuite.class); + + private static final long GAS_TO_OFFER = 4_000_000L; + private static final long TOTAL_SUPPLY = 1_000; + private static final String TOKEN_TREASURY = "treasury"; + private static final String OUTER_CONTRACT = "NestedAssociateDissociate"; + private static final String INNER_CONTRACT = "AssociateDissociate"; + public static final String THE_CONTRACT = "AssociateDissociate"; + private static final String ACCOUNT = "anybody"; + private static final String FROZEN_TOKEN = "Frozen token"; + private static final String UNFROZEN_TOKEN = "Unfrozen token"; + private static final String KYC_TOKEN = "KYC token"; + private static final String FREEZE_KEY = "Freeze key"; + private static final String KYC_KEY = "KYC key"; + + public static void main(String... args) { + new AssociatePrecompileV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + public List getSpecsInSuite() { + return allOf(positiveSpecs(), negativeSpecs()); + } + + List negativeSpecs() { + return List.of(); + } + + List positiveSpecs() { + return List.of(nestedAssociateWorksAsExpected(), multipleAssociatePrecompileWithSignatureWorksForFungible()); + } + + /* -- HSCS-PREC-006 from HTS Precompile Test Plan -- */ + private HapiSpec multipleAssociatePrecompileWithSignatureWorksForFungible() { + final AtomicReference accountID = new AtomicReference<>(); + final AtomicReference frozenTokenID = new AtomicReference<>(); + final AtomicReference unfrozenTokenID = new AtomicReference<>(); + final AtomicReference kycTokenID = new AtomicReference<>(); + final AtomicReference vanillaTokenID = new AtomicReference<>(); + + return propertyPreservingHapiSpec("multipleAssociatePrecompileWithSignatureWorksForFungible") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(FREEZE_KEY), + newKeyNamed(KYC_KEY), + cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS).exposingCreatedIdTo(accountID::set), + cryptoCreate(TOKEN_TREASURY).balance(0L), + tokenCreate(FROZEN_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .initialSupply(TOTAL_SUPPLY) + .freezeKey(FREEZE_KEY) + .freezeDefault(true) + .exposingCreatedIdTo(id -> frozenTokenID.set(asToken(id))), + tokenCreate(UNFROZEN_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .freezeKey(FREEZE_KEY) + .freezeDefault(false) + .exposingCreatedIdTo(id -> unfrozenTokenID.set(asToken(id))), + tokenCreate(KYC_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .kycKey(KYC_KEY) + .exposingCreatedIdTo(id -> kycTokenID.set(asToken(id))), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + uploadInitCode(THE_CONTRACT), + contractCreate(THE_CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + THE_CONTRACT, + "tokensAssociate", + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), + new Address[] { + HapiParserUtil.asHeadlongAddress(asAddress(frozenTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(unfrozenTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(kycTokenID.get())), + 
HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())) + }) + .alsoSigningWithFullPrefix(ACCOUNT) + .via("MultipleTokensAssociationsTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(ResponseCodeEnum.SUCCESS)))) + .then( + childRecordsCheck( + "MultipleTokensAssociationsTxn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS)))), + getAccountInfo(ACCOUNT) + .hasToken(relationshipWith(FROZEN_TOKEN) + .kyc(KycNotApplicable) + .freeze(Frozen)) + .hasToken(relationshipWith(UNFROZEN_TOKEN) + .kyc(KycNotApplicable) + .freeze(Unfrozen)) + .hasToken( + relationshipWith(KYC_TOKEN).kyc(Revoked).freeze(FreezeNotApplicable)) + .hasToken(relationshipWith(TokenAssociationSpecs.VANILLA_TOKEN) + .kyc(KycNotApplicable) + .freeze(FreezeNotApplicable))); + } + + /* -- HSCS-PREC-010 from HTS Precompile Test Plan -- */ + private HapiSpec nestedAssociateWorksAsExpected() { + final AtomicReference accountID = new AtomicReference<>(); + final AtomicReference vanillaTokenID = new AtomicReference<>(); + + return propertyPreservingHapiSpec("nestedAssociateWorksAsExpected") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenDissociateFromAccount", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS).exposingCreatedIdTo(accountID::set), + cryptoCreate(TOKEN_TREASURY).balance(0L), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + uploadInitCode(INNER_CONTRACT, OUTER_CONTRACT), + contractCreate(INNER_CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + OUTER_CONTRACT, asHeadlongAddress(getNestedContractAddress(INNER_CONTRACT, spec))), + contractCall( + OUTER_CONTRACT, + "associateDissociateContractCall", + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get()))) + .alsoSigningWithFullPrefix(ACCOUNT) + .via("nestedAssociateTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(ResponseCodeEnum.SUCCESS)))) + .then( + childRecordsCheck( + "nestedAssociateTxn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS)))), + getAccountInfo(ACCOUNT).hasNoTokenRelationship(VANILLA_TOKEN)); + } + + @Override + protected Logger getResultsLogger() { + return log; + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractBurnHTSSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractBurnHTSSuite.java index 9dd4fe24a407..30864e6668e9 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractBurnHTSSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractBurnHTSSuite.java @@ -18,51 +18,21 @@ import static com.google.protobuf.ByteString.copyFromUtf8; import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; 
-import static com.hedera.services.bdd.spec.assertions.AssertUtils.inOrder; -import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; -import static com.hedera.services.bdd.spec.assertions.ContractLogAsserts.logWith; -import static com.hedera.services.bdd.spec.assertions.SomeFungibleTransfers.changingFungibleBalances; -import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; -import static com.hedera.services.bdd.spec.keys.KeyShape.DELEGATE_CONTRACT; -import static com.hedera.services.bdd.spec.keys.KeyShape.SIMPLE; -import static com.hedera.services.bdd.spec.keys.KeyShape.sigs; -import static com.hedera.services.bdd.spec.keys.SigControl.ON; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenUpdate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; -import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; -import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.fixedHbarFee; -import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; -import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; -import static com.hedera.services.bdd.suites.contract.Utils.asAddress; -import static com.hedera.services.bdd.suites.contract.Utils.asHexedAddress; -import static com.hedera.services.bdd.suites.contract.Utils.parsedToByteString; -import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_SENDER_ACCOUNT_BALANCE_FOR_CUSTOM_FEE; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.REVERTED_SUCCESS; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; import com.esaulpaugh.headlong.abi.Address; -import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; import com.hedera.services.bdd.spec.HapiPropertySource; import com.hedera.services.bdd.spec.HapiSpec; -import com.hedera.services.bdd.spec.assertions.AccountInfoAsserts; -import com.hedera.services.bdd.spec.keys.KeyShape; -import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; import com.hedera.services.bdd.suites.HapiSuite; import 
com.hederahashgraph.api.proto.java.TokenType; import java.math.BigInteger; @@ -83,10 +53,7 @@ public class ContractBurnHTSSuite extends HapiSuite { private static final String TOKEN = "Token"; private static final String TOKEN_TREASURY = "TokenTreasury"; private static final String MULTI_KEY = "purpose"; - private static final String CONTRACT_KEY = "Contract key"; - private static final String SUPPLY_KEY = "Supply key"; public static final String CREATION_TX = "creationTx"; - private static final String BURN_AFTER_NESTED_MINT_TX = "burnAfterNestedMint"; public static final String BURN_TOKEN_WITH_EVENT = "burnTokenWithEvent"; private static final String FIRST = "First!"; private static final String SECOND = "Second!"; @@ -108,297 +75,11 @@ public List getSpecsInSuite() { } List negativeSpecs() { - return List.of( - hscsPreC020RollbackBurnThatFailsAfterAPrecompileTransfer(), - burnFungibleV1andV2WithZeroAndNegativeValues(), - burnNonFungibleV1andV2WithNegativeValues()); + return List.of(burnFungibleV1andV2WithZeroAndNegativeValues(), burnNonFungibleV1andV2WithNegativeValues()); } List positiveSpecs() { - return List.of( - hscsPrec004TokenBurnOfFungibleTokenUnits(), - hscsPrec005TokenBurnOfNft(), - hscsPrec011BurnAfterNestedMint()); - } - - private HapiSpec hscsPrec004TokenBurnOfFungibleTokenUnits() { - final var gasUsed = 14085L; - return defaultHapiSpec("hscsPrec004TokenBurnOfFungibleTokenUnits") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(ALICE).balance(10 * ONE_HUNDRED_HBARS), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(50L) - .supplyKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .treasury(TOKEN_TREASURY), - uploadInitCode(THE_BURN_CONTRACT), - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - THE_BURN_CONTRACT, - asHeadlongAddress(asHexedAddress( - spec.registry().getTokenID(TOKEN)))) - .payingWith(ALICE) - .via(CREATION_TX) - .gas(GAS_TO_OFFER))), - getTxnRecord(CREATION_TX).logged()) - .when( - // Burning 0 amount for Fungible tokens should fail - contractCall(THE_BURN_CONTRACT, BURN_TOKEN_WITH_EVENT, BigInteger.ZERO, new long[0]) - .payingWith(ALICE) - .alsoSigningWithFullPrefix(MULTI_KEY) - .gas(GAS_TO_OFFER) - .via("burnZero") - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TOKEN, 50), - contractCall(THE_BURN_CONTRACT, BURN_TOKEN_WITH_EVENT, BigInteger.ONE, new long[0]) - .payingWith(ALICE) - .alsoSigningWithFullPrefix(MULTI_KEY) - .gas(GAS_TO_OFFER) - .via("burn"), - getTxnRecord("burn") - .hasPriority(recordWith() - .contractCallResult(resultWith() - .logs(inOrder(logWith() - .noData() - .withTopicsInOrder(List.of(parsedToByteString(49))))))), - getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TOKEN, 49), - childRecordsCheck( - "burn", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_BURN) - .withStatus(SUCCESS) - .withTotalSupply(49)) - .gasUsed(gasUsed)) - .newTotalSupply(49) - .tokenTransfers( - changingFungibleBalances().including(TOKEN, TOKEN_TREASURY, -1)) - .newTotalSupply(49)), - newKeyNamed(CONTRACT_KEY).shape(DELEGATE_CONTRACT.signedWith(THE_BURN_CONTRACT)), - tokenUpdate(TOKEN).supplyKey(CONTRACT_KEY), - contractCall(THE_BURN_CONTRACT, "burnToken", BigInteger.ONE, new long[0]) - .via("burn with contract key") - .gas(GAS_TO_OFFER), - childRecordsCheck( - "burn with contract key", - SUCCESS, - recordWith() - .status(SUCCESS) - 
.contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_BURN) - .withStatus(SUCCESS) - .withTotalSupply(48))) - .newTotalSupply(48) - .tokenTransfers( - changingFungibleBalances().including(TOKEN, TOKEN_TREASURY, -1)))) - .then(getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TOKEN, 48)); - } - - private HapiSpec hscsPrec005TokenBurnOfNft() { - final var gasUsed = 14085; - return defaultHapiSpec("hscsPrec005TokenBurnOfNft") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(ALICE).balance(10 * ONE_HUNDRED_HBARS), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(TOKEN) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .initialSupply(0L) - .supplyKey(MULTI_KEY) - .treasury(TOKEN_TREASURY), - mintToken(TOKEN, List.of(copyFromUtf8(FIRST))), - mintToken(TOKEN, List.of(copyFromUtf8(SECOND))), - uploadInitCode(THE_BURN_CONTRACT), - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - THE_BURN_CONTRACT, - asHeadlongAddress(asHexedAddress( - spec.registry().getTokenID(TOKEN)))) - .payingWith(ALICE) - .via(CREATION_TX) - .gas(GAS_TO_OFFER))), - getTxnRecord(CREATION_TX).logged()) - .when( - withOpContext((spec, opLog) -> { - final var serialNumbers = new long[] {1L}; - allRunFor( - spec, - contractCall(THE_BURN_CONTRACT, "burnToken", BigInteger.ZERO, serialNumbers) - .payingWith(ALICE) - .alsoSigningWithFullPrefix(MULTI_KEY) - .gas(GAS_TO_OFFER) - .via("burn")); - }), - childRecordsCheck( - "burn", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_BURN) - .withStatus(SUCCESS) - .withTotalSupply(1)) - .gasUsed(gasUsed)) - .newTotalSupply(1))) - .then(getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TOKEN, 1)); - } - - private HapiSpec hscsPrec011BurnAfterNestedMint() { - final var innerContract = "MintToken"; - final var outerContract = "NestedBurn"; - final var revisedKey = KeyShape.threshOf(1, SIMPLE, DELEGATE_CONTRACT, DELEGATE_CONTRACT); - - return defaultHapiSpec("hscsPrec011BurnAfterNestedMint") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(ALICE).balance(10 * ONE_HUNDRED_HBARS), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(50L) - .supplyKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .treasury(TOKEN_TREASURY), - uploadInitCode(innerContract, outerContract), - contractCreate(innerContract).gas(GAS_TO_OFFER), - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - outerContract, - asHeadlongAddress(getNestedContractAddress(innerContract, spec))) - .payingWith(ALICE) - .via(CREATION_TX) - .gas(GAS_TO_OFFER))), - getTxnRecord(CREATION_TX).logged()) - .when( - withOpContext((spec, opLog) -> allRunFor( - spec, - newKeyNamed(CONTRACT_KEY) - .shape(revisedKey.signedWith(sigs(ON, innerContract, outerContract))), - tokenUpdate(TOKEN).supplyKey(CONTRACT_KEY), - contractCall( - outerContract, - BURN_AFTER_NESTED_MINT_TX, - BigInteger.ONE, - HapiParserUtil.asHeadlongAddress(asAddress( - spec.registry().getTokenID(TOKEN))), - new long[0]) - .payingWith(ALICE) - .via(BURN_AFTER_NESTED_MINT_TX))), - childRecordsCheck( - BURN_AFTER_NESTED_MINT_TX, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_MINT) - .withStatus(SUCCESS) - .withTotalSupply(51) - .withSerialNumbers())) - .tokenTransfers( - changingFungibleBalances().including(TOKEN, TOKEN_TREASURY, 1)) - 
.newTotalSupply(51), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_BURN) - .withStatus(SUCCESS) - .withTotalSupply(50))) - .tokenTransfers( - changingFungibleBalances().including(TOKEN, TOKEN_TREASURY, -1)) - .newTotalSupply(50))) - .then(getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TOKEN, 50)); - } - - private HapiSpec hscsPreC020RollbackBurnThatFailsAfterAPrecompileTransfer() { - final var bob = "bob"; - final var feeCollector = "feeCollector"; - final var tokenWithHbarFee = "tokenWithHbarFee"; - final var theContract = "TransferAndBurn"; - - return defaultHapiSpec("hscsPreC020RollbackBurnThatFailsAfterAPrecompileTransfer") - .given( - newKeyNamed(SUPPLY_KEY), - cryptoCreate(ALICE).balance(ONE_HUNDRED_HBARS), - cryptoCreate(bob).balance(ONE_HUNDRED_HBARS), - cryptoCreate(TOKEN_TREASURY).balance(ONE_HUNDRED_HBARS), - cryptoCreate(feeCollector).balance(0L), - tokenCreate(tokenWithHbarFee) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .supplyKey(SUPPLY_KEY) - .initialSupply(0L) - .treasury(TOKEN_TREASURY) - .withCustom(fixedHbarFee(300 * ONE_HBAR, feeCollector)), - mintToken(tokenWithHbarFee, List.of(copyFromUtf8(FIRST))), - mintToken(tokenWithHbarFee, List.of(copyFromUtf8(SECOND))), - uploadInitCode(theContract), - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - theContract, - asHeadlongAddress(asHexedAddress( - spec.registry().getTokenID(tokenWithHbarFee)))) - .payingWith(bob) - .gas(GAS_TO_OFFER))), - tokenAssociate(ALICE, tokenWithHbarFee), - tokenAssociate(bob, tokenWithHbarFee), - tokenAssociate(theContract, tokenWithHbarFee), - cryptoTransfer(movingUnique(tokenWithHbarFee, 2L).between(TOKEN_TREASURY, ALICE)) - .payingWith(GENESIS), - getAccountInfo(feeCollector) - .has(AccountInfoAsserts.accountWith().balance(0L))) - .when( - withOpContext((spec, opLog) -> { - final var serialNumbers = new long[] {1L}; - allRunFor( - spec, - contractCall( - theContract, - "transferBurn", - HapiParserUtil.asHeadlongAddress(asAddress( - spec.registry().getAccountID(ALICE))), - HapiParserUtil.asHeadlongAddress(asAddress( - spec.registry().getAccountID(bob))), - BigInteger.ZERO, - 2L, - serialNumbers) - .alsoSigningWithFullPrefix(ALICE, SUPPLY_KEY) - .gas(GAS_TO_OFFER) - .via("contractCallTxn") - .hasKnownStatus(CONTRACT_REVERT_EXECUTED)); - }), - childRecordsCheck( - "contractCallTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(REVERTED_SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_BURN) - .withStatus(SUCCESS) - .withTotalSupply(1))), - recordWith() - .status(INSUFFICIENT_SENDER_ACCOUNT_BALANCE_FOR_CUSTOM_FEE) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .withStatus( - INSUFFICIENT_SENDER_ACCOUNT_BALANCE_FOR_CUSTOM_FEE))))) - .then( - getAccountBalance(bob).hasTokenBalance(tokenWithHbarFee, 0), - getAccountBalance(TOKEN_TREASURY).hasTokenBalance(tokenWithHbarFee, 1), - getAccountBalance(ALICE).hasTokenBalance(tokenWithHbarFee, 1)); + return List.of(); } private HapiSpec burnFungibleV1andV2WithZeroAndNegativeValues() { diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractBurnHTSV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractBurnHTSV1SecurityModelSuite.java new file mode 100644 index 000000000000..f2b317537f2c --- /dev/null +++ 
b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractBurnHTSV1SecurityModelSuite.java @@ -0,0 +1,434 @@ +/* + * Copyright (C) 2021-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.google.protobuf.ByteString.copyFromUtf8; +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.AssertUtils.inOrder; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.ContractLogAsserts.logWith; +import static com.hedera.services.bdd.spec.assertions.SomeFungibleTransfers.changingFungibleBalances; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.keys.KeyShape.DELEGATE_CONTRACT; +import static com.hedera.services.bdd.spec.keys.KeyShape.SIMPLE; +import static com.hedera.services.bdd.spec.keys.KeyShape.sigs; +import static com.hedera.services.bdd.spec.keys.SigControl.ON; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenUpdate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; +import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.fixedHbarFee; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overridingTwo; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.asHexedAddress; +import static 
com.hedera.services.bdd.suites.contract.Utils.parsedToByteString; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_SENDER_ACCOUNT_BALANCE_FOR_CUSTOM_FEE; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.REVERTED_SUCCESS; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; + +import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; +import com.hedera.services.bdd.spec.HapiPropertySource; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.assertions.AccountInfoAsserts; +import com.hedera.services.bdd.spec.keys.KeyShape; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hederahashgraph.api.proto.java.TokenType; +import java.math.BigInteger; +import java.util.List; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.jetbrains.annotations.NotNull; + +@SuppressWarnings("java:S1192") // "string literal should not be duplicated" - this rule makes test suites worse +public class ContractBurnHTSV1SecurityModelSuite extends HapiSuite { + private static final Logger log = LogManager.getLogger(ContractBurnHTSV1SecurityModelSuite.class); + + private static final long GAS_TO_OFFER = 4_000_000L; + public static final String THE_BURN_CONTRACT = "BurnToken"; + public static final String MULTIVERSION_BURN_CONTRACT = "MultiversionBurn"; + + public static final String ALICE = "Alice"; + private static final String TOKEN = "Token"; + private static final String TOKEN_TREASURY = "TokenTreasury"; + private static final String MULTI_KEY = "purpose"; + private static final String SUPPLY_KEY = "Supply key"; + private static final String CONTRACT_KEY = "Contract key"; + public static final String CREATION_TX = "creationTx"; + private static final String BURN_AFTER_NESTED_MINT_TX = "burnAfterNestedMint"; + public static final String BURN_TOKEN_WITH_EVENT = "burnTokenWithEvent"; + private static final String FIRST = "First!"; + private static final String SECOND = "Second!"; + + public static void main(String... 
args) { + new ContractBurnHTSV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + public List getSpecsInSuite() { + return allOf(positiveSpecs(), negativeSpecs()); + } + + List negativeSpecs() { + return List.of(hscsPreC020RollbackBurnThatFailsAfterAPrecompileTransfer()); + } + + List positiveSpecs() { + return List.of( + hscsPrec004TokenBurnOfFungibleTokenUnits(), + hscsPrec005TokenBurnOfNft(), + hscsPrec011BurnAfterNestedMint()); + } + + private HapiSpec hscsPreC020RollbackBurnThatFailsAfterAPrecompileTransfer() { + final var bob = "bob"; + final var feeCollector = "feeCollector"; + final var tokenWithHbarFee = "tokenWithHbarFee"; + final var theContract = "TransferAndBurn"; + + return propertyPreservingHapiSpec("hscsPreC020RollbackBurnThatFailsAfterAPrecompileTransfer") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenBurn,TokenCreate,TokenMint", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(SUPPLY_KEY), + cryptoCreate(ALICE).balance(ONE_HUNDRED_HBARS), + cryptoCreate(bob).balance(ONE_HUNDRED_HBARS), + cryptoCreate(TOKEN_TREASURY).balance(ONE_HUNDRED_HBARS), + cryptoCreate(feeCollector).balance(0L), + tokenCreate(tokenWithHbarFee) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .supplyKey(SUPPLY_KEY) + .initialSupply(0L) + .treasury(TOKEN_TREASURY) + .withCustom(fixedHbarFee(300 * ONE_HBAR, feeCollector)), + mintToken(tokenWithHbarFee, List.of(copyFromUtf8(FIRST))), + mintToken(tokenWithHbarFee, List.of(copyFromUtf8(SECOND))), + uploadInitCode(theContract), + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + theContract, + asHeadlongAddress(asHexedAddress( + spec.registry().getTokenID(tokenWithHbarFee)))) + .payingWith(bob) + .gas(GAS_TO_OFFER))), + tokenAssociate(ALICE, tokenWithHbarFee), + tokenAssociate(bob, tokenWithHbarFee), + tokenAssociate(theContract, tokenWithHbarFee), + cryptoTransfer(movingUnique(tokenWithHbarFee, 2L).between(TOKEN_TREASURY, ALICE)) + .payingWith(GENESIS), + getAccountInfo(feeCollector) + .has(AccountInfoAsserts.accountWith().balance(0L))) + .when( + withOpContext((spec, opLog) -> { + final var serialNumbers = new long[] {1L}; + allRunFor( + spec, + contractCall( + theContract, + "transferBurn", + HapiParserUtil.asHeadlongAddress(asAddress( + spec.registry().getAccountID(ALICE))), + HapiParserUtil.asHeadlongAddress(asAddress( + spec.registry().getAccountID(bob))), + BigInteger.ZERO, + 2L, + serialNumbers) + .alsoSigningWithFullPrefix(ALICE, SUPPLY_KEY) + .gas(GAS_TO_OFFER) + .via("contractCallTxn") + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)); + }), + childRecordsCheck( + "contractCallTxn", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(REVERTED_SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_BURN) + .withStatus(SUCCESS) + .withTotalSupply(1))), + recordWith() + .status(INSUFFICIENT_SENDER_ACCOUNT_BALANCE_FOR_CUSTOM_FEE) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .withStatus( + INSUFFICIENT_SENDER_ACCOUNT_BALANCE_FOR_CUSTOM_FEE))))) + .then( + getAccountBalance(bob).hasTokenBalance(tokenWithHbarFee, 0), + getAccountBalance(TOKEN_TREASURY).hasTokenBalance(tokenWithHbarFee, 1), + 
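/*
 * A minimal sketch of the spec skeleton this V1-security-model suite uses throughout (orientation
 * only; the spec name and the comma-separated function list vary per spec, everything else is the
 * operators already used in this file):
 *
 *   return propertyPreservingHapiSpec("someV1SecurityModelSpec")
 *           .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS)
 *           .given(
 *                   // temporarily restore V1-style top-level signature access for the listed HAPI functions
 *                   overridingTwo(
 *                           CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS,
 *                           "ContractCall,TokenBurn,TokenCreate",
 *                           CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS,
 *                           CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF),
 *                   ...)
 *           .when(...)
 *           .then(...);
 *
 * The preserving(...) call restores the overridden properties after the spec runs, consistent with
 * the suite reporting canRunConcurrent() == false while it mutates global dynamic properties.
 */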
getAccountBalance(ALICE).hasTokenBalance(tokenWithHbarFee, 1)); + } + + private HapiSpec hscsPrec004TokenBurnOfFungibleTokenUnits() { + final var gasUsed = 14085L; + return propertyPreservingHapiSpec("hscsPrec004TokenBurnOfFungibleTokenUnits") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenBurn,TokenCreate,TokenUpdate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(ALICE).balance(10 * ONE_HUNDRED_HBARS), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(50L) + .supplyKey(MULTI_KEY) + .adminKey(MULTI_KEY) + .treasury(TOKEN_TREASURY), + uploadInitCode(THE_BURN_CONTRACT), + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + THE_BURN_CONTRACT, + asHeadlongAddress(asHexedAddress( + spec.registry().getTokenID(TOKEN)))) + .payingWith(ALICE) + .via(CREATION_TX) + .gas(GAS_TO_OFFER))), + getTxnRecord(CREATION_TX).logged()) + .when( + // Burning 0 amount for Fungible tokens should fail + contractCall(THE_BURN_CONTRACT, BURN_TOKEN_WITH_EVENT, BigInteger.ZERO, new long[0]) + .payingWith(ALICE) + .alsoSigningWithFullPrefix(MULTI_KEY) + .gas(GAS_TO_OFFER) + .via("burnZero") + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TOKEN, 50), + contractCall(THE_BURN_CONTRACT, BURN_TOKEN_WITH_EVENT, BigInteger.ONE, new long[0]) + .payingWith(ALICE) + .alsoSigningWithFullPrefix(MULTI_KEY) + .gas(GAS_TO_OFFER) + .via("burn"), + getTxnRecord("burn") + .hasPriority(recordWith() + .contractCallResult(resultWith() + .logs(inOrder(logWith() + .noData() + .withTopicsInOrder(List.of(parsedToByteString(49))))))), + getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TOKEN, 49), + childRecordsCheck( + "burn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_BURN) + .withStatus(SUCCESS) + .withTotalSupply(49)) + .gasUsed(gasUsed)) + .newTotalSupply(49) + .tokenTransfers( + changingFungibleBalances().including(TOKEN, TOKEN_TREASURY, -1)) + .newTotalSupply(49)), + newKeyNamed(CONTRACT_KEY).shape(DELEGATE_CONTRACT.signedWith(THE_BURN_CONTRACT)), + tokenUpdate(TOKEN).supplyKey(CONTRACT_KEY), + contractCall(THE_BURN_CONTRACT, "burnToken", BigInteger.ONE, new long[0]) + .via("burn with contract key") + .gas(GAS_TO_OFFER), + childRecordsCheck( + "burn with contract key", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_BURN) + .withStatus(SUCCESS) + .withTotalSupply(48))) + .newTotalSupply(48) + .tokenTransfers( + changingFungibleBalances().including(TOKEN, TOKEN_TREASURY, -1)))) + .then(getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TOKEN, 48)); + } + + private HapiSpec hscsPrec005TokenBurnOfNft() { + final var gasUsed = 14085; + return propertyPreservingHapiSpec("hscsPrec005TokenBurnOfNft") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,TokenBurn,TokenCreate,TokenMint", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(ALICE).balance(10 * 
ONE_HUNDRED_HBARS), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .initialSupply(0L) + .supplyKey(MULTI_KEY) + .treasury(TOKEN_TREASURY), + mintToken(TOKEN, List.of(copyFromUtf8(FIRST))), + mintToken(TOKEN, List.of(copyFromUtf8(SECOND))), + uploadInitCode(THE_BURN_CONTRACT), + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + THE_BURN_CONTRACT, + asHeadlongAddress(asHexedAddress( + spec.registry().getTokenID(TOKEN)))) + .payingWith(ALICE) + .via(CREATION_TX) + .gas(GAS_TO_OFFER))), + getTxnRecord(CREATION_TX).logged()) + .when( + withOpContext((spec, opLog) -> { + final var serialNumbers = new long[] {1L}; + allRunFor( + spec, + contractCall(THE_BURN_CONTRACT, "burnToken", BigInteger.ZERO, serialNumbers) + .payingWith(ALICE) + .alsoSigningWithFullPrefix(MULTI_KEY) + .gas(GAS_TO_OFFER) + .via("burn")); + }), + childRecordsCheck( + "burn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_BURN) + .withStatus(SUCCESS) + .withTotalSupply(1)) + .gasUsed(gasUsed)) + .newTotalSupply(1))) + .then(getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TOKEN, 1)); + } + + private HapiSpec hscsPrec011BurnAfterNestedMint() { + final var innerContract = "MintToken"; + final var outerContract = "NestedBurn"; + final var revisedKey = KeyShape.threshOf(1, SIMPLE, DELEGATE_CONTRACT, DELEGATE_CONTRACT); + + return propertyPreservingHapiSpec("hscsPrec011BurnAfterNestedMint") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,TokenBurn,TokenCreate,TokenMint", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(ALICE).balance(10 * ONE_HUNDRED_HBARS), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(50L) + .supplyKey(MULTI_KEY) + .adminKey(MULTI_KEY) + .treasury(TOKEN_TREASURY), + uploadInitCode(innerContract, outerContract), + contractCreate(innerContract).gas(GAS_TO_OFFER), + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + outerContract, + asHeadlongAddress(getNestedContractAddress(innerContract, spec))) + .payingWith(ALICE) + .via(CREATION_TX) + .gas(GAS_TO_OFFER))), + getTxnRecord(CREATION_TX).logged()) + .when( + withOpContext((spec, opLog) -> allRunFor( + spec, + newKeyNamed(CONTRACT_KEY) + .shape(revisedKey.signedWith(sigs(ON, innerContract, outerContract))), + tokenUpdate(TOKEN).supplyKey(CONTRACT_KEY), + contractCall( + outerContract, + BURN_AFTER_NESTED_MINT_TX, + BigInteger.ONE, + HapiParserUtil.asHeadlongAddress(asAddress( + spec.registry().getTokenID(TOKEN))), + new long[0]) + .payingWith(ALICE) + .via(BURN_AFTER_NESTED_MINT_TX))), + childRecordsCheck( + BURN_AFTER_NESTED_MINT_TX, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_MINT) + .withStatus(SUCCESS) + .withTotalSupply(51) + .withSerialNumbers())) + .tokenTransfers( + changingFungibleBalances().including(TOKEN, TOKEN_TREASURY, 1)) + .newTotalSupply(51), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_BURN) + .withStatus(SUCCESS) + .withTotalSupply(50))) + .tokenTransfers( + 
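/*
 * Orientation sketch for hscsPrec011BurnAfterNestedMint, using only the operators shown just above:
 * the token's supply key is swapped to a 1-of-3 threshold key whose contract components are the two
 * test contracts, so the nested mint-then-burn is authorized by the contracts themselves rather than
 * by an extra top-level signature.
 *
 *   final var revisedKey = KeyShape.threshOf(1, SIMPLE, DELEGATE_CONTRACT, DELEGATE_CONTRACT);
 *   // run inside allRunFor(spec, ...) as in the spec above
 *   newKeyNamed(CONTRACT_KEY).shape(revisedKey.signedWith(sigs(ON, innerContract, outerContract))),
 *   tokenUpdate(TOKEN).supplyKey(CONTRACT_KEY),
 *
 * The childRecordsCheck that follows then expects two successful children: a HAPI_MINT record taking
 * total supply to 51 and a HAPI_BURN record taking it back to 50.
 */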
changingFungibleBalances().including(TOKEN, TOKEN_TREASURY, -1)) + .newTotalSupply(50))) + .then(getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TOKEN, 50)); + } + + @NotNull + private String getNestedContractAddress(String outerContract, HapiSpec spec) { + return HapiPropertySource.asHexedSolidityAddress(spec.registry().getContractId(outerContract)); + } + + @Override + protected Logger getResultsLogger() { + return log; + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractHTSSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractHTSSuite.java index 2af4628ff62c..1e7c7b24a882 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractHTSSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractHTSSuite.java @@ -16,64 +16,34 @@ package com.hedera.services.bdd.suites.contract.precompile; -import static com.google.protobuf.ByteString.copyFromUtf8; -import static com.hedera.services.bdd.spec.HapiPropertySource.asToken; import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; -import static com.hedera.services.bdd.spec.assertions.SomeFungibleTransfers.changingFungibleBalances; import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; -import static com.hedera.services.bdd.spec.assertions.TransferListAsserts.including; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractInfo; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; -import static com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoTransfer.tinyBarsFromTo; -import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.fixedHbarFee; -import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.fixedHtsFee; import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; -import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; import static 
com.hedera.services.bdd.suites.contract.Utils.asAddress; -import static com.hedera.services.bdd.suites.utils.MiscEETUtils.metadata; +import static com.hedera.services.bdd.suites.contract.Utils.getNestedContractAddress; import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_SENDER_ACCOUNT_BALANCE_FOR_CUSTOM_FEE; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_TOKEN_BALANCE; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.REVERTED_SUCCESS; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TRANSFERS_NOT_ZERO_SUM_FOR_TOKEN; -import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; -import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; import com.esaulpaugh.headlong.abi.Address; import com.hedera.services.bdd.spec.HapiSpec; -import com.hedera.services.bdd.spec.assertions.AccountInfoAsserts; -import com.hedera.services.bdd.spec.assertions.ContractInfoAsserts; -import com.hedera.services.bdd.spec.assertions.NonFungibleTransfers; import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; -import com.hedera.services.bdd.spec.transactions.token.TokenMovement; import com.hedera.services.bdd.suites.HapiSuite; -import com.hederahashgraph.api.proto.java.AccountID; -import com.hederahashgraph.api.proto.java.TokenID; -import com.hederahashgraph.api.proto.java.TokenSupplyType; import com.hederahashgraph.api.proto.java.TokenType; -import java.math.BigInteger; import java.util.List; -import java.util.concurrent.atomic.AtomicReference; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -85,19 +55,12 @@ public class ContractHTSSuite extends HapiSuite { private static final long GAS_TO_OFFER = 2_000_000L; private static final long TOTAL_SUPPLY = 1_000; - private static final long AMOUNT_TO_SEND = 10L; - private static final long CUSTOM_HBAR_FEE_AMOUNT = 100L; private static final String TOKEN_TREASURY = "treasury"; private static final String A_TOKEN = "TokenA"; - private static final String NFT = "nft"; private static final String ACCOUNT = "sender"; - private static final String FEE_COLLECTOR = "feeCollector"; private static final String RECEIVER = "receiver"; - private static final String SECOND_RECEIVER = "receiver2"; - - private static final String FEE_TOKEN = "feeToken"; private static final String UNIVERSAL_KEY = "multipurpose"; @@ -116,741 +79,11 @@ public List getSpecsInSuite() { } List negativeSpecs() { - return List.of(hscsPrec017RollbackAfterInsufficientBalance(), nonZeroTransfersFail()); + return List.of(nonZeroTransfersFail()); } List positiveSpecs() { - return List.of( - distributeMultipleTokens(), - depositAndWithdrawFungibleTokens(), - transferNft(), - transferMultipleNfts(), - tokenTransferFromFeeCollector(), - tokenTransferFromFeeCollectorStaticNestedCall(), - hbarTransferFromFeeCollector()); - } - - private HapiSpec hscsPrec017RollbackAfterInsufficientBalance() { - final var alice = "alice"; - final var bob = "bob"; - final var treasuryForToken = "treasuryForToken"; - final var feeCollector = "feeCollector"; - final var 
supplyKey = "supplyKey"; - final var tokenWithHbarFee = "tokenWithHbarFee"; - final var theContract = "TransferAmountAndToken"; - - return defaultHapiSpec("hscsPrec017RollbackAfterInsufficientBalance") - .given( - newKeyNamed(supplyKey), - cryptoCreate(alice).balance(7 * ONE_HBAR), - cryptoCreate(bob).balance(ONE_HUNDRED_HBARS), - cryptoCreate(treasuryForToken).balance(ONE_HUNDRED_HBARS), - cryptoCreate(feeCollector).balance(0L), - tokenCreate(tokenWithHbarFee) - .tokenType(NON_FUNGIBLE_UNIQUE) - .supplyKey(supplyKey) - .initialSupply(0L) - .treasury(treasuryForToken) - .withCustom(fixedHbarFee(4 * ONE_HBAR, feeCollector)), - mintToken(tokenWithHbarFee, List.of(copyFromUtf8("First!"))), - mintToken(tokenWithHbarFee, List.of(copyFromUtf8("Second!"))), - uploadInitCode(theContract), - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - theContract, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(tokenWithHbarFee)))))), - tokenAssociate(alice, tokenWithHbarFee), - tokenAssociate(bob, tokenWithHbarFee), - tokenAssociate(theContract, tokenWithHbarFee), - cryptoTransfer(movingUnique(tokenWithHbarFee, 1L).between(treasuryForToken, alice)) - .payingWith(GENESIS), - cryptoTransfer(movingUnique(tokenWithHbarFee, 2L).between(treasuryForToken, alice)) - .payingWith(GENESIS), - getAccountInfo(feeCollector) - .has(AccountInfoAsserts.accountWith().balance(0L))) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - theContract, - "transferToAddress", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(alice))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(bob))), - 1L, - 2L) - .payingWith(bob) - .alsoSigningWithFullPrefix(alice) - .gas(GAS_TO_OFFER) - .via("contractCallTxn") - .hasKnownStatus(CONTRACT_REVERT_EXECUTED)))) - .then( - childRecordsCheck( - "contractCallTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(REVERTED_SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))), - recordWith() - .status(INSUFFICIENT_SENDER_ACCOUNT_BALANCE_FOR_CUSTOM_FEE) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .withStatus( - INSUFFICIENT_SENDER_ACCOUNT_BALANCE_FOR_CUSTOM_FEE)))), - getAccountInfo(feeCollector) - .has(AccountInfoAsserts.accountWith().balance(0L))); - } - - private HapiSpec depositAndWithdrawFungibleTokens() { - final var theContract = "ZenosBank"; - - return defaultHapiSpec("depositAndWithdrawFungibleTokens") - .given( - newKeyNamed(UNIVERSAL_KEY), - cryptoCreate(RECEIVER), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(A_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .initialSupply(TOTAL_SUPPLY) - .treasury(TOKEN_TREASURY), - uploadInitCode(theContract), - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - theContract, - HapiParserUtil.asHeadlongAddress(asAddress( - spec.registry().getTokenID(A_TOKEN)))) - .via("creationTx"))), - tokenAssociate(DEFAULT_CONTRACT_SENDER, List.of(A_TOKEN)), - tokenAssociate(theContract, List.of(A_TOKEN)), - cryptoTransfer(moving(200, A_TOKEN).between(TOKEN_TREASURY, DEFAULT_CONTRACT_SENDER))) - .when( - // If we are using Ethereum transactions, the DEFAULT_CONTRACT_SENDER - // signature will have to - // be validated via EthTxSigs, because in any case only DEFAULT_PAYER signs - // this call - contractCall(theContract, "depositTokens", 50L) - .gas(GAS_TO_OFFER) - .via("zeno"), - contractCall(theContract, "depositTokens", 0L) - 
.gas(GAS_TO_OFFER) - .via("zeroTransfers"), - contractCall(theContract, "withdrawTokens") - .payingWith(RECEIVER) - .alsoSigningWithFullPrefix(theContract) - .gas(GAS_TO_OFFER) - .via("receiverTx") - // The depositTokens will associate the Ethereum - // DEFAULT_CONTRACT_SENDER; and this - // contract fails if the msg.sender is already associated - .refusingEthConversion()) - .then( - childRecordsCheck( - "zeno", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))) - .tokenTransfers(changingFungibleBalances() - .including(A_TOKEN, DEFAULT_CONTRACT_SENDER, -50L) - .including(A_TOKEN, theContract, 50L))), - childRecordsCheck( - "receiverTx", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))) - .tokenTransfers(changingFungibleBalances() - .including(A_TOKEN, theContract, -25L) - .including(A_TOKEN, RECEIVER, 25L)))); - } - - private HapiSpec distributeMultipleTokens() { - final var theSecondReceiver = "somebody2"; - - return defaultHapiSpec("DistributeMultipleTokens") - .given( - newKeyNamed(UNIVERSAL_KEY), - cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS), - cryptoCreate(RECEIVER), - cryptoCreate(theSecondReceiver), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(A_TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(TOTAL_SUPPLY) - .treasury(TOKEN_TREASURY), - uploadInitCode(VERSATILE_TRANSFERS, FEE_DISTRIBUTOR), - contractCreate(FEE_DISTRIBUTOR), - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - VERSATILE_TRANSFERS, - asHeadlongAddress(getNestedContractAddress(FEE_DISTRIBUTOR, spec))))), - tokenAssociate(ACCOUNT, List.of(A_TOKEN)), - tokenAssociate(VERSATILE_TRANSFERS, List.of(A_TOKEN)), - tokenAssociate(RECEIVER, List.of(A_TOKEN)), - tokenAssociate(theSecondReceiver, List.of(A_TOKEN)), - cryptoTransfer(moving(200, A_TOKEN).between(TOKEN_TREASURY, ACCOUNT))) - .when(withOpContext((spec, opLog) -> { - final var sender = asAddress(spec.registry().getAccountID(ACCOUNT)); - final var receiver1 = asAddress(spec.registry().getAccountID(RECEIVER)); - final var receiver2 = asAddress(spec.registry().getAccountID(theSecondReceiver)); - final var accounts = new Address[] { - HapiParserUtil.asHeadlongAddress(sender), - HapiParserUtil.asHeadlongAddress(receiver1), - HapiParserUtil.asHeadlongAddress(receiver2) - }; - final var amounts = new long[] {-10L, 5L, 5L}; - - allRunFor( - spec, - contractCall( - VERSATILE_TRANSFERS, - "distributeTokens", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(A_TOKEN))), - accounts, - amounts) - .alsoSigningWithFullPrefix(ACCOUNT) - .gas(GAS_TO_OFFER) - .via("distributeTx")); - })) - .then(childRecordsCheck( - "distributeTx", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))) - .tokenTransfers(changingFungibleBalances() - .including(A_TOKEN, ACCOUNT, -10L) - .including(A_TOKEN, RECEIVER, 5L) - .including(A_TOKEN, theSecondReceiver, 5L)))); - } - - private HapiSpec tokenTransferFromFeeCollector() { - return defaultHapiSpec("TokenTransferFromFeeCollector") - .given( - cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS).maxAutomaticTokenAssociations(10), - cryptoCreate(FEE_COLLECTOR), - 
cryptoCreate(RECEIVER).maxAutomaticTokenAssociations(10), - cryptoCreate(SECOND_RECEIVER), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(FEE_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .initialSupply(TOTAL_SUPPLY) - .treasury(TOKEN_TREASURY), - tokenAssociate(FEE_COLLECTOR, FEE_TOKEN), - tokenCreate(A_TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(TOTAL_SUPPLY) - .treasury(TOKEN_TREASURY) - .withCustom(fixedHtsFee(100L, FEE_TOKEN, FEE_COLLECTOR)), - tokenAssociate(ACCOUNT, A_TOKEN), - tokenAssociate(RECEIVER, A_TOKEN), - tokenAssociate(SECOND_RECEIVER, A_TOKEN), - cryptoTransfer(moving(TOTAL_SUPPLY, FEE_TOKEN).between(TOKEN_TREASURY, ACCOUNT)), - cryptoTransfer(moving(TOTAL_SUPPLY, A_TOKEN).between(TOKEN_TREASURY, ACCOUNT)), - uploadInitCode(VERSATILE_TRANSFERS, FEE_DISTRIBUTOR), - contractCreate(FEE_DISTRIBUTOR), - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - VERSATILE_TRANSFERS, - asHeadlongAddress(getNestedContractAddress(FEE_DISTRIBUTOR, spec)))))) - .when(withOpContext((spec, opLog) -> { - final var sender = asAddress(spec.registry().getAccountID(ACCOUNT)); - final var receiver1 = asAddress(spec.registry().getAccountID(RECEIVER)); - final var receiver2 = asAddress(spec.registry().getAccountID(SECOND_RECEIVER)); - final var accounts = new Address[] { - HapiParserUtil.asHeadlongAddress(sender), - HapiParserUtil.asHeadlongAddress(receiver1), - HapiParserUtil.asHeadlongAddress(receiver2) - }; - final var amounts = new long[] {-10L, 5L, 5L}; - - /* --- HSCS-PREC-009 --- */ - allRunFor( - spec, - contractCall( - VERSATILE_TRANSFERS, - "feeDistributionAfterTransfer", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(A_TOKEN))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(FEE_TOKEN))), - accounts, - amounts, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(FEE_COLLECTOR)))) - .gas(GAS_TO_OFFER) - .via("distributeTx") - .alsoSigningWithFullPrefix(ACCOUNT, FEE_COLLECTOR) - .hasKnownStatus(SUCCESS)); - - /* --- HSCS-PREC-018 --- */ - allRunFor( - spec, - contractCall( - VERSATILE_TRANSFERS, - "feeDistributionAfterTransfer", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(A_TOKEN))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(FEE_TOKEN))), - accounts, - amounts, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(FEE_COLLECTOR)))) - .alsoSigningWithFullPrefix(ACCOUNT) - .gas(GAS_TO_OFFER) - .via("missingSignatureTx") - .hasKnownStatus(CONTRACT_REVERT_EXECUTED)); - - /* --- HSCS-PREC-023 --- */ - allRunFor( - spec, - contractCall( - VERSATILE_TRANSFERS, - "feeDistributionAfterTransfer", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(A_TOKEN))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(FEE_TOKEN))), - accounts, - amounts, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(RECEIVER)))) - .alsoSigningWithFullPrefix(ACCOUNT, RECEIVER) - .gas(GAS_TO_OFFER) - .via("failingChildFrameTx") - .hasKnownStatus(CONTRACT_REVERT_EXECUTED)); - })) - .then( - childRecordsCheck( - "distributeTx", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))) - .tokenTransfers(changingFungibleBalances() - .including(A_TOKEN, ACCOUNT, -10L) - .including(A_TOKEN, RECEIVER, 5L) - .including(A_TOKEN, SECOND_RECEIVER, 5L)), - recordWith() - 
.status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))) - .tokenTransfers(changingFungibleBalances() - .including(FEE_TOKEN, FEE_COLLECTOR, -100L) - .including(FEE_TOKEN, ACCOUNT, 100L))), - childRecordsCheck( - "missingSignatureTx", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(REVERTED_SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))), - recordWith() - .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), - childRecordsCheck( - "failingChildFrameTx", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(REVERTED_SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))), - recordWith() - .status(INSUFFICIENT_TOKEN_BALANCE) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INSUFFICIENT_TOKEN_BALANCE)))), - getAccountBalance(ACCOUNT).hasTokenBalance(FEE_TOKEN, 1000), - getAccountBalance(FEE_COLLECTOR).hasTokenBalance(FEE_TOKEN, 0)); - } - - private HapiSpec tokenTransferFromFeeCollectorStaticNestedCall() { - return defaultHapiSpec("TokenTransferFromFeeCollectorStaticNestedCall") - .given( - cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS).maxAutomaticTokenAssociations(10), - cryptoCreate(FEE_COLLECTOR), - cryptoCreate(RECEIVER).maxAutomaticTokenAssociations(10), - cryptoCreate(SECOND_RECEIVER), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(FEE_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .initialSupply(TOTAL_SUPPLY) - .treasury(TOKEN_TREASURY), - tokenAssociate(FEE_COLLECTOR, FEE_TOKEN), - tokenCreate(A_TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(TOTAL_SUPPLY) - .treasury(TOKEN_TREASURY) - .withCustom(fixedHtsFee(100L, FEE_TOKEN, FEE_COLLECTOR)), - tokenAssociate(ACCOUNT, A_TOKEN), - tokenAssociate(RECEIVER, A_TOKEN), - tokenAssociate(SECOND_RECEIVER, A_TOKEN), - cryptoTransfer(moving(TOTAL_SUPPLY, FEE_TOKEN).between(TOKEN_TREASURY, ACCOUNT)), - cryptoTransfer(moving(TOTAL_SUPPLY, A_TOKEN).between(TOKEN_TREASURY, ACCOUNT)), - uploadInitCode(VERSATILE_TRANSFERS, FEE_DISTRIBUTOR), - contractCreate(FEE_DISTRIBUTOR), - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - VERSATILE_TRANSFERS, - asHeadlongAddress(getNestedContractAddress(FEE_DISTRIBUTOR, spec)))))) - .when(withOpContext((spec, opLog) -> { - final var sender = asAddress(spec.registry().getAccountID(ACCOUNT)); - final var receiver1 = asAddress(spec.registry().getAccountID(RECEIVER)); - final var receiver2 = asAddress(spec.registry().getAccountID(SECOND_RECEIVER)); - final var accounts = new Address[] { - HapiParserUtil.asHeadlongAddress(sender), - HapiParserUtil.asHeadlongAddress(receiver1), - HapiParserUtil.asHeadlongAddress(receiver2) - }; - final var amounts = new long[] {-10L, 5L, 5L}; - - /* --- HSCS-PREC-009 --- */ - allRunFor( - spec, - contractCall( - VERSATILE_TRANSFERS, - "feeDistributionAfterTransferStaticNestedCall", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(A_TOKEN))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(FEE_TOKEN))), - accounts, - amounts, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(FEE_COLLECTOR)))) - .alsoSigningWithFullPrefix(ACCOUNT, FEE_COLLECTOR) - .gas(GAS_TO_OFFER) - .via("distributeTx") - 
.hasKnownStatus(SUCCESS)); - - /* --- HSCS-PREC-018 --- */ - allRunFor( - spec, - contractCall( - VERSATILE_TRANSFERS, - "feeDistributionAfterTransferStaticNestedCall", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(A_TOKEN))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(FEE_TOKEN))), - accounts, - amounts, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(FEE_COLLECTOR)))) - .alsoSigningWithFullPrefix(ACCOUNT) - .gas(GAS_TO_OFFER) - .via("missingSignatureTx") - .hasKnownStatus(CONTRACT_REVERT_EXECUTED)); - - /* --- HSCS-PREC-023 --- */ - allRunFor( - spec, - contractCall( - VERSATILE_TRANSFERS, - "feeDistributionAfterTransferStaticNestedCall", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(A_TOKEN))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(FEE_TOKEN))), - accounts, - amounts, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(RECEIVER)))) - .alsoSigningWithFullPrefix(ACCOUNT, RECEIVER) - .gas(GAS_TO_OFFER) - .via("failingChildFrameTx") - .hasKnownStatus(CONTRACT_REVERT_EXECUTED)); - })) - .then( - childRecordsCheck( - "distributeTx", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))) - .tokenTransfers(changingFungibleBalances() - .including(A_TOKEN, ACCOUNT, -10L) - .including(A_TOKEN, RECEIVER, 5L) - .including(A_TOKEN, SECOND_RECEIVER, 5L)), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))) - .tokenTransfers(changingFungibleBalances() - .including(FEE_TOKEN, FEE_COLLECTOR, -100L) - .including(FEE_TOKEN, ACCOUNT, 100L))), - childRecordsCheck( - "missingSignatureTx", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(REVERTED_SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))), - recordWith() - .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), - childRecordsCheck( - "failingChildFrameTx", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(REVERTED_SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))), - recordWith() - .status(INSUFFICIENT_TOKEN_BALANCE) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INSUFFICIENT_TOKEN_BALANCE)))), - getAccountBalance(ACCOUNT).hasTokenBalance(FEE_TOKEN, 1000), - getAccountBalance(FEE_COLLECTOR).hasTokenBalance(FEE_TOKEN, 0)); - } - - /* --- HSCS-PREC-009 --- - * Contract is a custom hbar fee collector - * Contract that otherwise wouldn't have enough balance for a .transfer of hbars can perform the transfer after - * collecting the custom hbar fees from a nested token transfer through the HTS precompile - * */ - private HapiSpec hbarTransferFromFeeCollector() { - final var outerContract = "HbarFeeCollector"; - final var innerContract = "NestedHTSTransferrer"; - - final AtomicReference tokenID = new AtomicReference<>(); - final AtomicReference senderAccountID = new AtomicReference<>(); - final AtomicReference tokenReceiverAccountID = new AtomicReference<>(); - final AtomicReference hbarReceiverAccountID = new AtomicReference<>(); - - return defaultHapiSpec("HbarTransferFromFeeCollector") - 
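/*
 * Orientation sketch for the HSCS-PREC-009 flow described in the comment above (all names are the
 * spec's own): A_TOKEN carries fixedHbarFee(CUSTOM_HBAR_FEE_AMOUNT, outerContract), so when the
 * nested precompile transfer moves AMOUNT_TO_SEND units from ACCOUNT to RECEIVER, the outer contract
 * collects the hbar fee; it then forwards those freshly collected tinybars to SECOND_RECEIVER. The
 * spec below asserts exactly that with
 *
 *   tinyBarsFromTo(ACCOUNT, outerContract, CUSTOM_HBAR_FEE_AMOUNT)              // child record: fee collected
 *   tinyBarsFromTo(outerContract, SECOND_RECEIVER, CUSTOM_HBAR_FEE_AMOUNT)      // priority record: fee forwarded
 *
 * and by checking getAccountBalance(SECOND_RECEIVER).hasTinyBars(CUSTOM_HBAR_FEE_AMOUNT).
 */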
.given( - cryptoCreate(ACCOUNT) - .balance(10 * ONE_HUNDRED_HBARS) - .exposingCreatedIdTo(senderAccountID::set) - .maxAutomaticTokenAssociations(10), - cryptoCreate(RECEIVER) - .exposingCreatedIdTo(tokenReceiverAccountID::set) - .maxAutomaticTokenAssociations(10), - cryptoCreate(SECOND_RECEIVER) - .exposingCreatedIdTo(hbarReceiverAccountID::set) - .balance(0L), - cryptoCreate(TOKEN_TREASURY), - uploadInitCode(outerContract, innerContract), - contractCreate(innerContract), - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - outerContract, - asHeadlongAddress(getNestedContractAddress(innerContract, spec)))))) - .when(withOpContext((spec, opLog) -> { - allRunFor( - spec, - tokenCreate(A_TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(TOTAL_SUPPLY) - .treasury(TOKEN_TREASURY) - .exposingCreatedIdTo(id -> tokenID.set(asToken(id))) - .withCustom(fixedHbarFee(CUSTOM_HBAR_FEE_AMOUNT, outerContract)), - cryptoTransfer(moving(TOTAL_SUPPLY, A_TOKEN).between(TOKEN_TREASURY, ACCOUNT))); - allRunFor( - spec, - contractCall( - outerContract, - "feeDistributionAfterTransfer", - HapiParserUtil.asHeadlongAddress(asAddress(tokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(senderAccountID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(tokenReceiverAccountID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(hbarReceiverAccountID.get())), - AMOUNT_TO_SEND, - BigInteger.valueOf(CUSTOM_HBAR_FEE_AMOUNT)) - .alsoSigningWithFullPrefix(ACCOUNT) - .gas(GAS_TO_OFFER) - .via("distributeTx")); - })) - .then( - getTxnRecord("distributeTx") - .andAllChildRecords() - .logged() - .hasPriority(recordWith() - .transfers(including(tinyBarsFromTo( - outerContract, SECOND_RECEIVER, CUSTOM_HBAR_FEE_AMOUNT)))), - childRecordsCheck( - "distributeTx", - SUCCESS, - recordWith() - .status(SUCCESS) - .transfers(including( - tinyBarsFromTo(ACCOUNT, outerContract, CUSTOM_HBAR_FEE_AMOUNT))) - .tokenTransfers(changingFungibleBalances() - .including(A_TOKEN, ACCOUNT, -AMOUNT_TO_SEND) - .including(A_TOKEN, RECEIVER, AMOUNT_TO_SEND))), - getAccountBalance(SECOND_RECEIVER).hasTinyBars(CUSTOM_HBAR_FEE_AMOUNT)); - } - - private HapiSpec transferNft() { - return defaultHapiSpec("TransferNft") - .given( - newKeyNamed(UNIVERSAL_KEY), - cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS), - cryptoCreate(RECEIVER), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(NFT) - .tokenType(NON_FUNGIBLE_UNIQUE) - .supplyKey(UNIVERSAL_KEY) - .supplyType(TokenSupplyType.INFINITE) - .initialSupply(0) - .treasury(TOKEN_TREASURY), - tokenAssociate(ACCOUNT, NFT), - mintToken(NFT, List.of(metadata("firstMemo"), metadata("secondMemo"))), - uploadInitCode(VERSATILE_TRANSFERS, FEE_DISTRIBUTOR), - contractCreate(FEE_DISTRIBUTOR).maxAutomaticTokenAssociations(2), - getContractInfo(FEE_DISTRIBUTOR) - .has(ContractInfoAsserts.contractWith().maxAutoAssociations(2)) - .logged(), - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - VERSATILE_TRANSFERS, - asHeadlongAddress(getNestedContractAddress(FEE_DISTRIBUTOR, spec))))), - tokenAssociate(VERSATILE_TRANSFERS, List.of(NFT)), - tokenAssociate(RECEIVER, List.of(NFT)), - cryptoTransfer(TokenMovement.movingUnique(NFT, 1).between(TOKEN_TREASURY, ACCOUNT)) - .logged()) - .when(withOpContext((spec, opLog) -> { - final var tokenAddress = asAddress(spec.registry().getTokenID(NFT)); - final var sender = asAddress(spec.registry().getAccountID(ACCOUNT)); - final var receiver = asAddress(spec.registry().getAccountID(RECEIVER)); - - allRunFor( - spec, - 
contractCall( - VERSATILE_TRANSFERS, - "transferNft", - HapiParserUtil.asHeadlongAddress(tokenAddress), - HapiParserUtil.asHeadlongAddress(sender), - HapiParserUtil.asHeadlongAddress(receiver), - 1L) - .alsoSigningWithFullPrefix(ACCOUNT) - .gas(GAS_TO_OFFER) - .via("distributeTx")); - })) - .then( - getTokenInfo(NFT).hasTotalSupply(2), - getAccountInfo(RECEIVER).hasOwnedNfts(1), - getAccountBalance(RECEIVER).hasTokenBalance(NFT, 1), - getAccountInfo(ACCOUNT).hasOwnedNfts(0), - getAccountBalance(ACCOUNT).hasTokenBalance(NFT, 0), - childRecordsCheck( - "distributeTx", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))) - .tokenTransfers(NonFungibleTransfers.changingNFTBalances() - .including(NFT, ACCOUNT, RECEIVER, 1L)))); - } - - private HapiSpec transferMultipleNfts() { - return defaultHapiSpec("TransferMultipleNfts") - .given( - newKeyNamed(UNIVERSAL_KEY), - cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS), - cryptoCreate(RECEIVER), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(NFT) - .tokenType(NON_FUNGIBLE_UNIQUE) - .supplyKey(UNIVERSAL_KEY) - .supplyType(TokenSupplyType.INFINITE) - .initialSupply(0) - .treasury(TOKEN_TREASURY), - tokenAssociate(ACCOUNT, NFT), - mintToken(NFT, List.of(metadata("firstMemo"), metadata("secondMemo"))), - uploadInitCode(VERSATILE_TRANSFERS, FEE_DISTRIBUTOR), - contractCreate(FEE_DISTRIBUTOR), - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - VERSATILE_TRANSFERS, - asHeadlongAddress(getNestedContractAddress(FEE_DISTRIBUTOR, spec))))), - tokenAssociate(VERSATILE_TRANSFERS, List.of(NFT)), - tokenAssociate(RECEIVER, List.of(NFT)), - cryptoTransfer(TokenMovement.movingUnique(NFT, 1, 2).between(TOKEN_TREASURY, ACCOUNT))) - .when(withOpContext((spec, opLog) -> { - final var tokenAddress = asAddress(spec.registry().getTokenID(NFT)); - final var sender = asAddress(spec.registry().getAccountID(ACCOUNT)); - final var receiver = asAddress(spec.registry().getAccountID(RECEIVER)); - final var theSenders = new Address[] { - HapiParserUtil.asHeadlongAddress(sender), HapiParserUtil.asHeadlongAddress(sender) - }; - final var theReceivers = new Address[] { - HapiParserUtil.asHeadlongAddress(receiver), HapiParserUtil.asHeadlongAddress(receiver) - }; - final var theSerialNumbers = new long[] {1L, 2L}; - - allRunFor( - spec, - contractCall( - VERSATILE_TRANSFERS, - "transferNfts", - HapiParserUtil.asHeadlongAddress(tokenAddress), - theSenders, - theReceivers, - theSerialNumbers) - .alsoSigningWithFullPrefix(ACCOUNT) - .gas(GAS_TO_OFFER) - .via("distributeTx")); - })) - .then( - childRecordsCheck( - "distributeTx", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))) - .tokenTransfers(NonFungibleTransfers.changingNFTBalances() - .including(NFT, ACCOUNT, RECEIVER, 1L) - .including(NFT, ACCOUNT, RECEIVER, 2L))), - getTokenInfo(NFT).hasTotalSupply(2), - getAccountInfo(RECEIVER).hasOwnedNfts(2), - getAccountBalance(RECEIVER).hasTokenBalance(NFT, 2), - getAccountInfo(ACCOUNT).hasOwnedNfts(0), - getAccountBalance(ACCOUNT).hasTokenBalance(NFT, 0)); + return List.of(); } private HapiSpec nonZeroTransfersFail() { @@ -911,10 +144,6 @@ private HapiSpec nonZeroTransfersFail() { htsPrecompileResult().withStatus(TRANSFERS_NOT_ZERO_SUM_FOR_TOKEN))))); } - private String getNestedContractAddress(final String contract, final HapiSpec spec) { - return 
AssociatePrecompileSuite.getNestedContractAddress(contract, spec); - } - @Override protected Logger getResultsLogger() { return log; diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractHTSV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractHTSV1SecurityModelSuite.java new file mode 100644 index 000000000000..1a3a8885b114 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractHTSV1SecurityModelSuite.java @@ -0,0 +1,913 @@ +/* + * Copyright (C) 2021-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.google.protobuf.ByteString.copyFromUtf8; +import static com.hedera.services.bdd.spec.HapiPropertySource.asToken; +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.SomeFungibleTransfers.changingFungibleBalances; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.assertions.TransferListAsserts.including; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; +import static com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoTransfer.tinyBarsFromTo; +import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.fixedHbarFee; +import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.fixedHtsFee; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; +import static 
com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overridingTwo; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.getNestedContractAddress; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.utils.MiscEETUtils.metadata; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_SENDER_ACCOUNT_BALANCE_FOR_CUSTOM_FEE; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_TOKEN_BALANCE; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.REVERTED_SUCCESS; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; +import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; +import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; + +import com.esaulpaugh.headlong.abi.Address; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.assertions.AccountInfoAsserts; +import com.hedera.services.bdd.spec.assertions.ContractInfoAsserts; +import com.hedera.services.bdd.spec.assertions.NonFungibleTransfers; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.spec.transactions.token.TokenMovement; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hederahashgraph.api.proto.java.AccountID; +import com.hederahashgraph.api.proto.java.TokenID; +import com.hederahashgraph.api.proto.java.TokenSupplyType; +import com.hederahashgraph.api.proto.java.TokenType; +import java.math.BigInteger; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +@SuppressWarnings("java:S1192") // "string literal should not be duplicated" - this rule makes test suites worse +public class ContractHTSV1SecurityModelSuite extends HapiSuite { + private static final Logger log = LogManager.getLogger(ContractHTSV1SecurityModelSuite.class); + + public static final String VERSATILE_TRANSFERS = "VersatileTransfers"; + public static final String FEE_DISTRIBUTOR = "FeeDistributor"; + + private static final long GAS_TO_OFFER = 2_000_000L; + private static final long TOTAL_SUPPLY = 1_000; + private static final long AMOUNT_TO_SEND = 10L; + private static final long CUSTOM_HBAR_FEE_AMOUNT = 100L; + private static final String TOKEN_TREASURY = "treasury"; + + private static final String A_TOKEN = "TokenA"; + private static final String NFT = 
"nft"; + + private static final String ACCOUNT = "sender"; + private static final String FEE_COLLECTOR = "feeCollector"; + private static final String RECEIVER = "receiver"; + private static final String SECOND_RECEIVER = "receiver2"; + + private static final String FEE_TOKEN = "feeToken"; + + private static final String UNIVERSAL_KEY = "multipurpose"; + + public static void main(String... args) { + new ContractHTSV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + public List getSpecsInSuite() { + return allOf(positiveSpecs(), negativeSpecs()); + } + + List negativeSpecs() { + return List.of(hscsPrec017RollbackAfterInsufficientBalance()); + } + + List positiveSpecs() { + return List.of( + distributeMultipleTokens(), + depositAndWithdrawFungibleTokens(), + transferNft(), + transferMultipleNfts(), + tokenTransferFromFeeCollector(), + tokenTransferFromFeeCollectorStaticNestedCall(), + hbarTransferFromFeeCollector()); + } + + private HapiSpec hscsPrec017RollbackAfterInsufficientBalance() { + final var alice = "alice"; + final var bob = "bob"; + final var treasuryForToken = "treasuryForToken"; + final var feeCollector = "feeCollector"; + final var supplyKey = "supplyKey"; + final var tokenWithHbarFee = "tokenWithHbarFee"; + final var theContract = "TransferAmountAndToken"; + + return propertyPreservingHapiSpec("hscsPrec017RollbackAfterInsufficientBalance") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenMint", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(supplyKey), + cryptoCreate(alice).balance(7 * ONE_HBAR), + cryptoCreate(bob).balance(ONE_HUNDRED_HBARS), + cryptoCreate(treasuryForToken).balance(ONE_HUNDRED_HBARS), + cryptoCreate(feeCollector).balance(0L), + tokenCreate(tokenWithHbarFee) + .tokenType(NON_FUNGIBLE_UNIQUE) + .supplyKey(supplyKey) + .initialSupply(0L) + .treasury(treasuryForToken) + .withCustom(fixedHbarFee(4 * ONE_HBAR, feeCollector)), + mintToken(tokenWithHbarFee, List.of(copyFromUtf8("First!"))), + mintToken(tokenWithHbarFee, List.of(copyFromUtf8("Second!"))), + uploadInitCode(theContract), + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + theContract, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(tokenWithHbarFee)))))), + tokenAssociate(alice, tokenWithHbarFee), + tokenAssociate(bob, tokenWithHbarFee), + tokenAssociate(theContract, tokenWithHbarFee), + cryptoTransfer(movingUnique(tokenWithHbarFee, 1L).between(treasuryForToken, alice)) + .payingWith(GENESIS), + cryptoTransfer(movingUnique(tokenWithHbarFee, 2L).between(treasuryForToken, alice)) + .payingWith(GENESIS), + getAccountInfo(feeCollector) + .has(AccountInfoAsserts.accountWith().balance(0L))) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + theContract, + "transferToAddress", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(alice))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(bob))), + 1L, + 2L) + .payingWith(bob) + .alsoSigningWithFullPrefix(alice) + .gas(GAS_TO_OFFER) + .via("contractCallTxn") + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)))) + .then( + childRecordsCheck( + "contractCallTxn", + CONTRACT_REVERT_EXECUTED, + recordWith() + 
.status(REVERTED_SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))), + recordWith() + .status(INSUFFICIENT_SENDER_ACCOUNT_BALANCE_FOR_CUSTOM_FEE) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .withStatus( + INSUFFICIENT_SENDER_ACCOUNT_BALANCE_FOR_CUSTOM_FEE)))), + getAccountInfo(feeCollector) + .has(AccountInfoAsserts.accountWith().balance(0L))); + } + + private HapiSpec depositAndWithdrawFungibleTokens() { + final var theContract = "ZenosBank"; + + return propertyPreservingHapiSpec("depositAndWithdrawFungibleTokens") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "CryptoTransfer,TokenAssociateToAccount,TokenCreate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(UNIVERSAL_KEY), + cryptoCreate(RECEIVER), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(A_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .initialSupply(TOTAL_SUPPLY) + .treasury(TOKEN_TREASURY), + uploadInitCode(theContract), + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + theContract, + HapiParserUtil.asHeadlongAddress(asAddress( + spec.registry().getTokenID(A_TOKEN)))) + .via("creationTx"))), + tokenAssociate(DEFAULT_CONTRACT_SENDER, List.of(A_TOKEN)), + tokenAssociate(theContract, List.of(A_TOKEN)), + cryptoTransfer(moving(200, A_TOKEN).between(TOKEN_TREASURY, DEFAULT_CONTRACT_SENDER))) + .when( + // If we are using Ethereum transactions, the DEFAULT_CONTRACT_SENDER + // signature will have to + // be validated via EthTxSigs, because in any case only DEFAULT_PAYER signs + // this call + contractCall(theContract, "depositTokens", 50L) + .gas(GAS_TO_OFFER) + .via("zeno"), + contractCall(theContract, "depositTokens", 0L) + .gas(GAS_TO_OFFER) + .via("zeroTransfers"), + contractCall(theContract, "withdrawTokens") + .payingWith(RECEIVER) + .alsoSigningWithFullPrefix(theContract) + .gas(GAS_TO_OFFER) + .via("receiverTx") + // The depositTokens will associate the Ethereum + // DEFAULT_CONTRACT_SENDER; and this + // contract fails if the msg.sender is already associated + .refusingEthConversion()) + .then( + childRecordsCheck( + "zeno", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))) + .tokenTransfers(changingFungibleBalances() + .including(A_TOKEN, DEFAULT_CONTRACT_SENDER, -50L) + .including(A_TOKEN, theContract, 50L))), + childRecordsCheck( + "receiverTx", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))) + .tokenTransfers(changingFungibleBalances() + .including(A_TOKEN, theContract, -25L) + .including(A_TOKEN, RECEIVER, 25L)))); + } + + private HapiSpec distributeMultipleTokens() { + final var theSecondReceiver = "somebody2"; + + return propertyPreservingHapiSpec("distributeMultipleTokens") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + 
newKeyNamed(UNIVERSAL_KEY), + cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS), + cryptoCreate(RECEIVER), + cryptoCreate(theSecondReceiver), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(A_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(TOTAL_SUPPLY) + .treasury(TOKEN_TREASURY), + uploadInitCode(VERSATILE_TRANSFERS, FEE_DISTRIBUTOR), + contractCreate(FEE_DISTRIBUTOR), + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + VERSATILE_TRANSFERS, + asHeadlongAddress(getNestedContractAddress(FEE_DISTRIBUTOR, spec))))), + tokenAssociate(ACCOUNT, List.of(A_TOKEN)), + tokenAssociate(VERSATILE_TRANSFERS, List.of(A_TOKEN)), + tokenAssociate(RECEIVER, List.of(A_TOKEN)), + tokenAssociate(theSecondReceiver, List.of(A_TOKEN)), + cryptoTransfer(moving(200, A_TOKEN).between(TOKEN_TREASURY, ACCOUNT))) + .when(withOpContext((spec, opLog) -> { + final var sender = asAddress(spec.registry().getAccountID(ACCOUNT)); + final var receiver1 = asAddress(spec.registry().getAccountID(RECEIVER)); + final var receiver2 = asAddress(spec.registry().getAccountID(theSecondReceiver)); + final var accounts = new Address[] { + HapiParserUtil.asHeadlongAddress(sender), + HapiParserUtil.asHeadlongAddress(receiver1), + HapiParserUtil.asHeadlongAddress(receiver2) + }; + final var amounts = new long[] {-10L, 5L, 5L}; + + allRunFor( + spec, + contractCall( + VERSATILE_TRANSFERS, + "distributeTokens", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(A_TOKEN))), + accounts, + amounts) + .alsoSigningWithFullPrefix(ACCOUNT) + .gas(GAS_TO_OFFER) + .via("distributeTx")); + })) + .then(childRecordsCheck( + "distributeTx", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))) + .tokenTransfers(changingFungibleBalances() + .including(A_TOKEN, ACCOUNT, -10L) + .including(A_TOKEN, RECEIVER, 5L) + .including(A_TOKEN, theSecondReceiver, 5L)))); + } + + private HapiSpec tokenTransferFromFeeCollector() { + return propertyPreservingHapiSpec("tokenTransferFromFeeCollector") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS).maxAutomaticTokenAssociations(10), + cryptoCreate(FEE_COLLECTOR), + cryptoCreate(RECEIVER).maxAutomaticTokenAssociations(10), + cryptoCreate(SECOND_RECEIVER), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(FEE_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .initialSupply(TOTAL_SUPPLY) + .treasury(TOKEN_TREASURY), + tokenAssociate(FEE_COLLECTOR, FEE_TOKEN), + tokenCreate(A_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(TOTAL_SUPPLY) + .treasury(TOKEN_TREASURY) + .withCustom(fixedHtsFee(100L, FEE_TOKEN, FEE_COLLECTOR)), + tokenAssociate(ACCOUNT, A_TOKEN), + tokenAssociate(RECEIVER, A_TOKEN), + tokenAssociate(SECOND_RECEIVER, A_TOKEN), + cryptoTransfer(moving(TOTAL_SUPPLY, FEE_TOKEN).between(TOKEN_TREASURY, ACCOUNT)), + cryptoTransfer(moving(TOTAL_SUPPLY, A_TOKEN).between(TOKEN_TREASURY, ACCOUNT)), + uploadInitCode(VERSATILE_TRANSFERS, FEE_DISTRIBUTOR), + contractCreate(FEE_DISTRIBUTOR), + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + VERSATILE_TRANSFERS, + asHeadlongAddress(getNestedContractAddress(FEE_DISTRIBUTOR, 
spec)))))) + .when(withOpContext((spec, opLog) -> { + final var sender = asAddress(spec.registry().getAccountID(ACCOUNT)); + final var receiver1 = asAddress(spec.registry().getAccountID(RECEIVER)); + final var receiver2 = asAddress(spec.registry().getAccountID(SECOND_RECEIVER)); + final var accounts = new Address[] { + HapiParserUtil.asHeadlongAddress(sender), + HapiParserUtil.asHeadlongAddress(receiver1), + HapiParserUtil.asHeadlongAddress(receiver2) + }; + final var amounts = new long[] {-10L, 5L, 5L}; + + /* --- HSCS-PREC-009 --- */ + allRunFor( + spec, + contractCall( + VERSATILE_TRANSFERS, + "feeDistributionAfterTransfer", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(A_TOKEN))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FEE_TOKEN))), + accounts, + amounts, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(FEE_COLLECTOR)))) + .gas(GAS_TO_OFFER) + .via("distributeTx") + .alsoSigningWithFullPrefix(ACCOUNT, FEE_COLLECTOR) + .hasKnownStatus(SUCCESS)); + + /* --- HSCS-PREC-018 --- */ + allRunFor( + spec, + contractCall( + VERSATILE_TRANSFERS, + "feeDistributionAfterTransfer", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(A_TOKEN))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FEE_TOKEN))), + accounts, + amounts, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(FEE_COLLECTOR)))) + .alsoSigningWithFullPrefix(ACCOUNT) + .gas(GAS_TO_OFFER) + .via("missingSignatureTx") + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)); + + /* --- HSCS-PREC-023 --- */ + allRunFor( + spec, + contractCall( + VERSATILE_TRANSFERS, + "feeDistributionAfterTransfer", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(A_TOKEN))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FEE_TOKEN))), + accounts, + amounts, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(RECEIVER)))) + .alsoSigningWithFullPrefix(ACCOUNT, RECEIVER) + .gas(GAS_TO_OFFER) + .via("failingChildFrameTx") + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)); + })) + .then( + childRecordsCheck( + "distributeTx", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))) + .tokenTransfers(changingFungibleBalances() + .including(A_TOKEN, ACCOUNT, -10L) + .including(A_TOKEN, RECEIVER, 5L) + .including(A_TOKEN, SECOND_RECEIVER, 5L)), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))) + .tokenTransfers(changingFungibleBalances() + .including(FEE_TOKEN, FEE_COLLECTOR, -100L) + .including(FEE_TOKEN, ACCOUNT, 100L))), + childRecordsCheck( + "missingSignatureTx", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(REVERTED_SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))), + recordWith() + .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), + childRecordsCheck( + "failingChildFrameTx", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(REVERTED_SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))), + recordWith() + .status(INSUFFICIENT_TOKEN_BALANCE) + .contractCallResult(resultWith() + 
.contractCallResult( + htsPrecompileResult().withStatus(INSUFFICIENT_TOKEN_BALANCE)))), + getAccountBalance(ACCOUNT).hasTokenBalance(FEE_TOKEN, 1000), + getAccountBalance(FEE_COLLECTOR).hasTokenBalance(FEE_TOKEN, 0)); + } + + private HapiSpec tokenTransferFromFeeCollectorStaticNestedCall() { + return propertyPreservingHapiSpec("tokenTransferFromFeeCollectorStaticNestedCall") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS).maxAutomaticTokenAssociations(10), + cryptoCreate(FEE_COLLECTOR), + cryptoCreate(RECEIVER).maxAutomaticTokenAssociations(10), + cryptoCreate(SECOND_RECEIVER), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(FEE_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .initialSupply(TOTAL_SUPPLY) + .treasury(TOKEN_TREASURY), + tokenAssociate(FEE_COLLECTOR, FEE_TOKEN), + tokenCreate(A_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(TOTAL_SUPPLY) + .treasury(TOKEN_TREASURY) + .withCustom(fixedHtsFee(100L, FEE_TOKEN, FEE_COLLECTOR)), + tokenAssociate(ACCOUNT, A_TOKEN), + tokenAssociate(RECEIVER, A_TOKEN), + tokenAssociate(SECOND_RECEIVER, A_TOKEN), + cryptoTransfer(moving(TOTAL_SUPPLY, FEE_TOKEN).between(TOKEN_TREASURY, ACCOUNT)), + cryptoTransfer(moving(TOTAL_SUPPLY, A_TOKEN).between(TOKEN_TREASURY, ACCOUNT)), + uploadInitCode(VERSATILE_TRANSFERS, FEE_DISTRIBUTOR), + contractCreate(FEE_DISTRIBUTOR), + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + VERSATILE_TRANSFERS, + asHeadlongAddress(getNestedContractAddress(FEE_DISTRIBUTOR, spec)))))) + .when(withOpContext((spec, opLog) -> { + final var sender = asAddress(spec.registry().getAccountID(ACCOUNT)); + final var receiver1 = asAddress(spec.registry().getAccountID(RECEIVER)); + final var receiver2 = asAddress(spec.registry().getAccountID(SECOND_RECEIVER)); + final var accounts = new Address[] { + HapiParserUtil.asHeadlongAddress(sender), + HapiParserUtil.asHeadlongAddress(receiver1), + HapiParserUtil.asHeadlongAddress(receiver2) + }; + final var amounts = new long[] {-10L, 5L, 5L}; + + /* --- HSCS-PREC-009 --- */ + allRunFor( + spec, + contractCall( + VERSATILE_TRANSFERS, + "feeDistributionAfterTransferStaticNestedCall", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(A_TOKEN))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FEE_TOKEN))), + accounts, + amounts, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(FEE_COLLECTOR)))) + .alsoSigningWithFullPrefix(ACCOUNT, FEE_COLLECTOR) + .gas(GAS_TO_OFFER) + .via("distributeTx") + .hasKnownStatus(SUCCESS)); + + /* --- HSCS-PREC-018 --- */ + allRunFor( + spec, + contractCall( + VERSATILE_TRANSFERS, + "feeDistributionAfterTransferStaticNestedCall", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(A_TOKEN))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FEE_TOKEN))), + accounts, + amounts, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(FEE_COLLECTOR)))) + .alsoSigningWithFullPrefix(ACCOUNT) + .gas(GAS_TO_OFFER) + .via("missingSignatureTx") + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)); + + /* --- HSCS-PREC-023 --- */ + allRunFor( + spec, + contractCall( + VERSATILE_TRANSFERS, + 
"feeDistributionAfterTransferStaticNestedCall", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(A_TOKEN))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FEE_TOKEN))), + accounts, + amounts, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(RECEIVER)))) + .alsoSigningWithFullPrefix(ACCOUNT, RECEIVER) + .gas(GAS_TO_OFFER) + .via("failingChildFrameTx") + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)); + })) + .then( + childRecordsCheck( + "distributeTx", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))) + .tokenTransfers(changingFungibleBalances() + .including(A_TOKEN, ACCOUNT, -10L) + .including(A_TOKEN, RECEIVER, 5L) + .including(A_TOKEN, SECOND_RECEIVER, 5L)), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))) + .tokenTransfers(changingFungibleBalances() + .including(FEE_TOKEN, FEE_COLLECTOR, -100L) + .including(FEE_TOKEN, ACCOUNT, 100L))), + childRecordsCheck( + "missingSignatureTx", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(REVERTED_SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))), + recordWith() + .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), + childRecordsCheck( + "failingChildFrameTx", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(REVERTED_SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))), + recordWith() + .status(INSUFFICIENT_TOKEN_BALANCE) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(INSUFFICIENT_TOKEN_BALANCE)))), + getAccountBalance(ACCOUNT).hasTokenBalance(FEE_TOKEN, 1000), + getAccountBalance(FEE_COLLECTOR).hasTokenBalance(FEE_TOKEN, 0)); + } + + /* --- HSCS-PREC-009 --- + * Contract is a custom hbar fee collector + * Contract that otherwise wouldn't have enough balance for a transfer of hbars can perform the transfer after + * collecting the custom hbar fees from a nested token transfer through the HTS precompile + * */ + private HapiSpec hbarTransferFromFeeCollector() { + final var outerContract = "HbarFeeCollector"; + final var innerContract = "NestedHTSTransferrer"; + + final AtomicReference tokenID = new AtomicReference<>(); + final AtomicReference senderAccountID = new AtomicReference<>(); + final AtomicReference tokenReceiverAccountID = new AtomicReference<>(); + final AtomicReference hbarReceiverAccountID = new AtomicReference<>(); + + return propertyPreservingHapiSpec("hbarTransferFromFeeCollector") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "CryptoTransfer", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(ACCOUNT) + .balance(10 * ONE_HUNDRED_HBARS) + .exposingCreatedIdTo(senderAccountID::set) + .maxAutomaticTokenAssociations(10), + cryptoCreate(RECEIVER) + .exposingCreatedIdTo(tokenReceiverAccountID::set) + .maxAutomaticTokenAssociations(10), + cryptoCreate(SECOND_RECEIVER) + .exposingCreatedIdTo(hbarReceiverAccountID::set) + .balance(0L), + cryptoCreate(TOKEN_TREASURY), + 
uploadInitCode(outerContract, innerContract), + contractCreate(innerContract), + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + outerContract, + asHeadlongAddress(getNestedContractAddress(innerContract, spec)))))) + .when(withOpContext((spec, opLog) -> { + allRunFor( + spec, + tokenCreate(A_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(TOTAL_SUPPLY) + .treasury(TOKEN_TREASURY) + .exposingCreatedIdTo(id -> tokenID.set(asToken(id))) + .withCustom(fixedHbarFee(CUSTOM_HBAR_FEE_AMOUNT, outerContract)), + cryptoTransfer(moving(TOTAL_SUPPLY, A_TOKEN).between(TOKEN_TREASURY, ACCOUNT))); + allRunFor( + spec, + contractCall( + outerContract, + "feeDistributionAfterTransfer", + HapiParserUtil.asHeadlongAddress(asAddress(tokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(senderAccountID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(tokenReceiverAccountID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(hbarReceiverAccountID.get())), + AMOUNT_TO_SEND, + BigInteger.valueOf(CUSTOM_HBAR_FEE_AMOUNT)) + .alsoSigningWithFullPrefix(ACCOUNT) + .gas(GAS_TO_OFFER) + .via("distributeTx")); + })) + .then( + getTxnRecord("distributeTx") + .andAllChildRecords() + .logged() + .hasPriority(recordWith() + .transfers(including(tinyBarsFromTo( + outerContract, SECOND_RECEIVER, CUSTOM_HBAR_FEE_AMOUNT)))), + childRecordsCheck( + "distributeTx", + SUCCESS, + recordWith() + .status(SUCCESS) + .transfers(including( + tinyBarsFromTo(ACCOUNT, outerContract, CUSTOM_HBAR_FEE_AMOUNT))) + .tokenTransfers(changingFungibleBalances() + .including(A_TOKEN, ACCOUNT, -AMOUNT_TO_SEND) + .including(A_TOKEN, RECEIVER, AMOUNT_TO_SEND))), + getAccountBalance(SECOND_RECEIVER).hasTinyBars(CUSTOM_HBAR_FEE_AMOUNT)); + } + + private HapiSpec transferNft() { + return propertyPreservingHapiSpec("transferNft") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenMint", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(UNIVERSAL_KEY), + cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS), + cryptoCreate(RECEIVER), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(NFT) + .tokenType(NON_FUNGIBLE_UNIQUE) + .supplyKey(UNIVERSAL_KEY) + .supplyType(TokenSupplyType.INFINITE) + .initialSupply(0) + .treasury(TOKEN_TREASURY), + tokenAssociate(ACCOUNT, NFT), + mintToken(NFT, List.of(metadata("firstMemo"), metadata("secondMemo"))), + uploadInitCode(VERSATILE_TRANSFERS, FEE_DISTRIBUTOR), + contractCreate(FEE_DISTRIBUTOR).maxAutomaticTokenAssociations(2), + getContractInfo(FEE_DISTRIBUTOR) + .has(ContractInfoAsserts.contractWith().maxAutoAssociations(2)) + .logged(), + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + VERSATILE_TRANSFERS, + asHeadlongAddress(getNestedContractAddress(FEE_DISTRIBUTOR, spec))))), + tokenAssociate(VERSATILE_TRANSFERS, List.of(NFT)), + tokenAssociate(RECEIVER, List.of(NFT)), + cryptoTransfer(TokenMovement.movingUnique(NFT, 1).between(TOKEN_TREASURY, ACCOUNT)) + .logged()) + .when(withOpContext((spec, opLog) -> { + final var tokenAddress = asAddress(spec.registry().getTokenID(NFT)); + final var sender = asAddress(spec.registry().getAccountID(ACCOUNT)); + final var receiver = asAddress(spec.registry().getAccountID(RECEIVER)); + + allRunFor( + spec, + contractCall( + VERSATILE_TRANSFERS, + "transferNft", + 
HapiParserUtil.asHeadlongAddress(tokenAddress), + HapiParserUtil.asHeadlongAddress(sender), + HapiParserUtil.asHeadlongAddress(receiver), + 1L) + .alsoSigningWithFullPrefix(ACCOUNT) + .gas(GAS_TO_OFFER) + .via("distributeTx")); + })) + .then( + getTokenInfo(NFT).hasTotalSupply(2), + getAccountInfo(RECEIVER).hasOwnedNfts(1), + getAccountBalance(RECEIVER).hasTokenBalance(NFT, 1), + getAccountInfo(ACCOUNT).hasOwnedNfts(0), + getAccountBalance(ACCOUNT).hasTokenBalance(NFT, 0), + childRecordsCheck( + "distributeTx", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))) + .tokenTransfers(NonFungibleTransfers.changingNFTBalances() + .including(NFT, ACCOUNT, RECEIVER, 1L)))); + } + + private HapiSpec transferMultipleNfts() { + return propertyPreservingHapiSpec("transferMultipleNfts") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenMint", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(UNIVERSAL_KEY), + cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS), + cryptoCreate(RECEIVER), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(NFT) + .tokenType(NON_FUNGIBLE_UNIQUE) + .supplyKey(UNIVERSAL_KEY) + .supplyType(TokenSupplyType.INFINITE) + .initialSupply(0) + .treasury(TOKEN_TREASURY), + tokenAssociate(ACCOUNT, NFT), + mintToken(NFT, List.of(metadata("firstMemo"), metadata("secondMemo"))), + uploadInitCode(VERSATILE_TRANSFERS, FEE_DISTRIBUTOR), + contractCreate(FEE_DISTRIBUTOR), + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + VERSATILE_TRANSFERS, + asHeadlongAddress(getNestedContractAddress(FEE_DISTRIBUTOR, spec))))), + tokenAssociate(VERSATILE_TRANSFERS, List.of(NFT)), + tokenAssociate(RECEIVER, List.of(NFT)), + cryptoTransfer(TokenMovement.movingUnique(NFT, 1, 2).between(TOKEN_TREASURY, ACCOUNT))) + .when(withOpContext((spec, opLog) -> { + final var tokenAddress = asAddress(spec.registry().getTokenID(NFT)); + final var sender = asAddress(spec.registry().getAccountID(ACCOUNT)); + final var receiver = asAddress(spec.registry().getAccountID(RECEIVER)); + final var theSenders = new Address[] { + HapiParserUtil.asHeadlongAddress(sender), HapiParserUtil.asHeadlongAddress(sender) + }; + final var theReceivers = new Address[] { + HapiParserUtil.asHeadlongAddress(receiver), HapiParserUtil.asHeadlongAddress(receiver) + }; + final var theSerialNumbers = new long[] {1L, 2L}; + + allRunFor( + spec, + contractCall( + VERSATILE_TRANSFERS, + "transferNfts", + HapiParserUtil.asHeadlongAddress(tokenAddress), + theSenders, + theReceivers, + theSerialNumbers) + .alsoSigningWithFullPrefix(ACCOUNT) + .gas(GAS_TO_OFFER) + .via("distributeTx")); + })) + .then( + childRecordsCheck( + "distributeTx", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))) + .tokenTransfers(NonFungibleTransfers.changingNFTBalances() + .including(NFT, ACCOUNT, RECEIVER, 1L) + .including(NFT, ACCOUNT, RECEIVER, 2L))), + getTokenInfo(NFT).hasTotalSupply(2), + getAccountInfo(RECEIVER).hasOwnedNfts(2), + getAccountBalance(RECEIVER).hasTokenBalance(NFT, 2), + getAccountInfo(ACCOUNT).hasOwnedNfts(0), + getAccountBalance(ACCOUNT).hasTokenBalance(NFT, 0)); + } + + @Override + protected Logger 
getResultsLogger() { + return log; + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractKeysHTSSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractKeysHTSSuite.java index 227cd69fa3b5..0e64ccf2a940 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractKeysHTSSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractKeysHTSSuite.java @@ -51,6 +51,7 @@ import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.getNestedContractAddress; import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; import static com.hedera.services.bdd.suites.utils.MiscEETUtils.metadata; import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; @@ -85,7 +86,6 @@ import java.util.concurrent.atomic.AtomicReference; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.jetbrains.annotations.NotNull; public class ContractKeysHTSSuite extends HapiSuite { private static final long GAS_TO_OFFER = 1_500_000L; @@ -150,7 +150,7 @@ public boolean canRunConcurrent() { @Override public List getSpecsInSuite() { - return allOf(hscsKey1(), hscsKey2(), hscsKey3(), hscsKey4(), hscsKey5(), hscsKey6(), hscsKey7(), hscsKey8()); + return allOf(hscsKey1(), hscsKey2(), hscsKey3(), hscsKey4(), hscsKey5(), hscsKey6()); } List hscsKey1() { @@ -169,7 +169,6 @@ List hscsKey2() { staticCallForTransferWithContractKey(), staticCallForBurnWithContractKey(), staticCallForMintWithContractKey(), - delegateCallForTransferWithContractKey(), delegateCallForBurnWithContractKey(), delegateCallForMintWithContractKey(), staticCallForDissociatePrecompileFails()); @@ -214,14 +213,6 @@ List hscsKey6() { return List.of(burnWithKeyAsPartOf1OfXThreshold()); } - List hscsKey7() { - return List.of(transferWithKeyAsPartOf2OfXThreshold()); - } - - List hscsKey8() { - return List.of(burnTokenWithFullPrefixAndPartialPrefixKeys()); - } - private HapiSpec burnWithKeyAsPartOf1OfXThreshold() { final var delegateContractKeyShape = KeyShape.threshOf(1, SIMPLE, DELEGATE_CONTRACT); final var contractKeyShape = KeyShape.threshOf(1, SIMPLE, KeyShape.CONTRACT); @@ -284,121 +275,6 @@ private HapiSpec burnWithKeyAsPartOf1OfXThreshold() { .including(TOKEN_USAGE, TOKEN_TREASURY, -1)))); } - private HapiSpec transferWithKeyAsPartOf2OfXThreshold() { - final AtomicReference accountID = new AtomicReference<>(); - final AtomicReference vanillaTokenTokenID = new AtomicReference<>(); - final AtomicReference receiverID = new AtomicReference<>(); - final var delegateContractKeyShape = KeyShape.threshOf(2, SIMPLE, SIMPLE, DELEGATE_CONTRACT, KeyShape.CONTRACT); - - return defaultHapiSpec("transferWithKeyAsPartOf2OfXThreshold") - .given( - newKeyNamed(SUPPLY_KEY), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(VANILLA_TOKEN) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .supplyKey(SUPPLY_KEY) - .treasury(TOKEN_TREASURY) - .initialSupply(0) - .exposingCreatedIdTo(id -> vanillaTokenTokenID.set(asToken(id))), - mintToken(VANILLA_TOKEN, List.of(copyFromUtf8(FIRST_STRING_FOR_MINT))), - 
cryptoCreate(ACCOUNT).exposingCreatedIdTo(accountID::set), - cryptoCreate(RECEIVER).exposingCreatedIdTo(receiverID::set), - uploadInitCode(OUTER_CONTRACT, NESTED_CONTRACT), - contractCreate(NESTED_CONTRACT), - tokenAssociate(NESTED_CONTRACT, VANILLA_TOKEN), - tokenAssociate(ACCOUNT, VANILLA_TOKEN), - tokenAssociate(RECEIVER, VANILLA_TOKEN), - cryptoTransfer(movingUnique(VANILLA_TOKEN, 1L).between(TOKEN_TREASURY, ACCOUNT)) - .payingWith(GENESIS)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - OUTER_CONTRACT, asHeadlongAddress(getNestedContractAddress(NESTED_CONTRACT, spec))), - tokenAssociate(OUTER_CONTRACT, VANILLA_TOKEN), - newKeyNamed(DELEGATE_KEY) - .shape(delegateContractKeyShape.signedWith( - sigs(ON, ON, OUTER_CONTRACT, NESTED_CONTRACT))), - cryptoUpdate(ACCOUNT).key(DELEGATE_KEY), - contractCall( - OUTER_CONTRACT, - "transferDelegateCall", - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(receiverID.get())), - 1L) - .payingWith(GENESIS) - .alsoSigningWithFullPrefix(ACCOUNT) - .via("delegateTransferCallWithDelegateContractKeyTxn") - .gas(GAS_TO_OFFER)))) - .then( - childRecordsCheck( - "delegateTransferCallWithDelegateContractKeyTxn", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS)))), - getAccountBalance(ACCOUNT).hasTokenBalance(VANILLA_TOKEN, 0), - getAccountBalance(RECEIVER).hasTokenBalance(VANILLA_TOKEN, 1)); - } - - private HapiSpec delegateCallForTransferWithContractKey() { - final AtomicReference accountID = new AtomicReference<>(); - final AtomicReference vanillaTokenTokenID = new AtomicReference<>(); - final AtomicReference receiverID = new AtomicReference<>(); - - return defaultHapiSpec("delegateCallForTransferWithContractKey") - .given( - newKeyNamed(SUPPLY_KEY), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(VANILLA_TOKEN) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .supplyKey(SUPPLY_KEY) - .treasury(TOKEN_TREASURY) - .initialSupply(0) - .exposingCreatedIdTo(id -> vanillaTokenTokenID.set(asToken(id))), - mintToken(VANILLA_TOKEN, List.of(copyFromUtf8(FIRST_STRING_FOR_MINT))), - cryptoCreate(ACCOUNT).exposingCreatedIdTo(accountID::set), - cryptoCreate(RECEIVER).exposingCreatedIdTo(receiverID::set), - uploadInitCode(OUTER_CONTRACT, NESTED_CONTRACT), - contractCreate(NESTED_CONTRACT), - tokenAssociate(NESTED_CONTRACT, VANILLA_TOKEN), - tokenAssociate(ACCOUNT, VANILLA_TOKEN), - tokenAssociate(RECEIVER, VANILLA_TOKEN), - cryptoTransfer(movingUnique(VANILLA_TOKEN, 1L).between(TOKEN_TREASURY, ACCOUNT)) - .payingWith(GENESIS)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - OUTER_CONTRACT, asHeadlongAddress(getNestedContractAddress(NESTED_CONTRACT, spec))), - tokenAssociate(OUTER_CONTRACT, VANILLA_TOKEN), - newKeyNamed(CONTRACT_KEY).shape(CONTRACT_KEY_SHAPE.signedWith(sigs(ON, OUTER_CONTRACT))), - cryptoUpdate(ACCOUNT).key(CONTRACT_KEY), - contractCall( - OUTER_CONTRACT, - "transferDelegateCall", - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(receiverID.get())), - 1L) - .payingWith(GENESIS) - .via("delegateTransferCallWithContractKeyTxn") - .hasKnownStatus(ResponseCodeEnum.CONTRACT_REVERT_EXECUTED) - .gas(GAS_TO_OFFER)))) - .then( - childRecordsCheck( - 
"delegateTransferCallWithContractKeyTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), - getAccountBalance(ACCOUNT).hasTokenBalance(VANILLA_TOKEN, 1), - getAccountBalance(RECEIVER).hasTokenBalance(VANILLA_TOKEN, 0)); - } - private HapiSpec delegateCallForBurnWithContractKey() { final AtomicReference vanillaTokenTokenID = new AtomicReference<>(); @@ -2410,81 +2286,6 @@ private HapiSpec callForBurnWithContractKey() { .then(getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TOKEN_USAGE, 49)); } - private HapiSpec burnTokenWithFullPrefixAndPartialPrefixKeys() { - final var firstBurnTxn = "firstBurnTxn"; - final var secondBurnTxn = "secondBurnTxn"; - final var amount = 99L; - final AtomicLong fungibleNum = new AtomicLong(); - - return defaultHapiSpec("burnTokenWithFullPrefixAndPartialPrefixKeys") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(ACCOUNT_NAME).balance(10 * ONE_HUNDRED_HBARS), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(TYPE_OF_TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(100) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .exposingCreatedIdTo(idLit -> fungibleNum.set(asDotDelimitedLongArray(idLit)[2])), - uploadInitCode(ORDINARY_CALLS_CONTRACT), - contractCreate(ORDINARY_CALLS_CONTRACT)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - ORDINARY_CALLS_CONTRACT, - "burnTokenCall", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(TYPE_OF_TOKEN))), - BigInteger.ONE, - new long[0]) - .via(firstBurnTxn) - .payingWith(ACCOUNT_NAME) - .signedBy(MULTI_KEY) - .signedBy(ACCOUNT_NAME) - .hasKnownStatus(SUCCESS), - contractCall( - ORDINARY_CALLS_CONTRACT, - "burnTokenCall", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(TYPE_OF_TOKEN))), - BigInteger.ONE, - new long[0]) - .via(secondBurnTxn) - .payingWith(ACCOUNT_NAME) - .alsoSigningWithFullPrefix(MULTI_KEY) - .hasKnownStatus(SUCCESS)))) - .then( - childRecordsCheck( - firstBurnTxn, - SUCCESS, - recordWith() - .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_BURN) - .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), - childRecordsCheck( - secondBurnTxn, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_BURN) - .withStatus(SUCCESS) - .withTotalSupply(99))) - .newTotalSupply(99)), - getTokenInfo(TYPE_OF_TOKEN).hasTotalSupply(amount), - getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TYPE_OF_TOKEN, amount)); - } - - @NotNull - private String getNestedContractAddress(String contract, HapiSpec spec) { - return AssociatePrecompileSuite.getNestedContractAddress(contract, spec); - } - @Override protected Logger getResultsLogger() { return log; diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractKeysHTSV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractKeysHTSV1SecurityModelSuite.java new file mode 100644 index 000000000000..6fd9677bcd1c --- /dev/null +++ 
b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractKeysHTSV1SecurityModelSuite.java @@ -0,0 +1,322 @@ +/* + * Copyright (C) 2021-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.google.protobuf.ByteString.copyFromUtf8; +import static com.hedera.services.bdd.spec.HapiPropertySource.asDotDelimitedLongArray; +import static com.hedera.services.bdd.spec.HapiPropertySource.asToken; +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.keys.KeyShape.DELEGATE_CONTRACT; +import static com.hedera.services.bdd.spec.keys.KeyShape.SIMPLE; +import static com.hedera.services.bdd.spec.keys.KeyShape.sigs; +import static com.hedera.services.bdd.spec.keys.SigControl.ON; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoUpdate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overridingTwo; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.getNestedContractAddress; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static 
com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; + +import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.keys.KeyShape; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hederahashgraph.api.proto.java.AccountID; +import com.hederahashgraph.api.proto.java.ResponseCodeEnum; +import com.hederahashgraph.api.proto.java.TokenID; +import com.hederahashgraph.api.proto.java.TokenType; +import java.math.BigInteger; +import java.util.List; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +public class ContractKeysHTSV1SecurityModelSuite extends HapiSuite { + private static final Logger log = LogManager.getLogger(ContractKeysHTSV1SecurityModelSuite.class); + + private static final long GAS_TO_OFFER = 1_500_000L; + + private static final String TOKEN_TREASURY = "treasury"; + + private static final String ACCOUNT = "sender"; + private static final String RECEIVER = "receiver"; + + private static final KeyShape CONTRACT_KEY_SHAPE = KeyShape.threshOf(1, SIMPLE, KeyShape.CONTRACT); + + private static final String DELEGATE_KEY = "Delegate Contract Key"; + private static final String CONTRACT_KEY = "Contract Key"; + private static final String MULTI_KEY = "Multi Key"; + private static final String SUPPLY_KEY = "Supply Key"; + + private static final String ORDINARY_CALLS_CONTRACT = "HTSCalls"; + private static final String OUTER_CONTRACT = "DelegateContract"; + private static final String NESTED_CONTRACT = "ServiceContract"; + private static final String FIRST_STRING_FOR_MINT = "First!"; + private static final String ACCOUNT_NAME = "anybody"; + private static final String TYPE_OF_TOKEN = "fungibleToken"; + + public static void main(String... 
args) { + new ContractKeysHTSV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + public List getSpecsInSuite() { + return List.of( + delegateCallForTransferWithContractKey(), + transferWithKeyAsPartOf2OfXThreshold(), + burnTokenWithFullPrefixAndPartialPrefixKeys()); + } + + private HapiSpec transferWithKeyAsPartOf2OfXThreshold() { + final AtomicReference accountID = new AtomicReference<>(); + final AtomicReference vanillaTokenTokenID = new AtomicReference<>(); + final AtomicReference receiverID = new AtomicReference<>(); + final var delegateContractKeyShape = KeyShape.threshOf(2, SIMPLE, SIMPLE, DELEGATE_CONTRACT, KeyShape.CONTRACT); + + return propertyPreservingHapiSpec("transferWithKeyAsPartOf2OfXThreshold") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenMint", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(SUPPLY_KEY), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(VANILLA_TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .supplyKey(SUPPLY_KEY) + .treasury(TOKEN_TREASURY) + .initialSupply(0) + .exposingCreatedIdTo(id -> vanillaTokenTokenID.set(asToken(id))), + mintToken(VANILLA_TOKEN, List.of(copyFromUtf8(FIRST_STRING_FOR_MINT))), + cryptoCreate(ACCOUNT).exposingCreatedIdTo(accountID::set), + cryptoCreate(RECEIVER).exposingCreatedIdTo(receiverID::set), + uploadInitCode(OUTER_CONTRACT, NESTED_CONTRACT), + contractCreate(NESTED_CONTRACT), + tokenAssociate(NESTED_CONTRACT, VANILLA_TOKEN), + tokenAssociate(ACCOUNT, VANILLA_TOKEN), + tokenAssociate(RECEIVER, VANILLA_TOKEN), + cryptoTransfer(movingUnique(VANILLA_TOKEN, 1L).between(TOKEN_TREASURY, ACCOUNT)) + .payingWith(GENESIS)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + OUTER_CONTRACT, asHeadlongAddress(getNestedContractAddress(NESTED_CONTRACT, spec))), + tokenAssociate(OUTER_CONTRACT, VANILLA_TOKEN), + newKeyNamed(DELEGATE_KEY) + .shape(delegateContractKeyShape.signedWith( + sigs(ON, ON, OUTER_CONTRACT, NESTED_CONTRACT))), + cryptoUpdate(ACCOUNT).key(DELEGATE_KEY), + contractCall( + OUTER_CONTRACT, + "transferDelegateCall", + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(receiverID.get())), + 1L) + .payingWith(GENESIS) + .alsoSigningWithFullPrefix(ACCOUNT) + .via("delegateTransferCallWithDelegateContractKeyTxn") + .gas(GAS_TO_OFFER)))) + .then( + childRecordsCheck( + "delegateTransferCallWithDelegateContractKeyTxn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS)))), + getAccountBalance(ACCOUNT).hasTokenBalance(VANILLA_TOKEN, 0), + getAccountBalance(RECEIVER).hasTokenBalance(VANILLA_TOKEN, 1)); + } + + private HapiSpec delegateCallForTransferWithContractKey() { + final AtomicReference accountID = new AtomicReference<>(); + final AtomicReference vanillaTokenTokenID = new AtomicReference<>(); + final AtomicReference receiverID = new AtomicReference<>(); + + return propertyPreservingHapiSpec("delegateCallForTransferWithContractKey") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + 
CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenMint", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(SUPPLY_KEY), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(VANILLA_TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .supplyKey(SUPPLY_KEY) + .treasury(TOKEN_TREASURY) + .initialSupply(0) + .exposingCreatedIdTo(id -> vanillaTokenTokenID.set(asToken(id))), + mintToken(VANILLA_TOKEN, List.of(copyFromUtf8(FIRST_STRING_FOR_MINT))), + cryptoCreate(ACCOUNT).exposingCreatedIdTo(accountID::set), + cryptoCreate(RECEIVER).exposingCreatedIdTo(receiverID::set), + uploadInitCode(OUTER_CONTRACT, NESTED_CONTRACT), + contractCreate(NESTED_CONTRACT), + tokenAssociate(NESTED_CONTRACT, VANILLA_TOKEN), + tokenAssociate(ACCOUNT, VANILLA_TOKEN), + tokenAssociate(RECEIVER, VANILLA_TOKEN), + cryptoTransfer(movingUnique(VANILLA_TOKEN, 1L).between(TOKEN_TREASURY, ACCOUNT)) + .payingWith(GENESIS)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + OUTER_CONTRACT, asHeadlongAddress(getNestedContractAddress(NESTED_CONTRACT, spec))), + tokenAssociate(OUTER_CONTRACT, VANILLA_TOKEN), + newKeyNamed(CONTRACT_KEY).shape(CONTRACT_KEY_SHAPE.signedWith(sigs(ON, OUTER_CONTRACT))), + cryptoUpdate(ACCOUNT).key(CONTRACT_KEY), + contractCall( + OUTER_CONTRACT, + "transferDelegateCall", + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(receiverID.get())), + 1L) + .payingWith(GENESIS) + .via("delegateTransferCallWithContractKeyTxn") + .hasKnownStatus(ResponseCodeEnum.CONTRACT_REVERT_EXECUTED) + .gas(GAS_TO_OFFER)))) + .then( + childRecordsCheck( + "delegateTransferCallWithContractKeyTxn", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), + getAccountBalance(ACCOUNT).hasTokenBalance(VANILLA_TOKEN, 1), + getAccountBalance(RECEIVER).hasTokenBalance(VANILLA_TOKEN, 0)); + } + + private HapiSpec burnTokenWithFullPrefixAndPartialPrefixKeys() { + final var firstBurnTxn = "firstBurnTxn"; + final var secondBurnTxn = "secondBurnTxn"; + final var amount = 99L; + final AtomicLong fungibleNum = new AtomicLong(); + + return propertyPreservingHapiSpec("burnTokenWithFullPrefixAndPartialPrefixKeys") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenBurn,TokenCreate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(ACCOUNT_NAME).balance(10 * ONE_HUNDRED_HBARS), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(TYPE_OF_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(100) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .exposingCreatedIdTo(idLit -> fungibleNum.set(asDotDelimitedLongArray(idLit)[2])), + uploadInitCode(ORDINARY_CALLS_CONTRACT), + contractCreate(ORDINARY_CALLS_CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + ORDINARY_CALLS_CONTRACT, + "burnTokenCall", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(TYPE_OF_TOKEN))), + BigInteger.ONE, + new 
long[0]) + .via(firstBurnTxn) + .payingWith(ACCOUNT_NAME) + .signedBy(MULTI_KEY) + .signedBy(ACCOUNT_NAME) + .hasKnownStatus(SUCCESS), + contractCall( + ORDINARY_CALLS_CONTRACT, + "burnTokenCall", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(TYPE_OF_TOKEN))), + BigInteger.ONE, + new long[0]) + .via(secondBurnTxn) + .payingWith(ACCOUNT_NAME) + .alsoSigningWithFullPrefix(MULTI_KEY) + .hasKnownStatus(SUCCESS)))) + .then( + childRecordsCheck( + firstBurnTxn, + SUCCESS, + recordWith() + .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_BURN) + .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), + childRecordsCheck( + secondBurnTxn, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_BURN) + .withStatus(SUCCESS) + .withTotalSupply(99))) + .newTotalSupply(99)), + getTokenInfo(TYPE_OF_TOKEN).hasTotalSupply(amount), + getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TYPE_OF_TOKEN, amount)); + } + + @Override + protected Logger getResultsLogger() { + return log; + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractKeysStillWorkAsExpectedSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractKeysStillWorkAsExpectedSuite.java index 705605ef8467..65d9fc996091 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractKeysStillWorkAsExpectedSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractKeysStillWorkAsExpectedSuite.java @@ -60,6 +60,7 @@ import static com.hedera.services.bdd.spec.utilops.UtilVerbs.streamMustInclude; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; import static com.hedera.services.bdd.suites.contract.hapi.ContractCallSuite.PAY_RECEIVABLE_CONTRACT; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.MULTI_KEY; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE; @@ -87,7 +88,6 @@ public class ContractKeysStillWorkAsExpectedSuite extends HapiSuite { private static final Logger log = LogManager.getLogger(ContractKeysStillWorkAsExpectedSuite.class); private static final String EVM_ALIAS_ENABLED_PROP = "cryptoCreateWithAliasAndEvmAddress.enabled"; - public static final String CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS = "contracts.maxNumWithHapiSigsAccess"; public static void main(String... 
args) { new ContractKeysStillWorkAsExpectedSuite().runSuiteSync(); diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractMintHTSSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractMintHTSSuite.java index 8d0b97f3b02d..a6f18c266662 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractMintHTSSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractMintHTSSuite.java @@ -16,19 +16,13 @@ package com.hedera.services.bdd.suites.contract.precompile; -import static com.hedera.services.bdd.spec.HapiPropertySource.asToken; import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; -import static com.hedera.services.bdd.spec.assertions.AssertUtils.inOrder; import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; -import static com.hedera.services.bdd.spec.assertions.ContractLogAsserts.logWith; -import static com.hedera.services.bdd.spec.assertions.SomeFungibleTransfers.changingFungibleBalances; import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; import static com.hedera.services.bdd.spec.keys.KeyShape.DELEGATE_CONTRACT; import static com.hedera.services.bdd.spec.keys.KeyShape.sigs; import static com.hedera.services.bdd.spec.keys.SigControl.ON; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenNftInfo; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; @@ -42,49 +36,31 @@ import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.assertionsHold; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; import static com.hedera.services.bdd.suites.contract.Utils.asAddress; -import static com.hedera.services.bdd.suites.contract.Utils.parsedToByteString; +import static com.hedera.services.bdd.suites.contract.Utils.assertTxnRecordHasNoTraceabilityEnrichedContractFnResult; +import static com.hedera.services.bdd.suites.contract.Utils.expectedPrecompileGasFor; +import static com.hedera.services.bdd.suites.contract.Utils.getNestedContractAddress; import static com.hedera.services.bdd.suites.utils.contracts.FunctionParameters.functionParameters; import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; -import static com.hederahashgraph.api.proto.java.HederaFunctionality.ContractCall; import static com.hederahashgraph.api.proto.java.HederaFunctionality.TokenMint; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.*; -import static 
com.hederahashgraph.api.proto.java.SubType.DEFAULT; -import static com.hederahashgraph.api.proto.java.SubType.TOKEN_FUNGIBLE_COMMON; import static com.hederahashgraph.api.proto.java.SubType.TOKEN_NON_FUNGIBLE_UNIQUE; -import static org.junit.jupiter.api.Assertions.assertEquals; -import com.google.protobuf.ByteString; -import com.hedera.node.app.hapi.fees.pricing.AssetsLoader; import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; import com.hedera.services.bdd.spec.HapiSpec; import com.hedera.services.bdd.spec.assertions.NonFungibleTransfers; import com.hedera.services.bdd.spec.keys.KeyShape; import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; -import com.hedera.services.bdd.spec.utilops.CustomSpecAssert; import com.hedera.services.bdd.suites.HapiSuite; import com.hedera.services.bdd.suites.utils.contracts.FunctionParameters; -import com.hederahashgraph.api.proto.java.HederaFunctionality; -import com.hederahashgraph.api.proto.java.SubType; -import com.hederahashgraph.api.proto.java.TokenID; import com.hederahashgraph.api.proto.java.TokenSupplyType; import com.hederahashgraph.api.proto.java.TokenType; -import java.io.IOException; -import java.io.UncheckedIOException; -import java.math.BigDecimal; -import java.math.BigInteger; -import java.util.Arrays; import java.util.List; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.jetbrains.annotations.NotNull; public class ContractMintHTSSuite extends HapiSuite { @@ -96,23 +72,17 @@ public class ContractMintHTSSuite extends HapiSuite { private static final KeyShape DELEGATE_CONTRACT_KEY_SHAPE = KeyShape.threshOf(1, KeyShape.SIMPLE, DELEGATE_CONTRACT); private static final String DELEGATE_KEY = "DelegateKey"; - private static final String CONTRACT_KEY = "ContractKey"; private static final String MULTI_KEY = "purpose"; public static final String MINT_CONTRACT = "MintContract"; public static final String MINT_NFT_CONTRACT = "MintNFTContract"; private static final String NESTED_MINT_CONTRACT = "NestedMint"; - private static final String HELLO_WORLD_MINT = "HelloWorldMint"; private static final String ACCOUNT = "anybody"; private static final String DELEGATE_CONTRACT_KEY_NAME = "contractKey"; private static final String FUNGIBLE_TOKEN = "fungibleToken"; - private static final String FIRST_MINT_TXN = "firstMintTxn"; - private static final String SECOND_MINT_TXN = "secondMintTxn"; private static final String NON_FUNGIBLE_TOKEN = "nonFungibleToken"; private static final String TEST_METADATA_1 = "Test metadata 1"; - private static final String TEST_METADATA_2 = "Test metadata 2"; private static final String RECIPIENT = "recipient"; - private static final String MINT_FUNGIBLE_TOKEN = "mintFungibleToken"; public static final String MINT_FUNGIBLE_TOKEN_WITH_EVENT = "mintFungibleTokenWithEvent"; public static void main(final String... 
args) { @@ -130,276 +100,11 @@ public List getSpecsInSuite() { } List negativeSpecs() { - return List.of( - rollbackOnFailedMintAfterFungibleTransfer(), - rollbackOnFailedAssociateAfterNonFungibleMint(), - gasCostNotMetSetsInsufficientGasStatusInChildRecord()); + return List.of(rollbackOnFailedMintAfterFungibleTransfer()); } List positiveSpecs() { - return List.of( - helloWorldFungibleMint(), - helloWorldNftMint(), - happyPathFungibleTokenMint(), - happyPathNonFungibleTokenMint(), - transferNftAfterNestedMint(), - happyPathZeroUnitFungibleTokenMint()); - } - - private HapiSpec happyPathZeroUnitFungibleTokenMint() { - final var amount = 0L; - final var gasUsed = 14085L; - final AtomicReference fungible = new AtomicReference<>(); - - return defaultHapiSpec("happyPathZeroUnitFungibleTokenMint") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).payingWith(GENESIS), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(FUNGIBLE_TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(0) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .exposingCreatedIdTo(idLit -> fungible.set(asToken(idLit))), - uploadInitCode(MINT_CONTRACT), - sourcing(() -> contractCreate( - MINT_CONTRACT, HapiParserUtil.asHeadlongAddress(asAddress(fungible.get()))))) - .when( - contractCall(MINT_CONTRACT, MINT_FUNGIBLE_TOKEN_WITH_EVENT, BigInteger.valueOf(amount)) - .via(FIRST_MINT_TXN) - .gas(GAS_TO_OFFER) - .payingWith(ACCOUNT) - .alsoSigningWithFullPrefix(MULTI_KEY), - getTxnRecord(FIRST_MINT_TXN).andAllChildRecords().logged()) - .then(childRecordsCheck( - FIRST_MINT_TXN, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_MINT) - .withStatus(SUCCESS) - .withTotalSupply(0) - .withSerialNumbers()) - .gasUsed(gasUsed)) - .newTotalSupply(0))); - } - - private HapiSpec helloWorldFungibleMint() { - final var amount = 1_234_567L; - final AtomicReference fungible = new AtomicReference<>(); - - return defaultHapiSpec("HelloWorldFungibleMint") - .given( - newKeyNamed(MULTI_KEY), - tokenCreate(FUNGIBLE_TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(0) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .exposingCreatedIdTo(idLit -> fungible.set(asToken(idLit))), - uploadInitCode(HELLO_WORLD_MINT)) - .when( - sourcing(() -> contractCreate( - HELLO_WORLD_MINT, HapiParserUtil.asHeadlongAddress(asAddress(fungible.get())))), - contractCall(HELLO_WORLD_MINT, "brrr", BigInteger.valueOf(amount)) - .via(FIRST_MINT_TXN) - .alsoSigningWithFullPrefix(MULTI_KEY), - getTxnRecord(FIRST_MINT_TXN).andAllChildRecords().logged(), - getTokenInfo(FUNGIBLE_TOKEN).hasTotalSupply(amount), - /* And now make the token contract-controlled so no explicit supply sig is required */ - newKeyNamed(CONTRACT_KEY).shape(DELEGATE_CONTRACT.signedWith(HELLO_WORLD_MINT)), - tokenUpdate(FUNGIBLE_TOKEN).supplyKey(CONTRACT_KEY), - getTokenInfo(FUNGIBLE_TOKEN).logged(), - contractCall(HELLO_WORLD_MINT, "brrr", BigInteger.valueOf(amount)) - .via(SECOND_MINT_TXN), - getTxnRecord(SECOND_MINT_TXN).andAllChildRecords().logged(), - getTokenInfo(FUNGIBLE_TOKEN).hasTotalSupply(2 * amount)) - .then(childRecordsCheck( - SECOND_MINT_TXN, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_MINT) - .withStatus(SUCCESS) - .withTotalSupply(2469134L) - .withSerialNumbers())) - .newTotalSupply(2469134L) - 
.tokenTransfers( - changingFungibleBalances().including(FUNGIBLE_TOKEN, DEFAULT_PAYER, amount)))); - } - - private HapiSpec helloWorldNftMint() { - final AtomicReference nonFungible = new AtomicReference<>(); - - return defaultHapiSpec("HelloWorldNftMint") - .given( - newKeyNamed(MULTI_KEY), - tokenCreate(NON_FUNGIBLE_TOKEN) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .initialSupply(0) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .exposingCreatedIdTo(idLit -> nonFungible.set(asToken(idLit))), - uploadInitCode(HELLO_WORLD_MINT), - sourcing(() -> contractCreate( - HELLO_WORLD_MINT, HapiParserUtil.asHeadlongAddress(asAddress(nonFungible.get()))))) - .when( - contractCall(HELLO_WORLD_MINT, "mint") - .via(FIRST_MINT_TXN) - .gas(GAS_TO_OFFER) - .alsoSigningWithFullPrefix(MULTI_KEY), - getTxnRecord(FIRST_MINT_TXN).andAllChildRecords().logged(), - getTokenInfo(NON_FUNGIBLE_TOKEN).hasTotalSupply(1), - /* And now make the token contract-controlled so no explicit supply sig is required */ - newKeyNamed(CONTRACT_KEY).shape(DELEGATE_CONTRACT.signedWith(HELLO_WORLD_MINT)), - tokenUpdate(NON_FUNGIBLE_TOKEN).supplyKey(CONTRACT_KEY), - getTokenInfo(NON_FUNGIBLE_TOKEN).logged(), - contractCall(HELLO_WORLD_MINT, "mint") - .via(SECOND_MINT_TXN) - .gas(GAS_TO_OFFER), - getTxnRecord(SECOND_MINT_TXN).andAllChildRecords().logged()) - .then( - getTokenInfo(NON_FUNGIBLE_TOKEN).hasTotalSupply(2), - getTokenNftInfo(NON_FUNGIBLE_TOKEN, 2L).logged(), - childRecordsCheck( - FIRST_MINT_TXN, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_MINT) - .withStatus(SUCCESS) - .withTotalSupply(1) - .withSerialNumbers(1))) - .newTotalSupply(1) - .serialNos(List.of(1L))), - childRecordsCheck( - SECOND_MINT_TXN, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_MINT) - .withStatus(SUCCESS) - .withTotalSupply(2) - .withSerialNumbers(2))) - .newTotalSupply(2) - .serialNos(List.of(2L)))); - } - - private HapiSpec happyPathFungibleTokenMint() { - final var amount = 10L; - final var gasUsed = 14085L; - final AtomicReference fungible = new AtomicReference<>(); - - return defaultHapiSpec("happyPathFungibleTokenMint") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).payingWith(GENESIS), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(FUNGIBLE_TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(0) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .exposingCreatedIdTo(idLit -> fungible.set(asToken(idLit))), - uploadInitCode(MINT_CONTRACT), - sourcing(() -> contractCreate( - MINT_CONTRACT, HapiParserUtil.asHeadlongAddress(asAddress(fungible.get()))))) - .when( - contractCall(MINT_CONTRACT, MINT_FUNGIBLE_TOKEN_WITH_EVENT, BigInteger.valueOf(10)) - .via(FIRST_MINT_TXN) - .gas(GAS_TO_OFFER) - .payingWith(ACCOUNT) - .alsoSigningWithFullPrefix(MULTI_KEY), - getTxnRecord(FIRST_MINT_TXN).andAllChildRecords().logged(), - getTxnRecord(FIRST_MINT_TXN) - .hasPriority(recordWith() - .contractCallResult(resultWith() - .logs(inOrder(logWith() - .noData() - .withTopicsInOrder(List.of( - parsedToByteString(amount), parsedToByteString(0)))))))) - .then( - getTokenInfo(FUNGIBLE_TOKEN).hasTotalSupply(amount), - getAccountBalance(TOKEN_TREASURY).hasTokenBalance(FUNGIBLE_TOKEN, amount), - childRecordsCheck( - FIRST_MINT_TXN, - SUCCESS, - recordWith() - .status(SUCCESS) - 
.contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_MINT) - .withStatus(SUCCESS) - .withTotalSupply(10) - .withSerialNumbers()) - .gasUsed(gasUsed)) - .newTotalSupply(10))); - } - - private HapiSpec happyPathNonFungibleTokenMint() { - final var totalSupply = 2; - final AtomicReference nonFungible = new AtomicReference<>(); - - return defaultHapiSpec("happyPathNonFungibleTokenMint") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(NON_FUNGIBLE_TOKEN) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .supplyType(TokenSupplyType.INFINITE) - .initialSupply(0) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .exposingCreatedIdTo(idLit -> nonFungible.set(asToken(idLit))), - uploadInitCode(MINT_CONTRACT), - sourcing(() -> contractCreate( - MINT_CONTRACT, HapiParserUtil.asHeadlongAddress(asAddress(nonFungible.get()))))) - .when( - contractCall(MINT_CONTRACT, "mintNonFungibleTokenWithEvent", (Object) - new byte[][] {TEST_METADATA_1.getBytes(), TEST_METADATA_2.getBytes()}) - .via(FIRST_MINT_TXN) - .payingWith(ACCOUNT) - .gas(GAS_TO_OFFER) - .alsoSigningWithFullPrefix(MULTI_KEY), - getTxnRecord(FIRST_MINT_TXN).andAllChildRecords().logged(), - getTxnRecord(FIRST_MINT_TXN) - .hasPriority(recordWith() - .contractCallResult(resultWith() - .logs(inOrder(logWith() - .noData() - .withTopicsInOrder(List.of( - parsedToByteString(totalSupply), - parsedToByteString(1)))))))) - .then( - getTokenInfo(NON_FUNGIBLE_TOKEN).hasTotalSupply(totalSupply), - getAccountBalance(TOKEN_TREASURY).hasTokenBalance(NON_FUNGIBLE_TOKEN, totalSupply), - childRecordsCheck( - FIRST_MINT_TXN, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_MINT) - .withStatus(SUCCESS) - .withTotalSupply(2L) - .withSerialNumbers(1L, 2L)) - .gasUsed(704226L)) - .newTotalSupply(2) - .serialNos(Arrays.asList(1L, 2L)))); + return List.of(transferNftAfterNestedMint()); } private HapiSpec transferNftAfterNestedMint() { @@ -557,170 +262,8 @@ private HapiSpec rollbackOnFailedMintAfterFungibleTransfer() { .withSerialNumbers())))); } - private HapiSpec rollbackOnFailedAssociateAfterNonFungibleMint() { - final var nestedMintTxn = "nestedMintTxn"; - - return defaultHapiSpec("RollbackOnFailedAssociateAfterNonFungibleMint") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(ACCOUNT).balance(ONE_HUNDRED_HBARS), - cryptoCreate(RECIPIENT), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(NON_FUNGIBLE_TOKEN) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .supplyType(TokenSupplyType.INFINITE) - .initialSupply(0) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY), - uploadInitCode(MINT_NFT_CONTRACT, NESTED_MINT_CONTRACT), - contractCreate(MINT_NFT_CONTRACT)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - NESTED_MINT_CONTRACT, - asHeadlongAddress(getNestedContractAddress(MINT_NFT_CONTRACT, spec)), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(NON_FUNGIBLE_TOKEN)))) - .gas(GAS_TO_OFFER), - newKeyNamed(DELEGATE_KEY) - .shape(DELEGATE_CONTRACT_KEY_SHAPE.signedWith(sigs(ON, NESTED_MINT_CONTRACT))), - cryptoUpdate(ACCOUNT).key(DELEGATE_KEY), - contractCall( - NESTED_MINT_CONTRACT, - "revertMintAfterFailedAssociate", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - new byte[][] 
{TEST_METADATA_1.getBytes()}) - .payingWith(GENESIS) - .alsoSigningWithFullPrefix(MULTI_KEY) - .via(nestedMintTxn) - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - getTxnRecord(nestedMintTxn).andAllChildRecords().logged()))) - .then( - getAccountBalance(TOKEN_TREASURY).hasTokenBalance(NON_FUNGIBLE_TOKEN, 0), - childRecordsCheck( - nestedMintTxn, - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(REVERTED_SUCCESS) - .newTotalSupply(0) - .serialNos(List.of()), - recordWith() - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INVALID_TOKEN_ID))))); - } - - private HapiSpec gasCostNotMetSetsInsufficientGasStatusInChildRecord() { - final var amount = 10L; - final var baselineMintWithEnoughGas = "baselineMintWithEnoughGas"; - - final AtomicLong expectedInsufficientGas = new AtomicLong(); - final AtomicReference fungible = new AtomicReference<>(); - - return defaultHapiSpec("gasCostNotMetSetsInsufficientGasStatusInChildRecord") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(ACCOUNT).balance(5 * ONE_HUNDRED_HBARS), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(FUNGIBLE_TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(0) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .exposingCreatedIdTo(idLit -> fungible.set(asToken(idLit)))) - .when(uploadInitCode(MINT_CONTRACT), sourcing(() -> contractCreate( - MINT_CONTRACT, HapiParserUtil.asHeadlongAddress(asAddress(fungible.get()))) - .payingWith(ACCOUNT) - .gas(GAS_TO_OFFER))) - .then( - contractCall(MINT_CONTRACT, MINT_FUNGIBLE_TOKEN, BigInteger.valueOf(amount)) - .via(baselineMintWithEnoughGas) - .payingWith(ACCOUNT) - .alsoSigningWithFullPrefix(MULTI_KEY) - .gas(64_000L), - withOpContext((spec, opLog) -> { - final var expectedPrecompileGas = - expectedPrecompileGasFor(spec, TokenMint, TOKEN_FUNGIBLE_COMMON); - final var baselineCostLookup = getTxnRecord(baselineMintWithEnoughGas) - .andAllChildRecords() - .logged() - .assertingNothing(); - allRunFor(spec, baselineCostLookup); - final var baselineGas = baselineCostLookup - .getResponseRecord() - .getContractCallResult() - .getGasUsed(); - expectedInsufficientGas.set(baselineGas - expectedPrecompileGas); - }), - sourcing(() -> contractCall(MINT_CONTRACT, MINT_FUNGIBLE_TOKEN, BigInteger.valueOf(amount)) - .via(FIRST_MINT_TXN) - .payingWith(ACCOUNT) - .alsoSigningWithFullPrefix(MULTI_KEY) - .gas(expectedInsufficientGas.get()) - .hasKnownStatus(INSUFFICIENT_GAS)), - getTxnRecord(FIRST_MINT_TXN).andAllChildRecords().logged(), - getTokenInfo(FUNGIBLE_TOKEN).hasTotalSupply(amount), - getAccountBalance(TOKEN_TREASURY).hasTokenBalance(FUNGIBLE_TOKEN, amount), - childRecordsCheck( - FIRST_MINT_TXN, - INSUFFICIENT_GAS, - recordWith() - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_MINT) - .withStatus(INSUFFICIENT_GAS) - .withTotalSupply(0L) - .withSerialNumbers())))); - } - - private long expectedPrecompileGasFor(final HapiSpec spec, final HederaFunctionality function, final SubType type) { - final var gasThousandthsOfTinycentPrice = spec.fees() - .getCurrentOpFeeData() - .get(ContractCall) - .get(DEFAULT) - .getServicedata() - .getGas(); - final var assetsLoader = new AssetsLoader(); - final BigDecimal hapiUsdPrice; - try { - hapiUsdPrice = assetsLoader.loadCanonicalPrices().get(function).get(type); - } catch (final IOException e) { - throw new UncheckedIOException(e); - } - final var precompileTinycentPrice = hapiUsdPrice - 
.multiply(BigDecimal.valueOf(1.2)) - .multiply(BigDecimal.valueOf(100 * 100_000_000L)) - .longValueExact(); - return (precompileTinycentPrice * 1000 / gasThousandthsOfTinycentPrice); - } - - @NotNull - private String getNestedContractAddress(final String contract, final HapiSpec spec) { - return AssociatePrecompileSuite.getNestedContractAddress(contract, spec); - } - @Override protected Logger getResultsLogger() { return LOG; } - - @NotNull - @SuppressWarnings("java:S5960") - private CustomSpecAssert assertTxnRecordHasNoTraceabilityEnrichedContractFnResult(final String nestedTransferTxn) { - return assertionsHold((spec, log) -> { - final var subOp = getTxnRecord(nestedTransferTxn); - allRunFor(spec, subOp); - - final var rcd = subOp.getResponseRecord(); - - final var contractCallResult = rcd.getContractCallResult(); - assertEquals(0L, contractCallResult.getGas(), "Result not expected to externalize gas"); - assertEquals(0L, contractCallResult.getAmount(), "Result not expected to externalize amount"); - assertEquals(ByteString.EMPTY, contractCallResult.getFunctionParameters()); - }); - } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractMintHTSV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractMintHTSV1SecurityModelSuite.java new file mode 100644 index 000000000000..2b90c1b6afa2 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ContractMintHTSV1SecurityModelSuite.java @@ -0,0 +1,518 @@ +/* + * Copyright (C) 2021-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.hedera.services.bdd.spec.HapiPropertySource.asToken; +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.AssertUtils.inOrder; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.ContractLogAsserts.logWith; +import static com.hedera.services.bdd.spec.assertions.SomeFungibleTransfers.changingFungibleBalances; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.keys.KeyShape.DELEGATE_CONTRACT; +import static com.hedera.services.bdd.spec.keys.KeyShape.sigs; +import static com.hedera.services.bdd.spec.keys.SigControl.ON; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenNftInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoUpdate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenUpdate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overriding; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.expectedPrecompileGasFor; +import static com.hedera.services.bdd.suites.contract.Utils.getNestedContractAddress; +import static com.hedera.services.bdd.suites.contract.Utils.parsedToByteString; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.HederaFunctionality.TokenMint; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.*; +import static com.hederahashgraph.api.proto.java.SubType.TOKEN_FUNGIBLE_COMMON; + +import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.keys.KeyShape; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.suites.HapiSuite; 
+import com.hederahashgraph.api.proto.java.TokenID; +import com.hederahashgraph.api.proto.java.TokenSupplyType; +import com.hederahashgraph.api.proto.java.TokenType; +import java.math.BigInteger; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +@SuppressWarnings("java:S1192") // "string literal should not be duplicated" - this rule makes test suites worse +public class ContractMintHTSV1SecurityModelSuite extends HapiSuite { + + private static final Logger LOG = LogManager.getLogger(ContractMintHTSV1SecurityModelSuite.class); + + private static final long GAS_TO_OFFER = 4_000_000L; + private static final String TOKEN_TREASURY = "treasury"; + private static final KeyShape DELEGATE_CONTRACT_KEY_SHAPE = + KeyShape.threshOf(1, KeyShape.SIMPLE, DELEGATE_CONTRACT); + private static final String DELEGATE_KEY = "DelegateKey"; + private static final String CONTRACT_KEY = "ContractKey"; + private static final String MULTI_KEY = "purpose"; + public static final String MINT_CONTRACT = "MintContract"; + public static final String MINT_NFT_CONTRACT = "MintNFTContract"; + private static final String NESTED_MINT_CONTRACT = "NestedMint"; + private static final String HELLO_WORLD_MINT = "HelloWorldMint"; + private static final String ACCOUNT = "anybody"; + private static final String FUNGIBLE_TOKEN = "fungibleToken"; + private static final String FIRST_MINT_TXN = "firstMintTxn"; + private static final String SECOND_MINT_TXN = "secondMintTxn"; + private static final String NON_FUNGIBLE_TOKEN = "nonFungibleToken"; + private static final String TEST_METADATA_1 = "Test metadata 1"; + private static final String TEST_METADATA_2 = "Test metadata 2"; + private static final String RECIPIENT = "recipient"; + private static final String MINT_FUNGIBLE_TOKEN = "mintFungibleToken"; + public static final String MINT_FUNGIBLE_TOKEN_WITH_EVENT = "mintFungibleTokenWithEvent"; + + public static void main(final String... 
args) { + new ContractMintHTSV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public List getSpecsInSuite() { + return allOf(positiveSpecs(), negativeSpecs()); + } + + List negativeSpecs() { + return List.of( + rollbackOnFailedAssociateAfterNonFungibleMint(), gasCostNotMetSetsInsufficientGasStatusInChildRecord()); + } + + List positiveSpecs() { + return List.of( + helloWorldFungibleMint(), + helloWorldNftMint(), + happyPathFungibleTokenMint(), + happyPathNonFungibleTokenMint(), + happyPathZeroUnitFungibleTokenMint()); + } + + private HapiSpec happyPathZeroUnitFungibleTokenMint() { + final var amount = 0L; + final var gasUsed = 14085L; + final AtomicReference fungible = new AtomicReference<>(); + + return propertyPreservingHapiSpec("happyPathZeroUnitFungibleTokenMint") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).payingWith(GENESIS), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(0) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .exposingCreatedIdTo(idLit -> fungible.set(asToken(idLit))), + uploadInitCode(MINT_CONTRACT), + sourcing(() -> contractCreate( + MINT_CONTRACT, HapiParserUtil.asHeadlongAddress(asAddress(fungible.get()))))) + .when( + contractCall(MINT_CONTRACT, MINT_FUNGIBLE_TOKEN_WITH_EVENT, BigInteger.valueOf(amount)) + .via(FIRST_MINT_TXN) + .gas(GAS_TO_OFFER) + .payingWith(ACCOUNT) + .alsoSigningWithFullPrefix(MULTI_KEY), + getTxnRecord(FIRST_MINT_TXN).andAllChildRecords().logged()) + .then(childRecordsCheck( + FIRST_MINT_TXN, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_MINT) + .withStatus(SUCCESS) + .withTotalSupply(0) + .withSerialNumbers()) + .gasUsed(gasUsed)) + .newTotalSupply(0))); + } + + private HapiSpec helloWorldFungibleMint() { + final var amount = 1_234_567L; + final AtomicReference fungible = new AtomicReference<>(); + + return propertyPreservingHapiSpec("helloWorldFungibleMint") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(0) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .exposingCreatedIdTo(idLit -> fungible.set(asToken(idLit))), + uploadInitCode(HELLO_WORLD_MINT)) + .when( + sourcing(() -> contractCreate( + HELLO_WORLD_MINT, HapiParserUtil.asHeadlongAddress(asAddress(fungible.get())))), + contractCall(HELLO_WORLD_MINT, "brrr", BigInteger.valueOf(amount)) + .via(FIRST_MINT_TXN) + .alsoSigningWithFullPrefix(MULTI_KEY), + getTxnRecord(FIRST_MINT_TXN).andAllChildRecords().logged(), + getTokenInfo(FUNGIBLE_TOKEN).hasTotalSupply(amount), + /* And now make the token contract-controlled so no explicit supply sig is required */ + newKeyNamed(CONTRACT_KEY).shape(DELEGATE_CONTRACT.signedWith(HELLO_WORLD_MINT)), + tokenUpdate(FUNGIBLE_TOKEN).supplyKey(CONTRACT_KEY), + getTokenInfo(FUNGIBLE_TOKEN).logged(), + contractCall(HELLO_WORLD_MINT, "brrr", BigInteger.valueOf(amount)) + .via(SECOND_MINT_TXN), + getTxnRecord(SECOND_MINT_TXN).andAllChildRecords().logged(), + getTokenInfo(FUNGIBLE_TOKEN).hasTotalSupply(2 * amount)) + 
.then(childRecordsCheck( + SECOND_MINT_TXN, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_MINT) + .withStatus(SUCCESS) + .withTotalSupply(2469134L) + .withSerialNumbers())) + .newTotalSupply(2469134L) + .tokenTransfers( + changingFungibleBalances().including(FUNGIBLE_TOKEN, DEFAULT_PAYER, amount)))); + } + + private HapiSpec helloWorldNftMint() { + final AtomicReference nonFungible = new AtomicReference<>(); + + return propertyPreservingHapiSpec("helloWorldNftMint") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + tokenCreate(NON_FUNGIBLE_TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .initialSupply(0) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .exposingCreatedIdTo(idLit -> nonFungible.set(asToken(idLit))), + uploadInitCode(HELLO_WORLD_MINT), + sourcing(() -> contractCreate( + HELLO_WORLD_MINT, HapiParserUtil.asHeadlongAddress(asAddress(nonFungible.get()))))) + .when( + contractCall(HELLO_WORLD_MINT, "mint") + .via(FIRST_MINT_TXN) + .gas(GAS_TO_OFFER) + .alsoSigningWithFullPrefix(MULTI_KEY), + getTxnRecord(FIRST_MINT_TXN).andAllChildRecords().logged(), + getTokenInfo(NON_FUNGIBLE_TOKEN).hasTotalSupply(1), + /* And now make the token contract-controlled so no explicit supply sig is required */ + newKeyNamed(CONTRACT_KEY).shape(DELEGATE_CONTRACT.signedWith(HELLO_WORLD_MINT)), + tokenUpdate(NON_FUNGIBLE_TOKEN).supplyKey(CONTRACT_KEY), + getTokenInfo(NON_FUNGIBLE_TOKEN).logged(), + contractCall(HELLO_WORLD_MINT, "mint") + .via(SECOND_MINT_TXN) + .gas(GAS_TO_OFFER), + getTxnRecord(SECOND_MINT_TXN).andAllChildRecords().logged()) + .then( + getTokenInfo(NON_FUNGIBLE_TOKEN).hasTotalSupply(2), + getTokenNftInfo(NON_FUNGIBLE_TOKEN, 2L).logged(), + childRecordsCheck( + FIRST_MINT_TXN, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_MINT) + .withStatus(SUCCESS) + .withTotalSupply(1) + .withSerialNumbers(1))) + .newTotalSupply(1) + .serialNos(List.of(1L))), + childRecordsCheck( + SECOND_MINT_TXN, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_MINT) + .withStatus(SUCCESS) + .withTotalSupply(2) + .withSerialNumbers(2))) + .newTotalSupply(2) + .serialNos(List.of(2L)))); + } + + private HapiSpec happyPathFungibleTokenMint() { + final var amount = 10L; + final var gasUsed = 14085L; + final AtomicReference fungible = new AtomicReference<>(); + + return propertyPreservingHapiSpec("happyPathFungibleTokenMint") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).payingWith(GENESIS), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(0) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .exposingCreatedIdTo(idLit -> fungible.set(asToken(idLit))), + uploadInitCode(MINT_CONTRACT), + sourcing(() -> contractCreate( + MINT_CONTRACT, HapiParserUtil.asHeadlongAddress(asAddress(fungible.get()))))) + .when( + contractCall(MINT_CONTRACT, MINT_FUNGIBLE_TOKEN_WITH_EVENT, 
BigInteger.valueOf(10)) + .via(FIRST_MINT_TXN) + .gas(GAS_TO_OFFER) + .payingWith(ACCOUNT) + .alsoSigningWithFullPrefix(MULTI_KEY), + getTxnRecord(FIRST_MINT_TXN).andAllChildRecords().logged(), + getTxnRecord(FIRST_MINT_TXN) + .hasPriority(recordWith() + .contractCallResult(resultWith() + .logs(inOrder(logWith() + .noData() + .withTopicsInOrder(List.of( + parsedToByteString(amount), parsedToByteString(0)))))))) + .then( + getTokenInfo(FUNGIBLE_TOKEN).hasTotalSupply(amount), + getAccountBalance(TOKEN_TREASURY).hasTokenBalance(FUNGIBLE_TOKEN, amount), + childRecordsCheck( + FIRST_MINT_TXN, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_MINT) + .withStatus(SUCCESS) + .withTotalSupply(10) + .withSerialNumbers()) + .gasUsed(gasUsed)) + .newTotalSupply(10))); + } + + private HapiSpec happyPathNonFungibleTokenMint() { + final var totalSupply = 2; + final AtomicReference nonFungible = new AtomicReference<>(); + + return propertyPreservingHapiSpec("happyPathNonFungibleTokenMint") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(NON_FUNGIBLE_TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .supplyType(TokenSupplyType.INFINITE) + .initialSupply(0) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .exposingCreatedIdTo(idLit -> nonFungible.set(asToken(idLit))), + uploadInitCode(MINT_CONTRACT), + sourcing(() -> contractCreate( + MINT_CONTRACT, HapiParserUtil.asHeadlongAddress(asAddress(nonFungible.get()))))) + .when( + contractCall(MINT_CONTRACT, "mintNonFungibleTokenWithEvent", (Object) + new byte[][] {TEST_METADATA_1.getBytes(), TEST_METADATA_2.getBytes()}) + .via(FIRST_MINT_TXN) + .payingWith(ACCOUNT) + .gas(GAS_TO_OFFER) + .alsoSigningWithFullPrefix(MULTI_KEY), + getTxnRecord(FIRST_MINT_TXN).andAllChildRecords().logged(), + getTxnRecord(FIRST_MINT_TXN) + .hasPriority(recordWith() + .contractCallResult(resultWith() + .logs(inOrder(logWith() + .noData() + .withTopicsInOrder(List.of( + parsedToByteString(totalSupply), + parsedToByteString(1)))))))) + .then( + getTokenInfo(NON_FUNGIBLE_TOKEN).hasTotalSupply(totalSupply), + getAccountBalance(TOKEN_TREASURY).hasTokenBalance(NON_FUNGIBLE_TOKEN, totalSupply), + childRecordsCheck( + FIRST_MINT_TXN, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_MINT) + .withStatus(SUCCESS) + .withTotalSupply(2L) + .withSerialNumbers(1L, 2L)) + .gasUsed(704226L)) + .newTotalSupply(2) + .serialNos(Arrays.asList(1L, 2L)))); + } + + private HapiSpec rollbackOnFailedAssociateAfterNonFungibleMint() { + final var nestedMintTxn = "nestedMintTxn"; + + return propertyPreservingHapiSpec("rollbackOnFailedAssociateAfterNonFungibleMint") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(ACCOUNT).balance(ONE_HUNDRED_HBARS), + cryptoCreate(RECIPIENT), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(NON_FUNGIBLE_TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .supplyType(TokenSupplyType.INFINITE) + .initialSupply(0) + .treasury(TOKEN_TREASURY) + 
.adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY), + uploadInitCode(MINT_NFT_CONTRACT, NESTED_MINT_CONTRACT), + contractCreate(MINT_NFT_CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + NESTED_MINT_CONTRACT, + asHeadlongAddress(getNestedContractAddress(MINT_NFT_CONTRACT, spec)), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(NON_FUNGIBLE_TOKEN)))) + .gas(GAS_TO_OFFER), + newKeyNamed(DELEGATE_KEY) + .shape(DELEGATE_CONTRACT_KEY_SHAPE.signedWith(sigs(ON, NESTED_MINT_CONTRACT))), + cryptoUpdate(ACCOUNT).key(DELEGATE_KEY), + contractCall( + NESTED_MINT_CONTRACT, + "revertMintAfterFailedAssociate", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + new byte[][] {TEST_METADATA_1.getBytes()}) + .payingWith(GENESIS) + .alsoSigningWithFullPrefix(MULTI_KEY) + .via(nestedMintTxn) + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + getTxnRecord(nestedMintTxn).andAllChildRecords().logged()))) + .then( + getAccountBalance(TOKEN_TREASURY).hasTokenBalance(NON_FUNGIBLE_TOKEN, 0), + childRecordsCheck( + nestedMintTxn, + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(REVERTED_SUCCESS) + .newTotalSupply(0) + .serialNos(List.of()), + recordWith() + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(INVALID_TOKEN_ID))))); + } + + private HapiSpec gasCostNotMetSetsInsufficientGasStatusInChildRecord() { + final var amount = 10L; + final var baselineMintWithEnoughGas = "baselineMintWithEnoughGas"; + + final AtomicLong expectedInsufficientGas = new AtomicLong(); + final AtomicReference fungible = new AtomicReference<>(); + + return propertyPreservingHapiSpec("gasCostNotMetSetsInsufficientGasStatusInChildRecord") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(ACCOUNT).balance(5 * ONE_HUNDRED_HBARS), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(0) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .exposingCreatedIdTo(idLit -> fungible.set(asToken(idLit)))) + .when(uploadInitCode(MINT_CONTRACT), sourcing(() -> contractCreate( + MINT_CONTRACT, HapiParserUtil.asHeadlongAddress(asAddress(fungible.get()))) + .payingWith(ACCOUNT) + .gas(GAS_TO_OFFER))) + .then( + contractCall(MINT_CONTRACT, MINT_FUNGIBLE_TOKEN, BigInteger.valueOf(amount)) + .via(baselineMintWithEnoughGas) + .payingWith(ACCOUNT) + .alsoSigningWithFullPrefix(MULTI_KEY) + .gas(64_000L), + withOpContext((spec, opLog) -> { + final var expectedPrecompileGas = + expectedPrecompileGasFor(spec, TokenMint, TOKEN_FUNGIBLE_COMMON); + final var baselineCostLookup = getTxnRecord(baselineMintWithEnoughGas) + .andAllChildRecords() + .logged() + .assertingNothing(); + allRunFor(spec, baselineCostLookup); + final var baselineGas = baselineCostLookup + .getResponseRecord() + .getContractCallResult() + .getGasUsed(); + expectedInsufficientGas.set(baselineGas - expectedPrecompileGas); + }), + sourcing(() -> contractCall(MINT_CONTRACT, MINT_FUNGIBLE_TOKEN, BigInteger.valueOf(amount)) + .via(FIRST_MINT_TXN) + .payingWith(ACCOUNT) + .alsoSigningWithFullPrefix(MULTI_KEY) + .gas(expectedInsufficientGas.get()) + .hasKnownStatus(INSUFFICIENT_GAS)), + getTxnRecord(FIRST_MINT_TXN).andAllChildRecords().logged(), + getTokenInfo(FUNGIBLE_TOKEN).hasTotalSupply(amount), + 
getAccountBalance(TOKEN_TREASURY).hasTokenBalance(FUNGIBLE_TOKEN, amount), + childRecordsCheck( + FIRST_MINT_TXN, + INSUFFICIENT_GAS, + recordWith() + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_MINT) + .withStatus(INSUFFICIENT_GAS) + .withTotalSupply(0L) + .withSerialNumbers())))); + } + + @Override + protected Logger getResultsLogger() { + return LOG; + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CreatePrecompileSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CreatePrecompileSuite.java index b62075922980..1e84f8b4717e 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CreatePrecompileSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CreatePrecompileSuite.java @@ -16,56 +16,40 @@ package com.hedera.services.bdd.suites.contract.precompile; -import static com.hedera.services.bdd.spec.HapiPropertySource.asTokenString; import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.assertions.AccountInfoAsserts.changeFromSnapshot; -import static com.hedera.services.bdd.spec.keys.KeyShape.CONTRACT; -import static com.hedera.services.bdd.spec.keys.KeyShape.DELEGATE_CONTRACT; import static com.hedera.services.bdd.spec.keys.KeyShape.ED25519; import static com.hedera.services.bdd.spec.keys.KeyShape.SECP256K1; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractInfo; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoDelete; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.balanceSnapshot; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.emptyChildRecordsCheck; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; import static com.hedera.services.bdd.suites.contract.Utils.asAddress; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.ACCOUNT_IS_TREASURY; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_TX_FEE; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_ACCOUNT_ID; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_EXPIRATION_TIME; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.MISSING_TOKEN_SYMBOL; -import static 
com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; -import com.esaulpaugh.headlong.abi.Address; import com.hedera.services.bdd.spec.HapiSpec; import com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts; -import com.hedera.services.bdd.spec.assertions.ContractInfoAsserts; import com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts; import com.hedera.services.bdd.spec.transactions.contract.HapiEthereumCall; import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; import com.hedera.services.bdd.suites.HapiSuite; import com.hedera.services.bdd.suites.contract.Utils; import com.hederahashgraph.api.proto.java.ResponseCodeEnum; -import com.hederahashgraph.api.proto.java.TokenFreezeStatus; -import com.hederahashgraph.api.proto.java.TokenID; -import com.hederahashgraph.api.proto.java.TokenPauseStatus; -import com.hederahashgraph.api.proto.java.TokenSupplyType; -import com.hederahashgraph.api.proto.java.TokenType; import java.util.List; -import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.lang3.ArrayUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -97,7 +81,6 @@ public class CreatePrecompileSuite extends HapiSuite { public static final String ED25519KEY = "ed25519key"; public static final String ECDSA_KEY = "ecdsa"; public static final String EXISTING_TOKEN = "EXISTING_TOKEN"; - private static final String AUTO_RENEW_ACCOUNT = "autoRenewAccount"; public static final String EXPLICIT_CREATE_RESULT = "Explicit create result is {}"; private static final String CREATE_NFT_WITH_KEYS_AND_EXPIRY_FUNCTION = "createNFTTokenWithKeysAndExpiry"; @@ -118,13 +101,8 @@ public List getSpecsInSuite() { List positiveSpecs() { return List.of( - fungibleTokenCreateHappyPath(), - nonFungibleTokenCreateHappyPath(), - fungibleTokenCreateThenQueryAndTransfer(), - nonFungibleTokenCreateThenQuery(), - inheritsSenderAutoRenewAccountIfAnyForNftCreate(), - inheritsSenderAutoRenewAccountForTokenCreate(), - createTokenWithDefaultExpiryAndEmptyKeys()); + // TODO: where are the security model V2 _positive_ tests? 
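+ // (The removed happy-path specs appear to have moved to the V1-security-model
+ // suite, CreatePrecompileV1SecurityModelSuite, which is added later in this patch.)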
+ ); } List negativeSpecs() { @@ -139,479 +117,6 @@ List negativeSpecs() { delegateCallTokenCreateFails()); } - // TEST-001 - private HapiSpec fungibleTokenCreateHappyPath() { - final var tokenCreateContractAsKeyDelegate = "tokenCreateContractAsKeyDelegate"; - final var createTokenNum = new AtomicLong(); - return defaultHapiSpec("fungibleTokenCreateHappyPath") - .given( - newKeyNamed(ED25519KEY).shape(ED25519), - newKeyNamed(ECDSA_KEY).shape(SECP256K1), - newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), - newKeyNamed(CONTRACT_ADMIN_KEY), - cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(ED25519KEY), - cryptoCreate(AUTO_RENEW_ACCOUNT) - .balance(ONE_HUNDRED_HBARS) - .key(ED25519KEY), - cryptoCreate(ACCOUNT_2).balance(ONE_HUNDRED_HBARS).key(ECDSA_KEY), - cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), - uploadInitCode(TOKEN_CREATE_CONTRACT), - contractCreate(TOKEN_CREATE_CONTRACT) - .gas(GAS_TO_OFFER) - .adminKey(CONTRACT_ADMIN_KEY) - .autoRenewAccountId(AUTO_RENEW_ACCOUNT) - .signedBy(CONTRACT_ADMIN_KEY, DEFAULT_PAYER, AUTO_RENEW_ACCOUNT), - getContractInfo(TOKEN_CREATE_CONTRACT) - .has(ContractInfoAsserts.contractWith().autoRenewAccountId(AUTO_RENEW_ACCOUNT)) - .logged()) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_CREATE_CONTRACT, - "createTokenWithKeysAndExpiry", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - spec.registry() - .getKey(ECDSA_KEY) - .getECDSASecp256K1() - .toByteArray(), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_CREATE_CONTRACT))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_CREATE_CONTRACT))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - AUTO_RENEW_PERIOD, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT_TO_ASSOCIATE)))) - .via(FIRST_CREATE_TXN) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .payingWith(ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT_TO_ASSOCIATE_KEY) - .refusingEthConversion() - .exposingResultTo(result -> { - log.info(EXPLICIT_CREATE_RESULT, result[0]); - final var res = (Address) result[0]; - createTokenNum.set(res.value().longValueExact()); - }), - newKeyNamed(TOKEN_CREATE_CONTRACT_AS_KEY).shape(CONTRACT.signedWith(TOKEN_CREATE_CONTRACT)), - newKeyNamed(tokenCreateContractAsKeyDelegate) - .shape(DELEGATE_CONTRACT.signedWith(TOKEN_CREATE_CONTRACT))))) - .then( - getTxnRecord(FIRST_CREATE_TXN).andAllChildRecords().logged(), - getAccountBalance(ACCOUNT).logged(), - getAccountBalance(TOKEN_CREATE_CONTRACT).logged(), - getContractInfo(TOKEN_CREATE_CONTRACT).logged(), - childRecordsCheck( - FIRST_CREATE_TXN, - ResponseCodeEnum.SUCCESS, - TransactionRecordAsserts.recordWith().status(ResponseCodeEnum.SUCCESS), - TransactionRecordAsserts.recordWith().status(ResponseCodeEnum.SUCCESS), - TransactionRecordAsserts.recordWith().status(ResponseCodeEnum.SUCCESS)), - sourcing(() -> - getAccountInfo(ACCOUNT_TO_ASSOCIATE).logged().hasTokenRelationShipCount(1)), - sourcing(() -> getTokenInfo(asTokenString(TokenID.newBuilder() - .setTokenNum(createTokenNum.get()) - .build())) - .logged() - .hasTokenType(TokenType.FUNGIBLE_COMMON) - .hasSymbol(TOKEN_SYMBOL) - .hasName(TOKEN_NAME) - .hasDecimals(8) - .hasTotalSupply(100) - .hasEntityMemo(MEMO) - .hasTreasury(ACCOUNT) - // Token doesn't inherit contract's auto-renew - // account if set in 
tokenCreate - .hasAutoRenewAccount(ACCOUNT) - .hasAutoRenewPeriod(AUTO_RENEW_PERIOD) - .hasSupplyType(TokenSupplyType.INFINITE) - .searchKeysGlobally() - .hasAdminKey(ED25519KEY) - .hasKycKey(ED25519KEY) - .hasFreezeKey(ECDSA_KEY) - .hasWipeKey(ECDSA_KEY) - .hasSupplyKey(TOKEN_CREATE_CONTRACT_AS_KEY) - .hasFeeScheduleKey(tokenCreateContractAsKeyDelegate) - .hasPauseKey(CONTRACT_ADMIN_KEY) - .hasPauseStatus(TokenPauseStatus.Unpaused)), - cryptoDelete(ACCOUNT).hasKnownStatus(ACCOUNT_IS_TREASURY)); - } - - // TEST-002 - - private HapiSpec inheritsSenderAutoRenewAccountIfAnyForNftCreate() { - final var createdNftTokenNum = new AtomicLong(); - return defaultHapiSpec("inheritsSenderAutoRenewAccountIfAnyForNftCreate") - .given( - newKeyNamed(ED25519KEY).shape(ED25519), - newKeyNamed(ECDSA_KEY), - cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(ED25519KEY), - cryptoCreate(AUTO_RENEW_ACCOUNT) - .balance(ONE_HUNDRED_HBARS) - .key(ED25519KEY), - uploadInitCode(TOKEN_CREATE_CONTRACT)) - .when( - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate(TOKEN_CREATE_CONTRACT) - .autoRenewAccountId(AUTO_RENEW_ACCOUNT) - .gas(GAS_TO_OFFER))), - getContractInfo(TOKEN_CREATE_CONTRACT) - .has(ContractInfoAsserts.contractWith().autoRenewAccountId(AUTO_RENEW_ACCOUNT)) - .logged()) - .then(withOpContext((spec, ignore) -> { - final var subop1 = balanceSnapshot(ACCOUNT_BALANCE, ACCOUNT); - final var subop2 = contractCall( - TOKEN_CREATE_CONTRACT, - CREATE_NFT_WITH_KEYS_AND_EXPIRY_FUNCTION, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - HapiParserUtil.asHeadlongAddress(new byte[20]), - AUTO_RENEW_PERIOD) - .via(FIRST_CREATE_TXN) - .gas(GAS_TO_OFFER) - .payingWith(ACCOUNT) - .sending(DEFAULT_AMOUNT_TO_SEND) - .refusingEthConversion() - .exposingResultTo(result -> { - log.info("Explicit create result is" + " {}", result[0]); - final var res = (Address) result[0]; - createdNftTokenNum.set(res.value().longValueExact()); - }) - .hasKnownStatus(SUCCESS); - - allRunFor( - spec, - subop1, - subop2, - childRecordsCheck( - FIRST_CREATE_TXN, - SUCCESS, - TransactionRecordAsserts.recordWith().status(SUCCESS))); - - final var nftInfo = getTokenInfo(asTokenString(TokenID.newBuilder() - .setTokenNum(createdNftTokenNum.get()) - .build())) - .hasAutoRenewAccount(AUTO_RENEW_ACCOUNT) - .logged(); - - allRunFor(spec, nftInfo); - })); - } - - private HapiSpec inheritsSenderAutoRenewAccountForTokenCreate() { - final var createTokenNum = new AtomicLong(); - return defaultHapiSpec("inheritsSenderAutoRenewAccountForTokenCreate") - .given( - newKeyNamed(ED25519KEY).shape(ED25519), - newKeyNamed(ECDSA_KEY).shape(SECP256K1), - newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), - newKeyNamed(CONTRACT_ADMIN_KEY), - cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(ED25519KEY), - cryptoCreate(AUTO_RENEW_ACCOUNT) - .balance(ONE_HUNDRED_HBARS) - .key(ED25519KEY), - cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), - uploadInitCode(TOKEN_CREATE_CONTRACT), - contractCreate(TOKEN_CREATE_CONTRACT) - .gas(GAS_TO_OFFER) - .adminKey(CONTRACT_ADMIN_KEY) - .autoRenewAccountId(AUTO_RENEW_ACCOUNT) // inherits if the tokenCreateOp doesn't - // have - // autoRenewAccount - .signedBy(CONTRACT_ADMIN_KEY, DEFAULT_PAYER, AUTO_RENEW_ACCOUNT), - getContractInfo(TOKEN_CREATE_CONTRACT) - .has(ContractInfoAsserts.contractWith().autoRenewAccountId(AUTO_RENEW_ACCOUNT)) - .logged()) - .when(withOpContext((spec, opLog) -> allRunFor( - 
spec, - contractCall( - TOKEN_CREATE_CONTRACT, - "createTokenWithKeysAndExpiry", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - spec.registry() - .getKey(ECDSA_KEY) - .getECDSASecp256K1() - .toByteArray(), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_CREATE_CONTRACT))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_CREATE_CONTRACT))), - HapiParserUtil.asHeadlongAddress(new byte[20]), // set empty - // autoRenewAccount - AUTO_RENEW_PERIOD, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT_TO_ASSOCIATE)))) - .via(FIRST_CREATE_TXN) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .payingWith(ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT_TO_ASSOCIATE_KEY) - .refusingEthConversion() - .exposingResultTo(result -> { - log.info(EXPLICIT_CREATE_RESULT, result[0]); - final var res = (Address) result[0]; - createTokenNum.set(res.value().longValueExact()); - })))) - .then(sourcing(() -> getTokenInfo(asTokenString(TokenID.newBuilder() - .setTokenNum(createTokenNum.get()) - .build())) - .logged() - .hasAutoRenewAccount(AUTO_RENEW_ACCOUNT) - .hasPauseStatus(TokenPauseStatus.Unpaused))); - } - - // TEST-003 & TEST-019 - private HapiSpec nonFungibleTokenCreateHappyPath() { - final var createdTokenNum = new AtomicLong(); - return defaultHapiSpec("nonFungibleTokenCreateHappyPath") - .given( - newKeyNamed(ED25519KEY).shape(ED25519), - cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(ED25519KEY), - uploadInitCode(TOKEN_CREATE_CONTRACT), - getAccountInfo(DEFAULT_CONTRACT_SENDER).savingSnapshot(DEFAULT_CONTRACT_SENDER)) - .when(withOpContext((spec, opLog) -> - allRunFor(spec, contractCreate(TOKEN_CREATE_CONTRACT).gas(GAS_TO_OFFER)))) - .then(withOpContext((spec, ignore) -> { - final var subop1 = balanceSnapshot(ACCOUNT_BALANCE, ACCOUNT); - final var subop2 = contractCall( - TOKEN_CREATE_CONTRACT, - CREATE_NFT_WITH_KEYS_AND_EXPIRY_FUNCTION, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - AUTO_RENEW_PERIOD) - .via(FIRST_CREATE_TXN) - .gas(GAS_TO_OFFER) - .payingWith(ACCOUNT) - .sending(DEFAULT_AMOUNT_TO_SEND) - .exposingResultTo(result -> { - log.info("Explicit create result is" + " {}", result[0]); - final var res = (Address) result[0]; - createdTokenNum.set(res.value().longValueExact()); - }) - .refusingEthConversion() - .hasKnownStatus(SUCCESS); - final var subop3 = getTxnRecord(FIRST_CREATE_TXN); - allRunFor( - spec, - subop1, - subop2, - subop3, - childRecordsCheck( - FIRST_CREATE_TXN, - SUCCESS, - TransactionRecordAsserts.recordWith().status(SUCCESS))); - - final var delta = subop3.getResponseRecord().getTransactionFee(); - final var effectivePayer = ACCOUNT; - final var subop4 = getAccountBalance(effectivePayer) - .hasTinyBars(changeFromSnapshot(ACCOUNT_BALANCE, -(delta + DEFAULT_AMOUNT_TO_SEND))); - final var contractBalanceCheck = getContractInfo(TOKEN_CREATE_CONTRACT) - .has(ContractInfoAsserts.contractWith() - .balanceGreaterThan(0L) - .balanceLessThan(DEFAULT_AMOUNT_TO_SEND)); - final var getAccountTokenBalance = getAccountBalance(ACCOUNT) - .hasTokenBalance( - asTokenString(TokenID.newBuilder() - .setTokenNum(createdTokenNum.get()) - .build()), - 0); - final 
var tokenInfo = getTokenInfo(asTokenString(TokenID.newBuilder() - .setTokenNum(createdTokenNum.get()) - .build())) - .hasTokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .hasSymbol(TOKEN_SYMBOL) - .hasName(TOKEN_NAME) - .hasDecimals(0) - .hasTotalSupply(0) - .hasEntityMemo(MEMO) - .hasTreasury(ACCOUNT) - .hasAutoRenewAccount(ACCOUNT) - .hasAutoRenewPeriod(AUTO_RENEW_PERIOD) - .hasSupplyType(TokenSupplyType.FINITE) - .hasFreezeDefault(TokenFreezeStatus.Frozen) - .hasMaxSupply(10) - .searchKeysGlobally() - .hasAdminKey(ED25519KEY) - .hasSupplyKey(ED25519KEY) - .hasPauseKey(ED25519KEY) - .hasFreezeKey(ED25519KEY) - .hasKycKey(ED25519KEY) - .hasFeeScheduleKey(ED25519KEY) - .hasWipeKey(ED25519KEY) - .hasPauseStatus(TokenPauseStatus.Unpaused) - .logged(); - allRunFor(spec, subop4, getAccountTokenBalance, tokenInfo, contractBalanceCheck); - })); - } - - // TEST-005 - private HapiSpec fungibleTokenCreateThenQueryAndTransfer() { - final var createdTokenNum = new AtomicLong(); - return defaultHapiSpec("fungibleTokenCreateThenQueryAndTransfer") - .given( - newKeyNamed(ED25519KEY).shape(ED25519), - cryptoCreate(ACCOUNT) - .balance(ONE_MILLION_HBARS) - .key(ED25519KEY) - .maxAutomaticTokenAssociations(1), - uploadInitCode(TOKEN_CREATE_CONTRACT), - contractCreate(TOKEN_CREATE_CONTRACT).gas(GAS_TO_OFFER)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_CREATE_CONTRACT, - "createTokenThenQueryAndTransfer", - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - AUTO_RENEW_PERIOD) - .via(FIRST_CREATE_TXN) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .payingWith(ACCOUNT) - .refusingEthConversion() - .exposingResultTo(result -> { - log.info(EXPLICIT_CREATE_RESULT, result[0]); - final var res = (Address) result[0]; - createdTokenNum.set(res.value().longValueExact()); - }), - newKeyNamed(TOKEN_CREATE_CONTRACT_AS_KEY).shape(CONTRACT.signedWith(TOKEN_CREATE_CONTRACT))))) - .then( - getTxnRecord(FIRST_CREATE_TXN).andAllChildRecords().logged(), - getAccountBalance(ACCOUNT).logged(), - getAccountBalance(TOKEN_CREATE_CONTRACT).logged(), - getContractInfo(TOKEN_CREATE_CONTRACT).logged(), - childRecordsCheck( - FIRST_CREATE_TXN, - ResponseCodeEnum.SUCCESS, - TransactionRecordAsserts.recordWith().status(SUCCESS), - TransactionRecordAsserts.recordWith().status(SUCCESS), - TransactionRecordAsserts.recordWith().status(SUCCESS), - TransactionRecordAsserts.recordWith().status(SUCCESS)), - sourcing(() -> getAccountBalance(ACCOUNT) - .hasTokenBalance( - asTokenString(TokenID.newBuilder() - .setTokenNum(createdTokenNum.get()) - .build()), - 20)), - sourcing(() -> getAccountBalance(TOKEN_CREATE_CONTRACT) - .hasTokenBalance( - asTokenString(TokenID.newBuilder() - .setTokenNum(createdTokenNum.get()) - .build()), - 10)), - sourcing(() -> getTokenInfo(asTokenString(TokenID.newBuilder() - .setTokenNum(createdTokenNum.get()) - .build())) - .hasTokenType(TokenType.FUNGIBLE_COMMON) - .hasSymbol(TOKEN_SYMBOL) - .hasName(TOKEN_NAME) - .hasDecimals(8) - .hasTotalSupply(30) - .hasEntityMemo(MEMO) - .hasTreasury(TOKEN_CREATE_CONTRACT) - .hasAutoRenewAccount(ACCOUNT) - .hasAutoRenewPeriod(AUTO_RENEW_PERIOD) - .hasSupplyType(TokenSupplyType.INFINITE) - .searchKeysGlobally() - .hasAdminKey(ED25519KEY) - .hasSupplyKey(TOKEN_CREATE_CONTRACT_AS_KEY) - .hasPauseKey(TOKEN_CREATE_CONTRACT_AS_KEY) - .hasPauseStatus(TokenPauseStatus.Unpaused) - .logged())); - } - - // TEST-006 - private HapiSpec 
nonFungibleTokenCreateThenQuery() { - final var createdTokenNum = new AtomicLong(); - return defaultHapiSpec("nonFungibleTokenCreateThenQuery") - .given( - cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS), - uploadInitCode(TOKEN_CREATE_CONTRACT), - contractCreate(TOKEN_CREATE_CONTRACT).gas(GAS_TO_OFFER)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_CREATE_CONTRACT, - "createNonFungibleTokenThenQuery", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_CREATE_CONTRACT))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - AUTO_RENEW_PERIOD) - .via(FIRST_CREATE_TXN) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .payingWith(ACCOUNT) - .refusingEthConversion() - .exposingResultTo(result -> { - log.info(EXPLICIT_CREATE_RESULT, result[0]); - final var res = (Address) result[0]; - createdTokenNum.set(res.value().longValueExact()); - }), - newKeyNamed(TOKEN_CREATE_CONTRACT_AS_KEY).shape(CONTRACT.signedWith(TOKEN_CREATE_CONTRACT))))) - .then( - getTxnRecord(FIRST_CREATE_TXN).andAllChildRecords().logged(), - getAccountBalance(ACCOUNT).logged(), - getAccountBalance(TOKEN_CREATE_CONTRACT).logged(), - getContractInfo(TOKEN_CREATE_CONTRACT).logged(), - childRecordsCheck( - FIRST_CREATE_TXN, - ResponseCodeEnum.SUCCESS, - TransactionRecordAsserts.recordWith().status(SUCCESS), - TransactionRecordAsserts.recordWith().status(SUCCESS), - TransactionRecordAsserts.recordWith().status(SUCCESS)), - sourcing(() -> getAccountBalance(TOKEN_CREATE_CONTRACT) - .hasTokenBalance( - asTokenString(TokenID.newBuilder() - .setTokenNum(createdTokenNum.get()) - .build()), - 0)), - sourcing(() -> getTokenInfo(asTokenString(TokenID.newBuilder() - .setTokenNum(createdTokenNum.get()) - .build())) - .hasTokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .hasSymbol(TOKEN_SYMBOL) - .hasName(TOKEN_NAME) - .hasDecimals(0) - .hasTotalSupply(0) - .hasEntityMemo(MEMO) - .hasTreasury(TOKEN_CREATE_CONTRACT) - .hasAutoRenewAccount(ACCOUNT) - .hasAutoRenewPeriod(AUTO_RENEW_PERIOD) - .hasSupplyType(TokenSupplyType.INFINITE) - .searchKeysGlobally() - .hasAdminKey(TOKEN_CREATE_CONTRACT_AS_KEY) - .hasSupplyKey(TOKEN_CREATE_CONTRACT_AS_KEY) - .hasPauseStatus(TokenPauseStatus.PauseNotApplicable) - .logged())); - } - // TEST-007 & TEST-016 private HapiSpec tokenCreateWithEmptyKeysReverts() { return defaultHapiSpec("tokenCreateWithEmptyKeysReverts") @@ -930,50 +435,6 @@ private HapiSpec delegateCallTokenCreateFails() { getContractInfo(TOKEN_CREATE_CONTRACT)); } - private HapiSpec createTokenWithDefaultExpiryAndEmptyKeys() { - final var tokenCreateContractAsKeyDelegate = "createTokenWithDefaultExpiryAndEmptyKeys"; - final var createTokenNum = new AtomicLong(); - return defaultHapiSpec(tokenCreateContractAsKeyDelegate) - .given( - newKeyNamed(ED25519KEY).shape(ED25519), - newKeyNamed(ECDSA_KEY).shape(SECP256K1), - newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), - newKeyNamed(CONTRACT_ADMIN_KEY), - cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(ED25519KEY), - cryptoCreate(AUTO_RENEW_ACCOUNT) - .balance(ONE_HUNDRED_HBARS) - .key(ED25519KEY), - cryptoCreate(ACCOUNT_2).balance(ONE_HUNDRED_HBARS).key(ECDSA_KEY), - cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), - uploadInitCode(TOKEN_CREATE_CONTRACT), - contractCreate(TOKEN_CREATE_CONTRACT) - .gas(GAS_TO_OFFER) - .adminKey(CONTRACT_ADMIN_KEY) - .autoRenewAccountId(AUTO_RENEW_ACCOUNT) - .signedBy(CONTRACT_ADMIN_KEY, DEFAULT_PAYER, AUTO_RENEW_ACCOUNT), - 
getContractInfo(TOKEN_CREATE_CONTRACT) - .has(ContractInfoAsserts.contractWith().autoRenewAccountId(AUTO_RENEW_ACCOUNT)) - .logged()) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall(TOKEN_CREATE_CONTRACT, tokenCreateContractAsKeyDelegate) - .via(FIRST_CREATE_TXN) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .payingWith(ACCOUNT) - .refusingEthConversion() - .alsoSigningWithFullPrefix(ACCOUNT_TO_ASSOCIATE_KEY) - .exposingResultTo(result -> { - log.info(EXPLICIT_CREATE_RESULT, result[0]); - final var res = (Address) result[0]; - createTokenNum.set(res.value().longValueExact()); - }), - newKeyNamed(TOKEN_CREATE_CONTRACT_AS_KEY).shape(CONTRACT.signedWith(TOKEN_CREATE_CONTRACT)), - newKeyNamed(tokenCreateContractAsKeyDelegate) - .shape(DELEGATE_CONTRACT.signedWith(TOKEN_CREATE_CONTRACT))))) - .then(getTxnRecord(FIRST_CREATE_TXN).andAllChildRecords().logged()); - } - @Override protected Logger getResultsLogger() { return log; diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CreatePrecompileV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CreatePrecompileV1SecurityModelSuite.java new file mode 100644 index 000000000000..4633278731c3 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CreatePrecompileV1SecurityModelSuite.java @@ -0,0 +1,662 @@ +/* + * Copyright (C) 2022-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.hedera.services.bdd.suites.contract.precompile;
+
+import static com.hedera.services.bdd.spec.HapiPropertySource.asTokenString;
+import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec;
+import static com.hedera.services.bdd.spec.assertions.AccountInfoAsserts.changeFromSnapshot;
+import static com.hedera.services.bdd.spec.keys.KeyShape.CONTRACT;
+import static com.hedera.services.bdd.spec.keys.KeyShape.DELEGATE_CONTRACT;
+import static com.hedera.services.bdd.spec.keys.KeyShape.ED25519;
+import static com.hedera.services.bdd.spec.keys.KeyShape.SECP256K1;
+import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance;
+import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo;
+import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractInfo;
+import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo;
+import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord;
+import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall;
+import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate;
+import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate;
+import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoDelete;
+import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode;
+import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor;
+import static com.hedera.services.bdd.spec.utilops.UtilVerbs.balanceSnapshot;
+import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck;
+import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed;
+import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overriding;
+import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing;
+import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext;
+import static com.hedera.services.bdd.suites.contract.Utils.asAddress;
+import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS;
+import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.ACCOUNT_IS_TREASURY;
+import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS;
+
+import com.esaulpaugh.headlong.abi.Address;
+import com.hedera.services.bdd.spec.HapiSpec;
+import com.hedera.services.bdd.spec.assertions.ContractInfoAsserts;
+import com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts;
+import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil;
+import com.hedera.services.bdd.suites.HapiSuite;
+import com.hederahashgraph.api.proto.java.ResponseCodeEnum;
+import com.hederahashgraph.api.proto.java.TokenFreezeStatus;
+import com.hederahashgraph.api.proto.java.TokenID;
+import com.hederahashgraph.api.proto.java.TokenPauseStatus;
+import com.hederahashgraph.api.proto.java.TokenSupplyType;
+import com.hederahashgraph.api.proto.java.TokenType;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicLong;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+// Some of the test cases cannot be converted to use eth calls,
+// since they use admin keys, which are held by the txn payer.
+// In the case of an eth txn, we revoke the payer's keys, so the txn would fail.
+// The only way for an eth account to create a token is for the admin key to be of contractId type.
+@SuppressWarnings("java:S1192") // "string literal should not be duplicated" - this rule makes test suites worse
+public class CreatePrecompileV1SecurityModelSuite extends HapiSuite {
+    private static final Logger log = LogManager.getLogger(CreatePrecompileV1SecurityModelSuite.class);
+
+    public static final String ACCOUNT_2 = "account2";
+    public static final String CONTRACT_ADMIN_KEY = "contractAdminKey";
+    public static final String ACCOUNT_TO_ASSOCIATE = "account3";
+    public static final String ACCOUNT_TO_ASSOCIATE_KEY = "associateKey";
+    public static final String FALSE = "false";
+    private static final long GAS_TO_OFFER = 1_000_000L;
+    public static final long AUTO_RENEW_PERIOD = 8_000_000L;
+    public static final String TOKEN_SYMBOL = "tokenSymbol";
+    public static final String TOKEN_NAME = "tokenName";
+    public static final String MEMO = "memo";
+    public static final String TOKEN_CREATE_CONTRACT_AS_KEY = "tokenCreateContractAsKey";
+    private static final String ACCOUNT = "account";
+    public static final String TOKEN_CREATE_CONTRACT = "TokenCreateContract";
+    public static final String FIRST_CREATE_TXN = "firstCreateTxn";
+    private static final String ACCOUNT_BALANCE = "ACCOUNT_BALANCE";
+    public static final long DEFAULT_AMOUNT_TO_SEND = 20 * ONE_HBAR;
+    public static final String ED25519KEY = "ed25519key";
+    public static final String ECDSA_KEY = "ecdsa";
+    private static final String AUTO_RENEW_ACCOUNT = "autoRenewAccount";
+    public static final String EXPLICIT_CREATE_RESULT = "Explicit create result is {}";
+    private static final String CREATE_NFT_WITH_KEYS_AND_EXPIRY_FUNCTION = "createNFTTokenWithKeysAndExpiry";
+
+    public static void main(String... args) {
+        new CreatePrecompileV1SecurityModelSuite().runSuiteSync();
+    }
+
+    @Override
+    public boolean canRunConcurrent() {
+        return false;
+    }
+
+    // TODO: Fix contract name in TokenCreateContract.sol
+    @Override
+    public List<HapiSpec> getSpecsInSuite() {
+        return allOf(positiveSpecs(), negativeSpecs());
+    }
+
+    List<HapiSpec> positiveSpecs() {
+        return List.of(
+                fungibleTokenCreateHappyPath(),
+                nonFungibleTokenCreateHappyPath(),
+                fungibleTokenCreateThenQueryAndTransfer(),
+                nonFungibleTokenCreateThenQuery(),
+                inheritsSenderAutoRenewAccountIfAnyForNftCreate(),
+                inheritsSenderAutoRenewAccountForTokenCreate(),
+                createTokenWithDefaultExpiryAndEmptyKeys());
+    }
+
+    List<HapiSpec> negativeSpecs() {
+        // TODO: Where are the security model v1 _negative_ tests?
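+        // No negative specs for the v1 security model have been migrated yet, so this list is intentionally empty.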
+ return List.of(); + } + + // TEST-001 + private HapiSpec fungibleTokenCreateHappyPath() { + final var tokenCreateContractAsKeyDelegate = "tokenCreateContractAsKeyDelegate"; + final var createTokenNum = new AtomicLong(); + return propertyPreservingHapiSpec("fungibleTokenCreateHappyPath") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ED25519KEY).shape(ED25519), + newKeyNamed(ECDSA_KEY).shape(SECP256K1), + newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), + newKeyNamed(CONTRACT_ADMIN_KEY), + cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(ED25519KEY), + cryptoCreate(AUTO_RENEW_ACCOUNT) + .balance(ONE_HUNDRED_HBARS) + .key(ED25519KEY), + cryptoCreate(ACCOUNT_2).balance(ONE_HUNDRED_HBARS).key(ECDSA_KEY), + cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), + uploadInitCode(TOKEN_CREATE_CONTRACT), + contractCreate(TOKEN_CREATE_CONTRACT) + .gas(GAS_TO_OFFER) + .adminKey(CONTRACT_ADMIN_KEY) + .autoRenewAccountId(AUTO_RENEW_ACCOUNT) + .signedBy(CONTRACT_ADMIN_KEY, DEFAULT_PAYER, AUTO_RENEW_ACCOUNT), + getContractInfo(TOKEN_CREATE_CONTRACT) + .has(ContractInfoAsserts.contractWith().autoRenewAccountId(AUTO_RENEW_ACCOUNT)) + .logged()) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_CREATE_CONTRACT, + "createTokenWithKeysAndExpiry", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + spec.registry() + .getKey(ECDSA_KEY) + .getECDSASecp256K1() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_CREATE_CONTRACT))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_CREATE_CONTRACT))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + AUTO_RENEW_PERIOD, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT_TO_ASSOCIATE)))) + .via(FIRST_CREATE_TXN) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .payingWith(ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT_TO_ASSOCIATE_KEY) + .refusingEthConversion() + .exposingResultTo(result -> { + log.info(EXPLICIT_CREATE_RESULT, result[0]); + final var res = (Address) result[0]; + createTokenNum.set(res.value().longValueExact()); + }), + newKeyNamed(TOKEN_CREATE_CONTRACT_AS_KEY).shape(CONTRACT.signedWith(TOKEN_CREATE_CONTRACT)), + newKeyNamed(tokenCreateContractAsKeyDelegate) + .shape(DELEGATE_CONTRACT.signedWith(TOKEN_CREATE_CONTRACT))))) + .then( + getTxnRecord(FIRST_CREATE_TXN).andAllChildRecords().logged(), + getAccountBalance(ACCOUNT).logged(), + getAccountBalance(TOKEN_CREATE_CONTRACT).logged(), + getContractInfo(TOKEN_CREATE_CONTRACT).logged(), + childRecordsCheck( + FIRST_CREATE_TXN, + ResponseCodeEnum.SUCCESS, + TransactionRecordAsserts.recordWith().status(ResponseCodeEnum.SUCCESS), + TransactionRecordAsserts.recordWith().status(ResponseCodeEnum.SUCCESS), + TransactionRecordAsserts.recordWith().status(ResponseCodeEnum.SUCCESS)), + sourcing(() -> + getAccountInfo(ACCOUNT_TO_ASSOCIATE).logged().hasTokenRelationShipCount(1)), + sourcing(() -> getTokenInfo(asTokenString(TokenID.newBuilder() + .setTokenNum(createTokenNum.get()) + .build())) + .logged() + .hasTokenType(TokenType.FUNGIBLE_COMMON) + .hasSymbol(TOKEN_SYMBOL) + .hasName(TOKEN_NAME) + .hasDecimals(8) + .hasTotalSupply(100) + .hasEntityMemo(MEMO) + 
.hasTreasury(ACCOUNT) + // Token doesn't inherit contract's auto-renew + // account if set in tokenCreate + .hasAutoRenewAccount(ACCOUNT) + .hasAutoRenewPeriod(AUTO_RENEW_PERIOD) + .hasSupplyType(TokenSupplyType.INFINITE) + .searchKeysGlobally() + .hasAdminKey(ED25519KEY) + .hasKycKey(ED25519KEY) + .hasFreezeKey(ECDSA_KEY) + .hasWipeKey(ECDSA_KEY) + .hasSupplyKey(TOKEN_CREATE_CONTRACT_AS_KEY) + .hasFeeScheduleKey(tokenCreateContractAsKeyDelegate) + .hasPauseKey(CONTRACT_ADMIN_KEY) + .hasPauseStatus(TokenPauseStatus.Unpaused)), + cryptoDelete(ACCOUNT).hasKnownStatus(ACCOUNT_IS_TREASURY)); + } + + // TEST-002 + + private HapiSpec inheritsSenderAutoRenewAccountIfAnyForNftCreate() { + final var createdNftTokenNum = new AtomicLong(); + return propertyPreservingHapiSpec("inheritsSenderAutoRenewAccountIfAnyForNftCreate") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ED25519KEY).shape(ED25519), + newKeyNamed(ECDSA_KEY), + cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(ED25519KEY), + cryptoCreate(AUTO_RENEW_ACCOUNT) + .balance(ONE_HUNDRED_HBARS) + .key(ED25519KEY), + uploadInitCode(TOKEN_CREATE_CONTRACT)) + .when( + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate(TOKEN_CREATE_CONTRACT) + .autoRenewAccountId(AUTO_RENEW_ACCOUNT) + .gas(GAS_TO_OFFER))), + getContractInfo(TOKEN_CREATE_CONTRACT) + .has(ContractInfoAsserts.contractWith().autoRenewAccountId(AUTO_RENEW_ACCOUNT)) + .logged()) + .then(withOpContext((spec, ignore) -> { + final var subop1 = balanceSnapshot(ACCOUNT_BALANCE, ACCOUNT); + final var subop2 = contractCall( + TOKEN_CREATE_CONTRACT, + CREATE_NFT_WITH_KEYS_AND_EXPIRY_FUNCTION, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + HapiParserUtil.asHeadlongAddress(new byte[20]), + AUTO_RENEW_PERIOD) + .via(FIRST_CREATE_TXN) + .gas(GAS_TO_OFFER) + .payingWith(ACCOUNT) + .sending(DEFAULT_AMOUNT_TO_SEND) + .refusingEthConversion() + .exposingResultTo(result -> { + log.info("Explicit create result is" + " {}", result[0]); + final var res = (Address) result[0]; + createdNftTokenNum.set(res.value().longValueExact()); + }) + .hasKnownStatus(SUCCESS); + + allRunFor( + spec, + subop1, + subop2, + childRecordsCheck( + FIRST_CREATE_TXN, + SUCCESS, + TransactionRecordAsserts.recordWith().status(SUCCESS))); + + final var nftInfo = getTokenInfo(asTokenString(TokenID.newBuilder() + .setTokenNum(createdNftTokenNum.get()) + .build())) + .hasAutoRenewAccount(AUTO_RENEW_ACCOUNT) + .logged(); + + allRunFor(spec, nftInfo); + })); + } + + private HapiSpec inheritsSenderAutoRenewAccountForTokenCreate() { + final var createTokenNum = new AtomicLong(); + return propertyPreservingHapiSpec("inheritsSenderAutoRenewAccountForTokenCreate") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ED25519KEY).shape(ED25519), + newKeyNamed(ECDSA_KEY).shape(SECP256K1), + newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), + newKeyNamed(CONTRACT_ADMIN_KEY), + cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(ED25519KEY), + cryptoCreate(AUTO_RENEW_ACCOUNT) + .balance(ONE_HUNDRED_HBARS) + .key(ED25519KEY), + cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), + uploadInitCode(TOKEN_CREATE_CONTRACT), + contractCreate(TOKEN_CREATE_CONTRACT) + 
.gas(GAS_TO_OFFER) + .adminKey(CONTRACT_ADMIN_KEY) + .autoRenewAccountId(AUTO_RENEW_ACCOUNT) // inherits if the tokenCreateOp doesn't + // have + // autoRenewAccount + .signedBy(CONTRACT_ADMIN_KEY, DEFAULT_PAYER, AUTO_RENEW_ACCOUNT), + getContractInfo(TOKEN_CREATE_CONTRACT) + .has(ContractInfoAsserts.contractWith().autoRenewAccountId(AUTO_RENEW_ACCOUNT)) + .logged()) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_CREATE_CONTRACT, + "createTokenWithKeysAndExpiry", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + spec.registry() + .getKey(ECDSA_KEY) + .getECDSASecp256K1() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_CREATE_CONTRACT))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_CREATE_CONTRACT))), + HapiParserUtil.asHeadlongAddress(new byte[20]), // set empty + // autoRenewAccount + AUTO_RENEW_PERIOD, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT_TO_ASSOCIATE)))) + .via(FIRST_CREATE_TXN) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .payingWith(ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT_TO_ASSOCIATE_KEY) + .refusingEthConversion() + .exposingResultTo(result -> { + log.info(EXPLICIT_CREATE_RESULT, result[0]); + final var res = (Address) result[0]; + createTokenNum.set(res.value().longValueExact()); + })))) + .then(sourcing(() -> getTokenInfo(asTokenString(TokenID.newBuilder() + .setTokenNum(createTokenNum.get()) + .build())) + .logged() + .hasAutoRenewAccount(AUTO_RENEW_ACCOUNT) + .hasPauseStatus(TokenPauseStatus.Unpaused))); + } + + // TEST-003 & TEST-019 + private HapiSpec nonFungibleTokenCreateHappyPath() { + final var createdTokenNum = new AtomicLong(); + return propertyPreservingHapiSpec("nonFungibleTokenCreateHappyPath") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ED25519KEY).shape(ED25519), + cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(ED25519KEY), + uploadInitCode(TOKEN_CREATE_CONTRACT), + getAccountInfo(DEFAULT_CONTRACT_SENDER).savingSnapshot(DEFAULT_CONTRACT_SENDER)) + .when(withOpContext((spec, opLog) -> + allRunFor(spec, contractCreate(TOKEN_CREATE_CONTRACT).gas(GAS_TO_OFFER)))) + .then(withOpContext((spec, ignore) -> { + final var subop1 = balanceSnapshot(ACCOUNT_BALANCE, ACCOUNT); + final var subop2 = contractCall( + TOKEN_CREATE_CONTRACT, + CREATE_NFT_WITH_KEYS_AND_EXPIRY_FUNCTION, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + AUTO_RENEW_PERIOD) + .via(FIRST_CREATE_TXN) + .gas(GAS_TO_OFFER) + .payingWith(ACCOUNT) + .sending(DEFAULT_AMOUNT_TO_SEND) + .exposingResultTo(result -> { + log.info("Explicit create result is" + " {}", result[0]); + final var res = (Address) result[0]; + createdTokenNum.set(res.value().longValueExact()); + }) + .refusingEthConversion() + .hasKnownStatus(SUCCESS); + final var subop3 = getTxnRecord(FIRST_CREATE_TXN); + allRunFor( + spec, + subop1, + subop2, + subop3, + childRecordsCheck( + FIRST_CREATE_TXN, + SUCCESS, + TransactionRecordAsserts.recordWith().status(SUCCESS))); + + final var delta = 
subop3.getResponseRecord().getTransactionFee(); + final var effectivePayer = ACCOUNT; + final var subop4 = getAccountBalance(effectivePayer) + .hasTinyBars(changeFromSnapshot(ACCOUNT_BALANCE, -(delta + DEFAULT_AMOUNT_TO_SEND))); + final var contractBalanceCheck = getContractInfo(TOKEN_CREATE_CONTRACT) + .has(ContractInfoAsserts.contractWith() + .balanceGreaterThan(0L) + .balanceLessThan(DEFAULT_AMOUNT_TO_SEND)); + final var getAccountTokenBalance = getAccountBalance(ACCOUNT) + .hasTokenBalance( + asTokenString(TokenID.newBuilder() + .setTokenNum(createdTokenNum.get()) + .build()), + 0); + final var tokenInfo = getTokenInfo(asTokenString(TokenID.newBuilder() + .setTokenNum(createdTokenNum.get()) + .build())) + .hasTokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .hasSymbol(TOKEN_SYMBOL) + .hasName(TOKEN_NAME) + .hasDecimals(0) + .hasTotalSupply(0) + .hasEntityMemo(MEMO) + .hasTreasury(ACCOUNT) + .hasAutoRenewAccount(ACCOUNT) + .hasAutoRenewPeriod(AUTO_RENEW_PERIOD) + .hasSupplyType(TokenSupplyType.FINITE) + .hasFreezeDefault(TokenFreezeStatus.Frozen) + .hasMaxSupply(10) + .searchKeysGlobally() + .hasAdminKey(ED25519KEY) + .hasSupplyKey(ED25519KEY) + .hasPauseKey(ED25519KEY) + .hasFreezeKey(ED25519KEY) + .hasKycKey(ED25519KEY) + .hasFeeScheduleKey(ED25519KEY) + .hasWipeKey(ED25519KEY) + .hasPauseStatus(TokenPauseStatus.Unpaused) + .logged(); + allRunFor(spec, subop4, getAccountTokenBalance, tokenInfo, contractBalanceCheck); + })); + } + + // TEST-005 + private HapiSpec fungibleTokenCreateThenQueryAndTransfer() { + final var createdTokenNum = new AtomicLong(); + return propertyPreservingHapiSpec("fungibleTokenCreateThenQueryAndTransfer") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ED25519KEY).shape(ED25519), + cryptoCreate(ACCOUNT) + .balance(ONE_MILLION_HBARS) + .key(ED25519KEY) + .maxAutomaticTokenAssociations(1), + uploadInitCode(TOKEN_CREATE_CONTRACT), + contractCreate(TOKEN_CREATE_CONTRACT).gas(GAS_TO_OFFER)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_CREATE_CONTRACT, + "createTokenThenQueryAndTransfer", + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + AUTO_RENEW_PERIOD) + .via(FIRST_CREATE_TXN) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .payingWith(ACCOUNT) + .refusingEthConversion() + .exposingResultTo(result -> { + log.info(EXPLICIT_CREATE_RESULT, result[0]); + final var res = (Address) result[0]; + createdTokenNum.set(res.value().longValueExact()); + }), + newKeyNamed(TOKEN_CREATE_CONTRACT_AS_KEY).shape(CONTRACT.signedWith(TOKEN_CREATE_CONTRACT))))) + .then( + getTxnRecord(FIRST_CREATE_TXN).andAllChildRecords().logged(), + getAccountBalance(ACCOUNT).logged(), + getAccountBalance(TOKEN_CREATE_CONTRACT).logged(), + getContractInfo(TOKEN_CREATE_CONTRACT).logged(), + childRecordsCheck( + FIRST_CREATE_TXN, + ResponseCodeEnum.SUCCESS, + TransactionRecordAsserts.recordWith().status(SUCCESS), + TransactionRecordAsserts.recordWith().status(SUCCESS), + TransactionRecordAsserts.recordWith().status(SUCCESS), + TransactionRecordAsserts.recordWith().status(SUCCESS)), + sourcing(() -> getAccountBalance(ACCOUNT) + .hasTokenBalance( + asTokenString(TokenID.newBuilder() + .setTokenNum(createdTokenNum.get()) + .build()), + 20)), + sourcing(() -> getAccountBalance(TOKEN_CREATE_CONTRACT) + .hasTokenBalance( + 
asTokenString(TokenID.newBuilder() + .setTokenNum(createdTokenNum.get()) + .build()), + 10)), + sourcing(() -> getTokenInfo(asTokenString(TokenID.newBuilder() + .setTokenNum(createdTokenNum.get()) + .build())) + .hasTokenType(TokenType.FUNGIBLE_COMMON) + .hasSymbol(TOKEN_SYMBOL) + .hasName(TOKEN_NAME) + .hasDecimals(8) + .hasTotalSupply(30) + .hasEntityMemo(MEMO) + .hasTreasury(TOKEN_CREATE_CONTRACT) + .hasAutoRenewAccount(ACCOUNT) + .hasAutoRenewPeriod(AUTO_RENEW_PERIOD) + .hasSupplyType(TokenSupplyType.INFINITE) + .searchKeysGlobally() + .hasAdminKey(ED25519KEY) + .hasSupplyKey(TOKEN_CREATE_CONTRACT_AS_KEY) + .hasPauseKey(TOKEN_CREATE_CONTRACT_AS_KEY) + .hasPauseStatus(TokenPauseStatus.Unpaused) + .logged())); + } + + // TEST-006 + private HapiSpec nonFungibleTokenCreateThenQuery() { + final var createdTokenNum = new AtomicLong(); + return propertyPreservingHapiSpec("nonFungibleTokenCreateThenQuery") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS), + uploadInitCode(TOKEN_CREATE_CONTRACT), + contractCreate(TOKEN_CREATE_CONTRACT).gas(GAS_TO_OFFER)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_CREATE_CONTRACT, + "createNonFungibleTokenThenQuery", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_CREATE_CONTRACT))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + AUTO_RENEW_PERIOD) + .via(FIRST_CREATE_TXN) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .payingWith(ACCOUNT) + .refusingEthConversion() + .exposingResultTo(result -> { + log.info(EXPLICIT_CREATE_RESULT, result[0]); + final var res = (Address) result[0]; + createdTokenNum.set(res.value().longValueExact()); + }), + newKeyNamed(TOKEN_CREATE_CONTRACT_AS_KEY).shape(CONTRACT.signedWith(TOKEN_CREATE_CONTRACT))))) + .then( + getTxnRecord(FIRST_CREATE_TXN).andAllChildRecords().logged(), + getAccountBalance(ACCOUNT).logged(), + getAccountBalance(TOKEN_CREATE_CONTRACT).logged(), + getContractInfo(TOKEN_CREATE_CONTRACT).logged(), + childRecordsCheck( + FIRST_CREATE_TXN, + ResponseCodeEnum.SUCCESS, + TransactionRecordAsserts.recordWith().status(SUCCESS), + TransactionRecordAsserts.recordWith().status(SUCCESS), + TransactionRecordAsserts.recordWith().status(SUCCESS)), + sourcing(() -> getAccountBalance(TOKEN_CREATE_CONTRACT) + .hasTokenBalance( + asTokenString(TokenID.newBuilder() + .setTokenNum(createdTokenNum.get()) + .build()), + 0)), + sourcing(() -> getTokenInfo(asTokenString(TokenID.newBuilder() + .setTokenNum(createdTokenNum.get()) + .build())) + .hasTokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .hasSymbol(TOKEN_SYMBOL) + .hasName(TOKEN_NAME) + .hasDecimals(0) + .hasTotalSupply(0) + .hasEntityMemo(MEMO) + .hasTreasury(TOKEN_CREATE_CONTRACT) + .hasAutoRenewAccount(ACCOUNT) + .hasAutoRenewPeriod(AUTO_RENEW_PERIOD) + .hasSupplyType(TokenSupplyType.INFINITE) + .searchKeysGlobally() + .hasAdminKey(TOKEN_CREATE_CONTRACT_AS_KEY) + .hasSupplyKey(TOKEN_CREATE_CONTRACT_AS_KEY) + .hasPauseStatus(TokenPauseStatus.PauseNotApplicable) + .logged())); + } + + private HapiSpec createTokenWithDefaultExpiryAndEmptyKeys() { + final var tokenCreateContractAsKeyDelegate = "createTokenWithDefaultExpiryAndEmptyKeys"; + final var createTokenNum = new AtomicLong(); + return propertyPreservingHapiSpec("createTokenWithDefaultExpiryAndEmptyKeys") + 
.preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ED25519KEY).shape(ED25519), + newKeyNamed(ECDSA_KEY).shape(SECP256K1), + newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), + newKeyNamed(CONTRACT_ADMIN_KEY), + cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(ED25519KEY), + cryptoCreate(AUTO_RENEW_ACCOUNT) + .balance(ONE_HUNDRED_HBARS) + .key(ED25519KEY), + cryptoCreate(ACCOUNT_2).balance(ONE_HUNDRED_HBARS).key(ECDSA_KEY), + cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), + uploadInitCode(TOKEN_CREATE_CONTRACT), + contractCreate(TOKEN_CREATE_CONTRACT) + .gas(GAS_TO_OFFER) + .adminKey(CONTRACT_ADMIN_KEY) + .autoRenewAccountId(AUTO_RENEW_ACCOUNT) + .signedBy(CONTRACT_ADMIN_KEY, DEFAULT_PAYER, AUTO_RENEW_ACCOUNT), + getContractInfo(TOKEN_CREATE_CONTRACT) + .has(ContractInfoAsserts.contractWith().autoRenewAccountId(AUTO_RENEW_ACCOUNT)) + .logged()) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall(TOKEN_CREATE_CONTRACT, tokenCreateContractAsKeyDelegate) + .via(FIRST_CREATE_TXN) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .payingWith(ACCOUNT) + .refusingEthConversion() + .alsoSigningWithFullPrefix(ACCOUNT_TO_ASSOCIATE_KEY) + .exposingResultTo(result -> { + log.info(EXPLICIT_CREATE_RESULT, result[0]); + final var res = (Address) result[0]; + createTokenNum.set(res.value().longValueExact()); + }), + newKeyNamed(TOKEN_CREATE_CONTRACT_AS_KEY).shape(CONTRACT.signedWith(TOKEN_CREATE_CONTRACT)), + newKeyNamed(tokenCreateContractAsKeyDelegate) + .shape(DELEGATE_CONTRACT.signedWith(TOKEN_CREATE_CONTRACT))))) + .then(getTxnRecord(FIRST_CREATE_TXN).andAllChildRecords().logged()); + } + + @Override + protected Logger getResultsLogger() { + return log; + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CryptoTransferHTSSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CryptoTransferHTSSuite.java index d7ed3935afe2..065bef71f174 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CryptoTransferHTSSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CryptoTransferHTSSuite.java @@ -28,7 +28,6 @@ import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountDetails; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractInfo; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; @@ -65,7 +64,6 @@ import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.AMOUNT_EXCEEDS_ALLOWANCE; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SPENDER_DOES_NOT_HAVE_ALLOWANCE; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; 
import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; @@ -76,7 +74,6 @@ import com.hedera.node.app.hapi.utils.ByteStringUtils; import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; import com.hedera.services.bdd.spec.HapiSpec; -import com.hedera.services.bdd.spec.assertions.ContractInfoAsserts; import com.hedera.services.bdd.spec.assertions.NonFungibleTransfers; import com.hedera.services.bdd.spec.assertions.SomeFungibleTransfers; import com.hedera.services.bdd.spec.keys.KeyShape; @@ -118,10 +115,6 @@ public class CryptoTransferHTSSuite extends HapiSuite { private static final ByteString META2 = ByteStringUtils.wrapUnsafely("meta2".getBytes()); private static final ByteString META3 = ByteStringUtils.wrapUnsafely("meta3".getBytes()); private static final ByteString META4 = ByteStringUtils.wrapUnsafely("meta4".getBytes()); - private static final ByteString META5 = ByteStringUtils.wrapUnsafely("meta5".getBytes()); - private static final ByteString META6 = ByteStringUtils.wrapUnsafely("meta6".getBytes()); - private static final ByteString META7 = ByteStringUtils.wrapUnsafely("meta7".getBytes()); - private static final ByteString META8 = ByteStringUtils.wrapUnsafely("meta8".getBytes()); private static final String NFT_TOKEN_WITH_FIXED_HBAR_FEE = "nftTokenWithFixedHbarFee"; private static final String NFT_TOKEN_WITH_FIXED_TOKEN_FEE = "nftTokenWithFixedTokenFee"; private static final String NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK = @@ -147,20 +140,15 @@ public boolean canRunConcurrent() { @Override public List getSpecsInSuite() { return List.of( - nonNestedCryptoTransferForFungibleToken(), nonNestedCryptoTransferForFungibleTokenWithMultipleReceivers(), nonNestedCryptoTransferForNonFungibleToken(), nonNestedCryptoTransferForMultipleNonFungibleTokens(), nonNestedCryptoTransferForFungibleAndNonFungibleToken(), nonNestedCryptoTransferForFungibleTokenWithMultipleSendersAndReceiversAndNonFungibleTokens(), repeatedTokenIdsAreAutomaticallyConsolidated(), - activeContractInFrameIsVerifiedWithoutNeedForSignature(), hapiTransferFromForFungibleToken(), hapiTransferFromForNFT(), - cryptoTransferNFTsWithCustomFeesMixedScenario(), hapiTransferFromForNFTWithCustomFeesWithoutApproveFails(), - hapiTransferFromForNFTWithCustomFeesWithApproveForAll(), - hapiTransferFromForNFTWithCustomFeesWithBothApproveForAllAndAssignedSpender(), hapiTransferFromForFungibleTokenWithCustomFeesWithoutApproveFails(), hapiTransferFromForFungibleTokenWithCustomFeesWithBothApproveForAllAndAssignedSpender()); } @@ -513,82 +501,6 @@ private HapiSpec repeatedTokenIdsAreAutomaticallyConsolidated() { .including(FUNGIBLE_TOKEN, RECEIVER, 2 * toSendEachTuple)))); } - private HapiSpec nonNestedCryptoTransferForFungibleToken() { - final var cryptoTransferTxn = CRYPTO_TRANSFER_TXN; - - return defaultHapiSpec("NonNestedCryptoTransferForFungibleToken") - .given( - cryptoCreate(SENDER).balance(10 * ONE_HUNDRED_HBARS), - cryptoCreate(RECEIVER).balance(2 * ONE_HUNDRED_HBARS).receiverSigRequired(true), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(FUNGIBLE_TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(TOTAL_SUPPLY) - .treasury(TOKEN_TREASURY), - tokenAssociate(SENDER, List.of(FUNGIBLE_TOKEN)), - tokenAssociate(RECEIVER, List.of(FUNGIBLE_TOKEN)), - cryptoTransfer(moving(200, FUNGIBLE_TOKEN).between(TOKEN_TREASURY, SENDER)), - uploadInitCode(CONTRACT), - contractCreate(CONTRACT).maxAutomaticTokenAssociations(1), - getContractInfo(CONTRACT) - 
.has(ContractInfoAsserts.contractWith().maxAutoAssociations(1)) - .logged()) - .when( - withOpContext((spec, opLog) -> { - final var token = spec.registry().getTokenID(FUNGIBLE_TOKEN); - final var sender = spec.registry().getAccountID(SENDER); - final var receiver = spec.registry().getAccountID(RECEIVER); - final var amountToBeSent = 50L; - - allRunFor( - spec, - newKeyNamed(DELEGATE_KEY) - .shape(DELEGATE_CONTRACT_KEY_SHAPE.signedWith(sigs(ON, CONTRACT))), - cryptoUpdate(SENDER).key(DELEGATE_KEY), - cryptoUpdate(RECEIVER).key(DELEGATE_KEY), - contractCall(CONTRACT, TRANSFER_MULTIPLE_TOKENS, (Object) new Tuple[] { - tokenTransferList() - .forToken(token) - .withAccountAmounts( - accountAmount(sender, -amountToBeSent), - accountAmount(receiver, amountToBeSent)) - .build() - }) - .payingWith(GENESIS) - .via(cryptoTransferTxn) - .gas(GAS_TO_OFFER), - contractCall(CONTRACT, TRANSFER_MULTIPLE_TOKENS, (Object) new Tuple[] { - tokenTransferList() - .forToken(token) - .withAccountAmounts( - accountAmount(sender, -0L), accountAmount(receiver, 0L)) - .build() - }) - .payingWith(GENESIS) - .via("cryptoTransferZero") - .gas(GAS_TO_OFFER)); - }), - getTxnRecord(cryptoTransferTxn).andAllChildRecords().logged(), - getTxnRecord("cryptoTransferZero").andAllChildRecords().logged()) - .then( - getTokenInfo(FUNGIBLE_TOKEN).hasTotalSupply(TOTAL_SUPPLY), - getAccountBalance(RECEIVER).hasTokenBalance(FUNGIBLE_TOKEN, 50), - getAccountBalance(SENDER).hasTokenBalance(FUNGIBLE_TOKEN, 150), - getTokenInfo(FUNGIBLE_TOKEN).logged(), - childRecordsCheck( - cryptoTransferTxn, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS)) - .gasUsed(14085L)) - .tokenTransfers(SomeFungibleTransfers.changingFungibleBalances() - .including(FUNGIBLE_TOKEN, SENDER, -50) - .including(FUNGIBLE_TOKEN, RECEIVER, 50)))); - } - private HapiSpec nonNestedCryptoTransferForFungibleTokenWithMultipleReceivers() { final var cryptoTransferTxn = CRYPTO_TRANSFER_TXN; @@ -1028,178 +940,6 @@ private HapiSpec nonNestedCryptoTransferForFungibleTokenWithMultipleSendersAndRe .including(NFT_TOKEN, SENDER2, RECEIVER2, 2L)))); } - private HapiSpec activeContractInFrameIsVerifiedWithoutNeedForSignature() { - final var revertedFungibleTransferTxn = "revertedFungibleTransferTxn"; - final var successfulFungibleTransferTxn = "successfulFungibleTransferTxn"; - final var revertedNftTransferTxn = "revertedNftTransferTxn"; - final var successfulNftTransferTxn = "successfulNftTransferTxn"; - final var senderStartBalance = 200L; - final var receiverStartBalance = 0L; - final var toSendEachTuple = 50L; - final var multiKey = MULTI_KEY; - final var senderKey = "senderKey"; - final var contractKey = "contractAdminKey"; - - return defaultHapiSpec("activeContractInFrameIsVerifiedWithoutNeedForSignature") - .given( - newKeyNamed(multiKey), - newKeyNamed(senderKey), - newKeyNamed(contractKey), - cryptoCreate(SENDER).balance(10 * ONE_HUNDRED_HBARS).key(senderKey), - cryptoCreate(RECEIVER).balance(2 * ONE_HUNDRED_HBARS), - cryptoCreate(TOKEN_TREASURY), - uploadInitCode(CONTRACT), - contractCreate(CONTRACT) - .payingWith(GENESIS) - .adminKey(contractKey) - .gas(GAS_TO_OFFER), - tokenCreate(FUNGIBLE_TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(TOTAL_SUPPLY) - .treasury(TOKEN_TREASURY), - tokenCreate(NFT_TOKEN) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .adminKey(multiKey) - .supplyKey(multiKey) - .supplyType(TokenSupplyType.INFINITE) - .initialSupply(0) - 
.treasury(TOKEN_TREASURY), - mintToken(NFT_TOKEN, List.of(metadata(FIRST_MEMO), metadata(SECOND_MEMO))), - tokenAssociate(SENDER, List.of(FUNGIBLE_TOKEN, NFT_TOKEN)), - tokenAssociate(RECEIVER, List.of(FUNGIBLE_TOKEN, NFT_TOKEN)), - tokenAssociate(CONTRACT, List.of(FUNGIBLE_TOKEN, NFT_TOKEN)), - cryptoTransfer( - moving(senderStartBalance, FUNGIBLE_TOKEN).between(TOKEN_TREASURY, SENDER)), - cryptoTransfer(movingUnique(NFT_TOKEN, 1L).between(TOKEN_TREASURY, SENDER)), - cryptoTransfer( - moving(senderStartBalance, FUNGIBLE_TOKEN).between(TOKEN_TREASURY, CONTRACT), - movingUnique(NFT_TOKEN, 2L).between(TOKEN_TREASURY, CONTRACT))) - .when(withOpContext((spec, opLog) -> { - final var token = spec.registry().getTokenID(FUNGIBLE_TOKEN); - final var nftToken = spec.registry().getTokenID(NFT_TOKEN); - final var sender = spec.registry().getAccountID(SENDER); - final var receiver = spec.registry().getAccountID(RECEIVER); - final var contractId = spec.registry().getAccountID(CONTRACT); - allRunFor( - spec, - contractCall(CONTRACT, TRANSFER_MULTIPLE_TOKENS, (Object) new Tuple[] { - tokenTransferList() - .forToken(token) - .withAccountAmounts( - accountAmount(contractId, -toSendEachTuple), - accountAmount(receiver, toSendEachTuple)) - .build(), - tokenTransferList() - .forToken(token) - .withAccountAmounts( - accountAmount(sender, -toSendEachTuple), - accountAmount(receiver, toSendEachTuple)) - .build() - }) - .payingWith(GENESIS) - .via(revertedFungibleTransferTxn) - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall(CONTRACT, TRANSFER_MULTIPLE_TOKENS, (Object) new Tuple[] { - tokenTransferList() - .forToken(token) - .withAccountAmounts( - accountAmount(contractId, -toSendEachTuple), - accountAmount(receiver, toSendEachTuple)) - .build(), - tokenTransferList() - .forToken(token) - .withAccountAmounts( - accountAmount(sender, -toSendEachTuple), - accountAmount(receiver, toSendEachTuple)) - .build() - }) - .payingWith(GENESIS) - .alsoSigningWithFullPrefix(senderKey) - .via(successfulFungibleTransferTxn) - .gas(GAS_TO_OFFER) - .hasKnownStatus(SUCCESS), - contractCall(CONTRACT, TRANSFER_MULTIPLE_TOKENS, (Object) new Tuple[] { - tokenTransferList() - .forToken(nftToken) - .withNftTransfers(nftTransfer(contractId, receiver, 2L)) - .build(), - tokenTransferList() - .forToken(nftToken) - .withNftTransfers(nftTransfer(sender, receiver, 1L)) - .build() - }) - .payingWith(GENESIS) - .via(revertedNftTransferTxn) - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall(CONTRACT, TRANSFER_MULTIPLE_TOKENS, (Object) new Tuple[] { - tokenTransferList() - .forToken(nftToken) - .withNftTransfers(nftTransfer(contractId, receiver, 2L)) - .build(), - tokenTransferList() - .forToken(nftToken) - .withNftTransfers(nftTransfer(sender, receiver, 1L)) - .build() - }) - .payingWith(GENESIS) - .via(successfulNftTransferTxn) - .alsoSigningWithFullPrefix(senderKey) - .gas(GAS_TO_OFFER) - .hasKnownStatus(SUCCESS)); - })) - .then( - getAccountBalance(RECEIVER) - .hasTokenBalance(FUNGIBLE_TOKEN, receiverStartBalance + 2 * toSendEachTuple) - .hasTokenBalance(NFT_TOKEN, 2L), - getAccountBalance(SENDER) - .hasTokenBalance(FUNGIBLE_TOKEN, senderStartBalance - toSendEachTuple) - .hasTokenBalance(NFT_TOKEN, 0L), - getAccountBalance(CONTRACT) - .hasTokenBalance(FUNGIBLE_TOKEN, senderStartBalance - toSendEachTuple) - .hasTokenBalance(NFT_TOKEN, 0L), - childRecordsCheck( - revertedFungibleTransferTxn, - CONTRACT_REVERT_EXECUTED, - recordWith() - 
.status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), - childRecordsCheck( - successfulFungibleTransferTxn, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))) - .tokenTransfers(SomeFungibleTransfers.changingFungibleBalances() - .including(FUNGIBLE_TOKEN, SENDER, -toSendEachTuple) - .including(FUNGIBLE_TOKEN, CONTRACT, -toSendEachTuple) - .including(FUNGIBLE_TOKEN, RECEIVER, 2 * toSendEachTuple))), - childRecordsCheck( - revertedNftTransferTxn, - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), - childRecordsCheck( - successfulNftTransferTxn, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))) - .tokenTransfers(NonFungibleTransfers.changingNFTBalances() - .including(NFT_TOKEN, SENDER, RECEIVER, 1L) - .including(NFT_TOKEN, CONTRACT, RECEIVER, 2L)))); - } - private HapiSpec hapiTransferFromForNFTWithCustomFeesWithoutApproveFails() { return defaultHapiSpec("HapiTransferFromForNFTWithCustomFeesWithoutApproveFails") .given( @@ -1342,462 +1082,6 @@ private HapiSpec hapiTransferFromForNFTWithCustomFeesWithoutApproveFails() { .then(); } - private HapiSpec cryptoTransferNFTsWithCustomFeesMixedScenario() { - final var SPENDER_SIGNATURE = "spenderSignature"; - return defaultHapiSpec("CryptoTransferNFTsWithCustomFeesMixedScenario") - .given( - newKeyNamed(MULTI_KEY), - newKeyNamed(RECEIVER_SIGNATURE), - newKeyNamed(SPENDER_SIGNATURE), - uploadInitCode(CONTRACT), - contractCreate(CONTRACT), - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(OWNER) - .balance(ONE_HUNDRED_HBARS) - .maxAutomaticTokenAssociations(5) - .key(MULTI_KEY), - cryptoCreate(SENDER).balance(ONE_HUNDRED_HBARS), - cryptoCreate(RECEIVER).balance(ONE_HUNDRED_HBARS).key(RECEIVER_SIGNATURE), - tokenCreate(NFT_TOKEN_WITH_FIXED_HBAR_FEE) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(OWNER) - .initialSupply(0L) - .supplyKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .withCustom(fixedHbarFee(1, OWNER)), - tokenCreate(FUNGIBLE_TOKEN_FEE) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .initialSupply(1000L), - tokenAssociate(CONTRACT, FUNGIBLE_TOKEN_FEE), - tokenAssociate(OWNER, FUNGIBLE_TOKEN_FEE), - tokenAssociate(RECEIVER, FUNGIBLE_TOKEN_FEE), - tokenCreate(NFT_TOKEN_WITH_FIXED_TOKEN_FEE) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(OWNER) - .initialSupply(0L) - .supplyKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .withCustom(fixedHtsFee(1, FUNGIBLE_TOKEN_FEE, OWNER)), - tokenCreate(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(OWNER) - .initialSupply(0L) - .supplyKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .withCustom( - royaltyFeeWithFallback(1, 2, fixedHbarFeeInheritingRoyaltyCollector(1), OWNER)), - tokenCreate(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(OWNER) - .initialSupply(0L) - .supplyKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .withCustom(royaltyFeeWithFallback( - 1, 2, fixedHtsFeeInheritingRoyaltyCollector(1, FUNGIBLE_TOKEN_FEE), OWNER)), - tokenAssociate( - CONTRACT, - List.of( - NFT_TOKEN_WITH_FIXED_HBAR_FEE, - 
NFT_TOKEN_WITH_FIXED_TOKEN_FEE, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK)), - tokenAssociate( - RECEIVER, - List.of( - NFT_TOKEN_WITH_FIXED_HBAR_FEE, - NFT_TOKEN_WITH_FIXED_TOKEN_FEE, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK)), - mintToken(NFT_TOKEN_WITH_FIXED_HBAR_FEE, List.of(META1, META2)), - mintToken(NFT_TOKEN_WITH_FIXED_TOKEN_FEE, List.of(META3, META4)), - mintToken(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, List.of(META5, META6)), - mintToken(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, List.of(META7, META8)), - cryptoTransfer( - movingUnique(NFT_TOKEN_WITH_FIXED_HBAR_FEE, 1L).between(OWNER, CONTRACT)), - cryptoTransfer( - movingUnique(NFT_TOKEN_WITH_FIXED_TOKEN_FEE, 1L).between(OWNER, CONTRACT)), - cryptoTransfer(movingUnique(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, 1L) - .between(OWNER, CONTRACT)), - cryptoTransfer(movingUnique(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, 1L) - .between(OWNER, CONTRACT)), - cryptoTransfer(moving(1L, FUNGIBLE_TOKEN_FEE).between(TOKEN_TREASURY, CONTRACT)), - cryptoTransfer(moving(1L, FUNGIBLE_TOKEN_FEE).between(TOKEN_TREASURY, RECEIVER)), - cryptoTransfer(TokenMovement.movingHbar(100L).between(OWNER, CONTRACT))) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - CONTRACT, - TRANSFER_MULTIPLE_TOKENS, - tokenTransferLists() - .withTokenTransferList( - tokenTransferList() - .forToken(spec.registry() - .getTokenID(NFT_TOKEN_WITH_FIXED_HBAR_FEE)) - .withNftTransfers(nftTransfer( - spec.registry() - .getAccountID(CONTRACT), - spec.registry() - .getAccountID(RECEIVER), - 1L)) - .build(), - tokenTransferList() - .forToken(spec.registry() - .getTokenID(NFT_TOKEN_WITH_FIXED_TOKEN_FEE)) - .withNftTransfers(nftTransfer( - spec.registry() - .getAccountID(CONTRACT), - spec.registry() - .getAccountID(RECEIVER), - 1L)) - .build(), - tokenTransferList() - .forToken( - spec.registry() - .getTokenID( - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK)) - .withNftTransfers(nftTransfer( - spec.registry() - .getAccountID(CONTRACT), - spec.registry() - .getAccountID(RECEIVER), - 1L)) - .build(), - tokenTransferList() - .forToken( - spec.registry() - .getTokenID( - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK)) - .withNftTransfers(nftTransfer( - spec.registry() - .getAccountID(CONTRACT), - spec.registry() - .getAccountID(RECEIVER), - 1L)) - .build()) - .build()) - .payingWith(GENESIS) - .alsoSigningWithFullPrefix(RECEIVER_SIGNATURE) - .gas(1_000_000L)))) - .then(); - } - - private HapiSpec hapiTransferFromForNFTWithCustomFeesWithApproveForAll() { - return defaultHapiSpec("HapiTransferFromForNFTWithCustomFeesWithApproveForAll") - .given( - newKeyNamed(MULTI_KEY), - newKeyNamed(RECEIVER_SIGNATURE), - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(OWNER) - .balance(ONE_HUNDRED_HBARS) - .maxAutomaticTokenAssociations(5) - .key(MULTI_KEY), - cryptoCreate(SENDER).balance(ONE_HUNDRED_HBARS), - cryptoCreate(RECEIVER).balance(ONE_HUNDRED_HBARS).key(RECEIVER_SIGNATURE), - tokenCreate(NFT_TOKEN_WITH_FIXED_HBAR_FEE) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(OWNER) - .initialSupply(0L) - .supplyKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .withCustom(fixedHbarFee(1, OWNER)), - tokenCreate(FUNGIBLE_TOKEN_FEE) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .initialSupply(1000L), - tokenAssociate(SENDER, FUNGIBLE_TOKEN_FEE), - tokenAssociate(OWNER, FUNGIBLE_TOKEN_FEE), - tokenAssociate(RECEIVER, FUNGIBLE_TOKEN_FEE), - 
tokenCreate(NFT_TOKEN_WITH_FIXED_TOKEN_FEE) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(OWNER) - .initialSupply(0L) - .supplyKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .withCustom(fixedHtsFee(1, FUNGIBLE_TOKEN_FEE, OWNER)), - tokenCreate(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(OWNER) - .initialSupply(0L) - .supplyKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .withCustom( - royaltyFeeWithFallback(1, 2, fixedHbarFeeInheritingRoyaltyCollector(1), OWNER)), - tokenCreate(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(OWNER) - .initialSupply(0L) - .supplyKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .withCustom(royaltyFeeWithFallback( - 1, 2, fixedHtsFeeInheritingRoyaltyCollector(1, FUNGIBLE_TOKEN_FEE), OWNER)), - tokenAssociate( - SENDER, - List.of( - NFT_TOKEN_WITH_FIXED_HBAR_FEE, - NFT_TOKEN_WITH_FIXED_TOKEN_FEE, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK)), - tokenAssociate( - RECEIVER, - List.of( - NFT_TOKEN_WITH_FIXED_HBAR_FEE, - NFT_TOKEN_WITH_FIXED_TOKEN_FEE, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK)), - mintToken(NFT_TOKEN_WITH_FIXED_HBAR_FEE, List.of(META1, META2)), - mintToken(NFT_TOKEN_WITH_FIXED_TOKEN_FEE, List.of(META3, META4)), - mintToken(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, List.of(META5, META6)), - mintToken(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, List.of(META7, META8)), - cryptoTransfer( - movingUnique(NFT_TOKEN_WITH_FIXED_HBAR_FEE, 1L).between(OWNER, SENDER)), - cryptoTransfer( - movingUnique(NFT_TOKEN_WITH_FIXED_TOKEN_FEE, 1L).between(OWNER, SENDER)), - cryptoTransfer(movingUnique(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, 1L) - .between(OWNER, SENDER)), - cryptoTransfer(movingUnique(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, 1L) - .between(OWNER, SENDER)), - uploadInitCode(HTS_TRANSFER_FROM_CONTRACT), - contractCreate(HTS_TRANSFER_FROM_CONTRACT), - cryptoTransfer(moving(1L, FUNGIBLE_TOKEN_FEE).between(TOKEN_TREASURY, SENDER)), - cryptoTransfer(moving(1L, FUNGIBLE_TOKEN_FEE).between(TOKEN_TREASURY, RECEIVER)), - cryptoApproveAllowance() - .payingWith(DEFAULT_PAYER) - .addNftAllowance( - SENDER, - NFT_TOKEN_WITH_FIXED_HBAR_FEE, - HTS_TRANSFER_FROM_CONTRACT, - true, - List.of()) - .addNftAllowance( - SENDER, - NFT_TOKEN_WITH_FIXED_TOKEN_FEE, - HTS_TRANSFER_FROM_CONTRACT, - true, - List.of()) - .addNftAllowance( - SENDER, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, - HTS_TRANSFER_FROM_CONTRACT, - true, - List.of()) - .addNftAllowance( - SENDER, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, - HTS_TRANSFER_FROM_CONTRACT, - true, - List.of()) - .via(APPROVE_TXN) - .signedBy(DEFAULT_PAYER, SENDER)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - HTS_TRANSFER_FROM_CONTRACT, - HTS_TRANSFER_FROM_NFT, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(NFT_TOKEN_WITH_FIXED_HBAR_FEE))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(SENDER))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(RECEIVER))), - BigInteger.valueOf(1L)) - .payingWith(GENESIS), - contractCall( - HTS_TRANSFER_FROM_CONTRACT, - HTS_TRANSFER_FROM_NFT, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(NFT_TOKEN_WITH_FIXED_TOKEN_FEE))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(SENDER))), - 
HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(RECEIVER))), - BigInteger.valueOf(1L)) - .payingWith(GENESIS), - contractCall( - HTS_TRANSFER_FROM_CONTRACT, - HTS_TRANSFER_FROM_NFT, - HapiParserUtil.asHeadlongAddress(asAddress(spec.registry() - .getTokenID(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(SENDER))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(RECEIVER))), - BigInteger.valueOf(1L)) - .payingWith(GENESIS) - .alsoSigningWithFullPrefix(RECEIVER_SIGNATURE), - contractCall( - HTS_TRANSFER_FROM_CONTRACT, - HTS_TRANSFER_FROM_NFT, - HapiParserUtil.asHeadlongAddress(asAddress(spec.registry() - .getTokenID(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(SENDER))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(RECEIVER))), - BigInteger.valueOf(1L)) - .payingWith(GENESIS) - .alsoSigningWithFullPrefix(RECEIVER_SIGNATURE)))) - .then(); - } - - private HapiSpec hapiTransferFromForNFTWithCustomFeesWithBothApproveForAllAndAssignedSpender() { - return defaultHapiSpec("HapiTransferFromForNFTWithCustomFeesWithBothApproveForAllAndAssignedSpender") - .given( - newKeyNamed(MULTI_KEY), - newKeyNamed(RECEIVER_SIGNATURE), - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(OWNER) - .balance(ONE_HUNDRED_HBARS) - .maxAutomaticTokenAssociations(5) - .key(MULTI_KEY), - cryptoCreate(SENDER).balance(ONE_HUNDRED_HBARS), - cryptoCreate(RECEIVER).balance(ONE_HUNDRED_HBARS).key(RECEIVER_SIGNATURE), - tokenCreate(NFT_TOKEN_WITH_FIXED_HBAR_FEE) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(OWNER) - .initialSupply(0L) - .supplyKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .withCustom(fixedHbarFee(1, OWNER)), - tokenCreate(FUNGIBLE_TOKEN_FEE) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .initialSupply(1000L), - tokenAssociate(SENDER, FUNGIBLE_TOKEN_FEE), - tokenAssociate(OWNER, FUNGIBLE_TOKEN_FEE), - tokenAssociate(RECEIVER, FUNGIBLE_TOKEN_FEE), - tokenCreate(NFT_TOKEN_WITH_FIXED_TOKEN_FEE) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(OWNER) - .initialSupply(0L) - .supplyKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .withCustom(fixedHtsFee(1, FUNGIBLE_TOKEN_FEE, OWNER)), - tokenCreate(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(OWNER) - .initialSupply(0L) - .supplyKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .withCustom( - royaltyFeeWithFallback(1, 2, fixedHbarFeeInheritingRoyaltyCollector(1), OWNER)), - tokenCreate(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(OWNER) - .initialSupply(0L) - .supplyKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .withCustom(royaltyFeeWithFallback( - 1, 2, fixedHtsFeeInheritingRoyaltyCollector(1, FUNGIBLE_TOKEN_FEE), OWNER)), - tokenAssociate( - SENDER, - List.of( - NFT_TOKEN_WITH_FIXED_HBAR_FEE, - NFT_TOKEN_WITH_FIXED_TOKEN_FEE, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK)), - tokenAssociate( - RECEIVER, - List.of( - NFT_TOKEN_WITH_FIXED_HBAR_FEE, - NFT_TOKEN_WITH_FIXED_TOKEN_FEE, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK)), - mintToken(NFT_TOKEN_WITH_FIXED_HBAR_FEE, List.of(META1, META2)), - mintToken(NFT_TOKEN_WITH_FIXED_TOKEN_FEE, List.of(META3, META4)), - mintToken(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, List.of(META5, META6)), - 
mintToken(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, List.of(META7, META8)), - cryptoTransfer( - movingUnique(NFT_TOKEN_WITH_FIXED_HBAR_FEE, 1L).between(OWNER, SENDER)), - cryptoTransfer( - movingUnique(NFT_TOKEN_WITH_FIXED_TOKEN_FEE, 1L).between(OWNER, SENDER)), - cryptoTransfer(movingUnique(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, 1L) - .between(OWNER, SENDER)), - cryptoTransfer(movingUnique(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, 1L) - .between(OWNER, SENDER)), - uploadInitCode(HTS_TRANSFER_FROM_CONTRACT), - contractCreate(HTS_TRANSFER_FROM_CONTRACT), - cryptoTransfer(moving(1L, FUNGIBLE_TOKEN_FEE).between(TOKEN_TREASURY, SENDER)), - cryptoTransfer(moving(1L, FUNGIBLE_TOKEN_FEE).between(TOKEN_TREASURY, RECEIVER)), - cryptoApproveAllowance() - .payingWith(DEFAULT_PAYER) - .addNftAllowance( - SENDER, - NFT_TOKEN_WITH_FIXED_HBAR_FEE, - HTS_TRANSFER_FROM_CONTRACT, - true, - List.of(1L)) - .addNftAllowance( - SENDER, - NFT_TOKEN_WITH_FIXED_TOKEN_FEE, - HTS_TRANSFER_FROM_CONTRACT, - true, - List.of(1L)) - .addNftAllowance( - SENDER, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, - HTS_TRANSFER_FROM_CONTRACT, - true, - List.of(1L)) - .addNftAllowance( - SENDER, - NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, - HTS_TRANSFER_FROM_CONTRACT, - true, - List.of(1L)) - .via(APPROVE_TXN) - .signedBy(DEFAULT_PAYER, SENDER)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - HTS_TRANSFER_FROM_CONTRACT, - HTS_TRANSFER_FROM_NFT, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(NFT_TOKEN_WITH_FIXED_HBAR_FEE))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(SENDER))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(RECEIVER))), - BigInteger.valueOf(1L)) - .payingWith(GENESIS), - contractCall( - HTS_TRANSFER_FROM_CONTRACT, - HTS_TRANSFER_FROM_NFT, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(NFT_TOKEN_WITH_FIXED_TOKEN_FEE))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(SENDER))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(RECEIVER))), - BigInteger.valueOf(1L)) - .payingWith(GENESIS), - contractCall( - HTS_TRANSFER_FROM_CONTRACT, - HTS_TRANSFER_FROM_NFT, - HapiParserUtil.asHeadlongAddress(asAddress(spec.registry() - .getTokenID(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(SENDER))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(RECEIVER))), - BigInteger.valueOf(1L)) - .payingWith(GENESIS) - .alsoSigningWithFullPrefix(RECEIVER_SIGNATURE), - contractCall( - HTS_TRANSFER_FROM_CONTRACT, - HTS_TRANSFER_FROM_NFT, - HapiParserUtil.asHeadlongAddress(asAddress(spec.registry() - .getTokenID(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(SENDER))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(RECEIVER))), - BigInteger.valueOf(1L)) - .payingWith(GENESIS) - .alsoSigningWithFullPrefix(RECEIVER_SIGNATURE)))) - .then(); - } - private HapiSpec hapiTransferFromForFungibleTokenWithCustomFeesWithoutApproveFails() { final var FUNGIBLE_TOKEN_WITH_FIXED_HBAR_FEE = "fungibleTokenWithFixedHbarFee"; final var FUNGIBLE_TOKEN_WITH_FIXED_TOKEN_FEE = "fungibleTokenWithFixedTokenFee"; diff --git 
a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CryptoTransferHTSV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CryptoTransferHTSV1SecurityModelSuite.java new file mode 100644 index 000000000000..0fc2b308f8ed --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/CryptoTransferHTSV1SecurityModelSuite.java @@ -0,0 +1,858 @@ +/* + * Copyright (C) 2021-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.keys.KeyShape.DELEGATE_CONTRACT; +import static com.hedera.services.bdd.spec.keys.KeyShape.sigs; +import static com.hedera.services.bdd.spec.keys.SigControl.ON; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoApproveAllowance; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoUpdate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.fixedHbarFee; +import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.fixedHbarFeeInheritingRoyaltyCollector; +import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.fixedHtsFee; +import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.fixedHtsFeeInheritingRoyaltyCollector; +import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.royaltyFeeWithFallback; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static 
com.hedera.services.bdd.spec.utilops.UtilVerbs.accountAmount; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.nftTransfer; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overriding; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.tokenTransferList; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.tokenTransferLists; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.utils.MiscEETUtils.metadata; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; +import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; +import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; + +import com.esaulpaugh.headlong.abi.Tuple; +import com.google.protobuf.ByteString; +import com.hedera.node.app.hapi.utils.ByteStringUtils; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.assertions.NonFungibleTransfers; +import com.hedera.services.bdd.spec.assertions.SomeFungibleTransfers; +import com.hedera.services.bdd.spec.keys.KeyShape; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.spec.transactions.token.TokenMovement; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hederahashgraph.api.proto.java.TokenSupplyType; +import com.hederahashgraph.api.proto.java.TokenType; +import java.math.BigInteger; +import java.util.List; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +@SuppressWarnings("java:S1192") // "string literal should not be duplicated" - this rule makes test suites worse +public class CryptoTransferHTSV1SecurityModelSuite extends HapiSuite { + private static final Logger log = LogManager.getLogger(CryptoTransferHTSV1SecurityModelSuite.class); + + private static final long GAS_TO_OFFER = 4_000_000L; + public static final long TOTAL_SUPPLY = 1_000; + private static final String FUNGIBLE_TOKEN = "TokenA"; + private static final String NFT_TOKEN = "Token_NFT"; + + private static final String RECEIVER = "receiver"; + private static final String SENDER = "sender"; + private static final KeyShape DELEGATE_CONTRACT_KEY_SHAPE = + KeyShape.threshOf(1, KeyShape.SIMPLE, DELEGATE_CONTRACT); + + public static final String DELEGATE_KEY = "contractKey"; + private static final String CONTRACT = "CryptoTransfer"; + private static final String MULTI_KEY = "purpose"; + private static final String HTS_TRANSFER_FROM_CONTRACT = "HtsTransferFrom"; + private static final String OWNER = "Owner"; + private static final String HTS_TRANSFER_FROM_NFT = "htsTransferFromNFT"; + public static final String 
TRANSFER_MULTIPLE_TOKENS = "transferMultipleTokens"; + private static final ByteString META1 = ByteStringUtils.wrapUnsafely("meta1".getBytes()); + private static final ByteString META2 = ByteStringUtils.wrapUnsafely("meta2".getBytes()); + private static final ByteString META3 = ByteStringUtils.wrapUnsafely("meta3".getBytes()); + private static final ByteString META4 = ByteStringUtils.wrapUnsafely("meta4".getBytes()); + private static final ByteString META5 = ByteStringUtils.wrapUnsafely("meta5".getBytes()); + private static final ByteString META6 = ByteStringUtils.wrapUnsafely("meta6".getBytes()); + private static final ByteString META7 = ByteStringUtils.wrapUnsafely("meta7".getBytes()); + private static final ByteString META8 = ByteStringUtils.wrapUnsafely("meta8".getBytes()); + private static final String NFT_TOKEN_WITH_FIXED_HBAR_FEE = "nftTokenWithFixedHbarFee"; + private static final String NFT_TOKEN_WITH_FIXED_TOKEN_FEE = "nftTokenWithFixedTokenFee"; + private static final String NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK = + "nftTokenWithRoyaltyFeeWithHbarFallback"; + private static final String NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK = + "nftTokenWithRoyaltyFeeWithTokenFallback"; + private static final String FUNGIBLE_TOKEN_FEE = "fungibleTokenFee"; + private static final String RECEIVER_SIGNATURE = "receiverSignature"; + private static final String APPROVE_TXN = "approveTxn"; + private static final String FIRST_MEMO = "firstMemo"; + private static final String SECOND_MEMO = "secondMemo"; + private static final String CRYPTO_TRANSFER_TXN = "cryptoTransferTxn"; + + public static void main(final String... args) { + new CryptoTransferHTSV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + public List getSpecsInSuite() { + return List.of( + nonNestedCryptoTransferForFungibleToken(), + activeContractInFrameIsVerifiedWithoutNeedForSignature(), + cryptoTransferNFTsWithCustomFeesMixedScenario(), + hapiTransferFromForNFTWithCustomFeesWithApproveForAll(), + hapiTransferFromForNFTWithCustomFeesWithBothApproveForAllAndAssignedSpender()); + } + + private HapiSpec nonNestedCryptoTransferForFungibleToken() { + final var cryptoTransferTxn = CRYPTO_TRANSFER_TXN; + + return propertyPreservingHapiSpec("nonNestedCryptoTransferForFungibleToken") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(SENDER).balance(10 * ONE_HUNDRED_HBARS), + cryptoCreate(RECEIVER).balance(2 * ONE_HUNDRED_HBARS).receiverSigRequired(true), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(TOTAL_SUPPLY) + .treasury(TOKEN_TREASURY), + tokenAssociate(SENDER, List.of(FUNGIBLE_TOKEN)), + tokenAssociate(RECEIVER, List.of(FUNGIBLE_TOKEN)), + cryptoTransfer(moving(200, FUNGIBLE_TOKEN).between(TOKEN_TREASURY, SENDER)), + uploadInitCode(CONTRACT), + contractCreate(CONTRACT)) + .when( + withOpContext((spec, opLog) -> { + final var token = spec.registry().getTokenID(FUNGIBLE_TOKEN); + final var sender = spec.registry().getAccountID(SENDER); + final var receiver = spec.registry().getAccountID(RECEIVER); + final var amountToBeSent = 50L; + + allRunFor( + spec, + newKeyNamed(DELEGATE_KEY) + .shape(DELEGATE_CONTRACT_KEY_SHAPE.signedWith(sigs(ON, CONTRACT))), + cryptoUpdate(SENDER).key(DELEGATE_KEY), + cryptoUpdate(RECEIVER).key(DELEGATE_KEY), + 
contractCall(CONTRACT, TRANSFER_MULTIPLE_TOKENS, (Object) new Tuple[] { + tokenTransferList() + .forToken(token) + .withAccountAmounts( + accountAmount(sender, -amountToBeSent), + accountAmount(receiver, amountToBeSent)) + .build() + }) + .payingWith(GENESIS) + .via(cryptoTransferTxn) + .gas(GAS_TO_OFFER), + contractCall(CONTRACT, TRANSFER_MULTIPLE_TOKENS, (Object) new Tuple[] { + tokenTransferList() + .forToken(token) + .withAccountAmounts( + accountAmount(sender, -0L), accountAmount(receiver, 0L)) + .build() + }) + .payingWith(GENESIS) + .via("cryptoTransferZero") + .gas(GAS_TO_OFFER)); + }), + getTxnRecord(cryptoTransferTxn).andAllChildRecords().logged(), + getTxnRecord("cryptoTransferZero").andAllChildRecords().logged()) + .then( + getTokenInfo(FUNGIBLE_TOKEN).hasTotalSupply(TOTAL_SUPPLY), + getAccountBalance(RECEIVER).hasTokenBalance(FUNGIBLE_TOKEN, 50), + getAccountBalance(SENDER).hasTokenBalance(FUNGIBLE_TOKEN, 150), + getTokenInfo(FUNGIBLE_TOKEN).logged(), + childRecordsCheck( + cryptoTransferTxn, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS)) + .gasUsed(14085L)) + .tokenTransfers(SomeFungibleTransfers.changingFungibleBalances() + .including(FUNGIBLE_TOKEN, SENDER, -50) + .including(FUNGIBLE_TOKEN, RECEIVER, 50)))); + } + + private HapiSpec activeContractInFrameIsVerifiedWithoutNeedForSignature() { + final var revertedFungibleTransferTxn = "revertedFungibleTransferTxn"; + final var successfulFungibleTransferTxn = "successfulFungibleTransferTxn"; + final var revertedNftTransferTxn = "revertedNftTransferTxn"; + final var successfulNftTransferTxn = "successfulNftTransferTxn"; + final var senderStartBalance = 200L; + final var receiverStartBalance = 0L; + final var toSendEachTuple = 50L; + final var multiKey = MULTI_KEY; + final var senderKey = "senderKey"; + final var contractKey = "contractAdminKey"; + + return propertyPreservingHapiSpec("activeContractInFrameIsVerifiedWithoutNeedForSignature") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(multiKey), + newKeyNamed(senderKey), + newKeyNamed(contractKey), + cryptoCreate(SENDER).balance(10 * ONE_HUNDRED_HBARS).key(senderKey), + cryptoCreate(RECEIVER).balance(2 * ONE_HUNDRED_HBARS), + cryptoCreate(TOKEN_TREASURY), + uploadInitCode(CONTRACT), + contractCreate(CONTRACT) + .payingWith(GENESIS) + .adminKey(contractKey) + .gas(GAS_TO_OFFER), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(TOTAL_SUPPLY) + .treasury(TOKEN_TREASURY), + tokenCreate(NFT_TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .adminKey(multiKey) + .supplyKey(multiKey) + .supplyType(TokenSupplyType.INFINITE) + .initialSupply(0) + .treasury(TOKEN_TREASURY), + mintToken(NFT_TOKEN, List.of(metadata(FIRST_MEMO), metadata(SECOND_MEMO))), + tokenAssociate(SENDER, List.of(FUNGIBLE_TOKEN, NFT_TOKEN)), + tokenAssociate(RECEIVER, List.of(FUNGIBLE_TOKEN, NFT_TOKEN)), + tokenAssociate(CONTRACT, List.of(FUNGIBLE_TOKEN, NFT_TOKEN)), + cryptoTransfer( + moving(senderStartBalance, FUNGIBLE_TOKEN).between(TOKEN_TREASURY, SENDER)), + cryptoTransfer(movingUnique(NFT_TOKEN, 1L).between(TOKEN_TREASURY, SENDER)), + cryptoTransfer( + moving(senderStartBalance, FUNGIBLE_TOKEN).between(TOKEN_TREASURY, CONTRACT), + movingUnique(NFT_TOKEN, 2L).between(TOKEN_TREASURY, CONTRACT))) + .when(withOpContext((spec, opLog) -> { + final var 
token = spec.registry().getTokenID(FUNGIBLE_TOKEN); + final var nftToken = spec.registry().getTokenID(NFT_TOKEN); + final var sender = spec.registry().getAccountID(SENDER); + final var receiver = spec.registry().getAccountID(RECEIVER); + final var contractId = spec.registry().getAccountID(CONTRACT); + allRunFor( + spec, + contractCall(CONTRACT, TRANSFER_MULTIPLE_TOKENS, (Object) new Tuple[] { + tokenTransferList() + .forToken(token) + .withAccountAmounts( + accountAmount(contractId, -toSendEachTuple), + accountAmount(receiver, toSendEachTuple)) + .build(), + tokenTransferList() + .forToken(token) + .withAccountAmounts( + accountAmount(sender, -toSendEachTuple), + accountAmount(receiver, toSendEachTuple)) + .build() + }) + .payingWith(GENESIS) + .via(revertedFungibleTransferTxn) + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall(CONTRACT, TRANSFER_MULTIPLE_TOKENS, (Object) new Tuple[] { + tokenTransferList() + .forToken(token) + .withAccountAmounts( + accountAmount(contractId, -toSendEachTuple), + accountAmount(receiver, toSendEachTuple)) + .build(), + tokenTransferList() + .forToken(token) + .withAccountAmounts( + accountAmount(sender, -toSendEachTuple), + accountAmount(receiver, toSendEachTuple)) + .build() + }) + .payingWith(GENESIS) + .alsoSigningWithFullPrefix(senderKey) + .via(successfulFungibleTransferTxn) + .gas(GAS_TO_OFFER) + .hasKnownStatus(SUCCESS), + contractCall(CONTRACT, TRANSFER_MULTIPLE_TOKENS, (Object) new Tuple[] { + tokenTransferList() + .forToken(nftToken) + .withNftTransfers(nftTransfer(contractId, receiver, 2L)) + .build(), + tokenTransferList() + .forToken(nftToken) + .withNftTransfers(nftTransfer(sender, receiver, 1L)) + .build() + }) + .payingWith(GENESIS) + .via(revertedNftTransferTxn) + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall(CONTRACT, TRANSFER_MULTIPLE_TOKENS, (Object) new Tuple[] { + tokenTransferList() + .forToken(nftToken) + .withNftTransfers(nftTransfer(contractId, receiver, 2L)) + .build(), + tokenTransferList() + .forToken(nftToken) + .withNftTransfers(nftTransfer(sender, receiver, 1L)) + .build() + }) + .payingWith(GENESIS) + .via(successfulNftTransferTxn) + .alsoSigningWithFullPrefix(senderKey) + .gas(GAS_TO_OFFER) + .hasKnownStatus(SUCCESS)); + })) + .then( + getAccountBalance(RECEIVER) + .hasTokenBalance(FUNGIBLE_TOKEN, receiverStartBalance + 2 * toSendEachTuple) + .hasTokenBalance(NFT_TOKEN, 2L), + getAccountBalance(SENDER) + .hasTokenBalance(FUNGIBLE_TOKEN, senderStartBalance - toSendEachTuple) + .hasTokenBalance(NFT_TOKEN, 0L), + getAccountBalance(CONTRACT) + .hasTokenBalance(FUNGIBLE_TOKEN, senderStartBalance - toSendEachTuple) + .hasTokenBalance(NFT_TOKEN, 0L), + childRecordsCheck( + revertedFungibleTransferTxn, + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), + childRecordsCheck( + successfulFungibleTransferTxn, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))) + .tokenTransfers(SomeFungibleTransfers.changingFungibleBalances() + .including(FUNGIBLE_TOKEN, SENDER, -toSendEachTuple) + .including(FUNGIBLE_TOKEN, CONTRACT, -toSendEachTuple) + .including(FUNGIBLE_TOKEN, RECEIVER, 2 * toSendEachTuple))), + childRecordsCheck( + revertedNftTransferTxn, + CONTRACT_REVERT_EXECUTED, + recordWith() + 
.status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .withStatus(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))), + childRecordsCheck( + successfulNftTransferTxn, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))) + .tokenTransfers(NonFungibleTransfers.changingNFTBalances() + .including(NFT_TOKEN, SENDER, RECEIVER, 1L) + .including(NFT_TOKEN, CONTRACT, RECEIVER, 2L)))); + } + + private HapiSpec cryptoTransferNFTsWithCustomFeesMixedScenario() { + final var SPENDER_SIGNATURE = "spenderSignature"; + return propertyPreservingHapiSpec("cryptoTransferNFTsWithCustomFeesMixedScenario") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + newKeyNamed(RECEIVER_SIGNATURE), + newKeyNamed(SPENDER_SIGNATURE), + uploadInitCode(CONTRACT), + contractCreate(CONTRACT), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(OWNER) + .balance(ONE_HUNDRED_HBARS) + .maxAutomaticTokenAssociations(5) + .key(MULTI_KEY), + cryptoCreate(SENDER).balance(ONE_HUNDRED_HBARS), + cryptoCreate(RECEIVER).balance(ONE_HUNDRED_HBARS).key(RECEIVER_SIGNATURE), + tokenCreate(NFT_TOKEN_WITH_FIXED_HBAR_FEE) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(OWNER) + .initialSupply(0L) + .supplyKey(MULTI_KEY) + .adminKey(MULTI_KEY) + .withCustom(fixedHbarFee(1, OWNER)), + tokenCreate(FUNGIBLE_TOKEN_FEE) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .initialSupply(1000L), + tokenAssociate(CONTRACT, FUNGIBLE_TOKEN_FEE), + tokenAssociate(OWNER, FUNGIBLE_TOKEN_FEE), + tokenAssociate(RECEIVER, FUNGIBLE_TOKEN_FEE), + tokenCreate(NFT_TOKEN_WITH_FIXED_TOKEN_FEE) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(OWNER) + .initialSupply(0L) + .supplyKey(MULTI_KEY) + .adminKey(MULTI_KEY) + .withCustom(fixedHtsFee(1, FUNGIBLE_TOKEN_FEE, OWNER)), + tokenCreate(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(OWNER) + .initialSupply(0L) + .supplyKey(MULTI_KEY) + .adminKey(MULTI_KEY) + .withCustom( + royaltyFeeWithFallback(1, 2, fixedHbarFeeInheritingRoyaltyCollector(1), OWNER)), + tokenCreate(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(OWNER) + .initialSupply(0L) + .supplyKey(MULTI_KEY) + .adminKey(MULTI_KEY) + .withCustom(royaltyFeeWithFallback( + 1, 2, fixedHtsFeeInheritingRoyaltyCollector(1, FUNGIBLE_TOKEN_FEE), OWNER)), + tokenAssociate( + CONTRACT, + List.of( + NFT_TOKEN_WITH_FIXED_HBAR_FEE, + NFT_TOKEN_WITH_FIXED_TOKEN_FEE, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK)), + tokenAssociate( + RECEIVER, + List.of( + NFT_TOKEN_WITH_FIXED_HBAR_FEE, + NFT_TOKEN_WITH_FIXED_TOKEN_FEE, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK)), + mintToken(NFT_TOKEN_WITH_FIXED_HBAR_FEE, List.of(META1, META2)), + mintToken(NFT_TOKEN_WITH_FIXED_TOKEN_FEE, List.of(META3, META4)), + mintToken(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, List.of(META5, META6)), + mintToken(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, List.of(META7, META8)), + cryptoTransfer( + movingUnique(NFT_TOKEN_WITH_FIXED_HBAR_FEE, 1L).between(OWNER, CONTRACT)), + cryptoTransfer( + movingUnique(NFT_TOKEN_WITH_FIXED_TOKEN_FEE, 1L).between(OWNER, CONTRACT)), + 
cryptoTransfer(movingUnique(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, 1L) + .between(OWNER, CONTRACT)), + cryptoTransfer(movingUnique(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, 1L) + .between(OWNER, CONTRACT)), + cryptoTransfer(moving(1L, FUNGIBLE_TOKEN_FEE).between(TOKEN_TREASURY, CONTRACT)), + cryptoTransfer(moving(1L, FUNGIBLE_TOKEN_FEE).between(TOKEN_TREASURY, RECEIVER)), + cryptoTransfer(TokenMovement.movingHbar(100L).between(OWNER, CONTRACT))) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + CONTRACT, + TRANSFER_MULTIPLE_TOKENS, + tokenTransferLists() + .withTokenTransferList( + tokenTransferList() + .forToken(spec.registry() + .getTokenID(NFT_TOKEN_WITH_FIXED_HBAR_FEE)) + .withNftTransfers(nftTransfer( + spec.registry() + .getAccountID(CONTRACT), + spec.registry() + .getAccountID(RECEIVER), + 1L)) + .build(), + tokenTransferList() + .forToken(spec.registry() + .getTokenID(NFT_TOKEN_WITH_FIXED_TOKEN_FEE)) + .withNftTransfers(nftTransfer( + spec.registry() + .getAccountID(CONTRACT), + spec.registry() + .getAccountID(RECEIVER), + 1L)) + .build(), + tokenTransferList() + .forToken( + spec.registry() + .getTokenID( + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK)) + .withNftTransfers(nftTransfer( + spec.registry() + .getAccountID(CONTRACT), + spec.registry() + .getAccountID(RECEIVER), + 1L)) + .build(), + tokenTransferList() + .forToken( + spec.registry() + .getTokenID( + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK)) + .withNftTransfers(nftTransfer( + spec.registry() + .getAccountID(CONTRACT), + spec.registry() + .getAccountID(RECEIVER), + 1L)) + .build()) + .build()) + .payingWith(GENESIS) + .alsoSigningWithFullPrefix(RECEIVER_SIGNATURE) + .gas(1_000_000L)))) + .then(); + } + + private HapiSpec hapiTransferFromForNFTWithCustomFeesWithApproveForAll() { + return propertyPreservingHapiSpec("hapiTransferFromForNFTWithCustomFeesWithApproveForAll") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + newKeyNamed(RECEIVER_SIGNATURE), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(OWNER) + .balance(ONE_HUNDRED_HBARS) + .maxAutomaticTokenAssociations(5) + .key(MULTI_KEY), + cryptoCreate(SENDER).balance(ONE_HUNDRED_HBARS), + cryptoCreate(RECEIVER).balance(ONE_HUNDRED_HBARS).key(RECEIVER_SIGNATURE), + tokenCreate(NFT_TOKEN_WITH_FIXED_HBAR_FEE) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(OWNER) + .initialSupply(0L) + .supplyKey(MULTI_KEY) + .adminKey(MULTI_KEY) + .withCustom(fixedHbarFee(1, OWNER)), + tokenCreate(FUNGIBLE_TOKEN_FEE) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .initialSupply(1000L), + tokenAssociate(SENDER, FUNGIBLE_TOKEN_FEE), + tokenAssociate(OWNER, FUNGIBLE_TOKEN_FEE), + tokenAssociate(RECEIVER, FUNGIBLE_TOKEN_FEE), + tokenCreate(NFT_TOKEN_WITH_FIXED_TOKEN_FEE) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(OWNER) + .initialSupply(0L) + .supplyKey(MULTI_KEY) + .adminKey(MULTI_KEY) + .withCustom(fixedHtsFee(1, FUNGIBLE_TOKEN_FEE, OWNER)), + tokenCreate(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(OWNER) + .initialSupply(0L) + .supplyKey(MULTI_KEY) + .adminKey(MULTI_KEY) + .withCustom( + royaltyFeeWithFallback(1, 2, fixedHbarFeeInheritingRoyaltyCollector(1), OWNER)), + tokenCreate(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(OWNER) + .initialSupply(0L) + .supplyKey(MULTI_KEY) + 
.adminKey(MULTI_KEY) + .withCustom(royaltyFeeWithFallback( + 1, 2, fixedHtsFeeInheritingRoyaltyCollector(1, FUNGIBLE_TOKEN_FEE), OWNER)), + tokenAssociate( + SENDER, + List.of( + NFT_TOKEN_WITH_FIXED_HBAR_FEE, + NFT_TOKEN_WITH_FIXED_TOKEN_FEE, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK)), + tokenAssociate( + RECEIVER, + List.of( + NFT_TOKEN_WITH_FIXED_HBAR_FEE, + NFT_TOKEN_WITH_FIXED_TOKEN_FEE, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK)), + mintToken(NFT_TOKEN_WITH_FIXED_HBAR_FEE, List.of(META1, META2)), + mintToken(NFT_TOKEN_WITH_FIXED_TOKEN_FEE, List.of(META3, META4)), + mintToken(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, List.of(META5, META6)), + mintToken(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, List.of(META7, META8)), + cryptoTransfer( + movingUnique(NFT_TOKEN_WITH_FIXED_HBAR_FEE, 1L).between(OWNER, SENDER)), + cryptoTransfer( + movingUnique(NFT_TOKEN_WITH_FIXED_TOKEN_FEE, 1L).between(OWNER, SENDER)), + cryptoTransfer(movingUnique(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, 1L) + .between(OWNER, SENDER)), + cryptoTransfer(movingUnique(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, 1L) + .between(OWNER, SENDER)), + uploadInitCode(HTS_TRANSFER_FROM_CONTRACT), + contractCreate(HTS_TRANSFER_FROM_CONTRACT), + cryptoTransfer(moving(1L, FUNGIBLE_TOKEN_FEE).between(TOKEN_TREASURY, SENDER)), + cryptoTransfer(moving(1L, FUNGIBLE_TOKEN_FEE).between(TOKEN_TREASURY, RECEIVER)), + cryptoApproveAllowance() + .payingWith(DEFAULT_PAYER) + .addNftAllowance( + SENDER, + NFT_TOKEN_WITH_FIXED_HBAR_FEE, + HTS_TRANSFER_FROM_CONTRACT, + true, + List.of()) + .addNftAllowance( + SENDER, + NFT_TOKEN_WITH_FIXED_TOKEN_FEE, + HTS_TRANSFER_FROM_CONTRACT, + true, + List.of()) + .addNftAllowance( + SENDER, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, + HTS_TRANSFER_FROM_CONTRACT, + true, + List.of()) + .addNftAllowance( + SENDER, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, + HTS_TRANSFER_FROM_CONTRACT, + true, + List.of()) + .via(APPROVE_TXN) + .signedBy(DEFAULT_PAYER, SENDER)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + HTS_TRANSFER_FROM_CONTRACT, + HTS_TRANSFER_FROM_NFT, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(NFT_TOKEN_WITH_FIXED_HBAR_FEE))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(SENDER))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(RECEIVER))), + BigInteger.valueOf(1L)) + .payingWith(GENESIS), + contractCall( + HTS_TRANSFER_FROM_CONTRACT, + HTS_TRANSFER_FROM_NFT, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(NFT_TOKEN_WITH_FIXED_TOKEN_FEE))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(SENDER))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(RECEIVER))), + BigInteger.valueOf(1L)) + .payingWith(GENESIS), + contractCall( + HTS_TRANSFER_FROM_CONTRACT, + HTS_TRANSFER_FROM_NFT, + HapiParserUtil.asHeadlongAddress(asAddress(spec.registry() + .getTokenID(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(SENDER))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(RECEIVER))), + BigInteger.valueOf(1L)) + .payingWith(GENESIS) + .alsoSigningWithFullPrefix(RECEIVER_SIGNATURE), + contractCall( + HTS_TRANSFER_FROM_CONTRACT, + HTS_TRANSFER_FROM_NFT, + 
HapiParserUtil.asHeadlongAddress(asAddress(spec.registry() + .getTokenID(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(SENDER))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(RECEIVER))), + BigInteger.valueOf(1L)) + .payingWith(GENESIS) + .alsoSigningWithFullPrefix(RECEIVER_SIGNATURE)))) + .then(); + } + + private HapiSpec hapiTransferFromForNFTWithCustomFeesWithBothApproveForAllAndAssignedSpender() { + return propertyPreservingHapiSpec("hapiTransferFromForNFTWithCustomFeesWithBothApproveForAllAndAssignedSpender") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + newKeyNamed(RECEIVER_SIGNATURE), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(OWNER) + .balance(ONE_HUNDRED_HBARS) + .maxAutomaticTokenAssociations(5) + .key(MULTI_KEY), + cryptoCreate(SENDER).balance(ONE_HUNDRED_HBARS), + cryptoCreate(RECEIVER).balance(ONE_HUNDRED_HBARS).key(RECEIVER_SIGNATURE), + tokenCreate(NFT_TOKEN_WITH_FIXED_HBAR_FEE) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(OWNER) + .initialSupply(0L) + .supplyKey(MULTI_KEY) + .adminKey(MULTI_KEY) + .withCustom(fixedHbarFee(1, OWNER)), + tokenCreate(FUNGIBLE_TOKEN_FEE) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .initialSupply(1000L), + tokenAssociate(SENDER, FUNGIBLE_TOKEN_FEE), + tokenAssociate(OWNER, FUNGIBLE_TOKEN_FEE), + tokenAssociate(RECEIVER, FUNGIBLE_TOKEN_FEE), + tokenCreate(NFT_TOKEN_WITH_FIXED_TOKEN_FEE) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(OWNER) + .initialSupply(0L) + .supplyKey(MULTI_KEY) + .adminKey(MULTI_KEY) + .withCustom(fixedHtsFee(1, FUNGIBLE_TOKEN_FEE, OWNER)), + tokenCreate(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(OWNER) + .initialSupply(0L) + .supplyKey(MULTI_KEY) + .adminKey(MULTI_KEY) + .withCustom( + royaltyFeeWithFallback(1, 2, fixedHbarFeeInheritingRoyaltyCollector(1), OWNER)), + tokenCreate(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(OWNER) + .initialSupply(0L) + .supplyKey(MULTI_KEY) + .adminKey(MULTI_KEY) + .withCustom(royaltyFeeWithFallback( + 1, 2, fixedHtsFeeInheritingRoyaltyCollector(1, FUNGIBLE_TOKEN_FEE), OWNER)), + tokenAssociate( + SENDER, + List.of( + NFT_TOKEN_WITH_FIXED_HBAR_FEE, + NFT_TOKEN_WITH_FIXED_TOKEN_FEE, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK)), + tokenAssociate( + RECEIVER, + List.of( + NFT_TOKEN_WITH_FIXED_HBAR_FEE, + NFT_TOKEN_WITH_FIXED_TOKEN_FEE, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK)), + mintToken(NFT_TOKEN_WITH_FIXED_HBAR_FEE, List.of(META1, META2)), + mintToken(NFT_TOKEN_WITH_FIXED_TOKEN_FEE, List.of(META3, META4)), + mintToken(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, List.of(META5, META6)), + mintToken(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, List.of(META7, META8)), + cryptoTransfer( + movingUnique(NFT_TOKEN_WITH_FIXED_HBAR_FEE, 1L).between(OWNER, SENDER)), + cryptoTransfer( + movingUnique(NFT_TOKEN_WITH_FIXED_TOKEN_FEE, 1L).between(OWNER, SENDER)), + cryptoTransfer(movingUnique(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, 1L) + .between(OWNER, SENDER)), + cryptoTransfer(movingUnique(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, 1L) + .between(OWNER, SENDER)), + 
uploadInitCode(HTS_TRANSFER_FROM_CONTRACT), + contractCreate(HTS_TRANSFER_FROM_CONTRACT), + cryptoTransfer(moving(1L, FUNGIBLE_TOKEN_FEE).between(TOKEN_TREASURY, SENDER)), + cryptoTransfer(moving(1L, FUNGIBLE_TOKEN_FEE).between(TOKEN_TREASURY, RECEIVER)), + cryptoApproveAllowance() + .payingWith(DEFAULT_PAYER) + .addNftAllowance( + SENDER, + NFT_TOKEN_WITH_FIXED_HBAR_FEE, + HTS_TRANSFER_FROM_CONTRACT, + true, + List.of(1L)) + .addNftAllowance( + SENDER, + NFT_TOKEN_WITH_FIXED_TOKEN_FEE, + HTS_TRANSFER_FROM_CONTRACT, + true, + List.of(1L)) + .addNftAllowance( + SENDER, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK, + HTS_TRANSFER_FROM_CONTRACT, + true, + List.of(1L)) + .addNftAllowance( + SENDER, + NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK, + HTS_TRANSFER_FROM_CONTRACT, + true, + List.of(1L)) + .via(APPROVE_TXN) + .signedBy(DEFAULT_PAYER, SENDER)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + HTS_TRANSFER_FROM_CONTRACT, + HTS_TRANSFER_FROM_NFT, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(NFT_TOKEN_WITH_FIXED_HBAR_FEE))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(SENDER))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(RECEIVER))), + BigInteger.valueOf(1L)) + .payingWith(GENESIS), + contractCall( + HTS_TRANSFER_FROM_CONTRACT, + HTS_TRANSFER_FROM_NFT, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(NFT_TOKEN_WITH_FIXED_TOKEN_FEE))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(SENDER))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(RECEIVER))), + BigInteger.valueOf(1L)) + .payingWith(GENESIS), + contractCall( + HTS_TRANSFER_FROM_CONTRACT, + HTS_TRANSFER_FROM_NFT, + HapiParserUtil.asHeadlongAddress(asAddress(spec.registry() + .getTokenID(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_HBAR_FALLBACK))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(SENDER))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(RECEIVER))), + BigInteger.valueOf(1L)) + .payingWith(GENESIS) + .alsoSigningWithFullPrefix(RECEIVER_SIGNATURE), + contractCall( + HTS_TRANSFER_FROM_CONTRACT, + HTS_TRANSFER_FROM_NFT, + HapiParserUtil.asHeadlongAddress(asAddress(spec.registry() + .getTokenID(NFT_TOKEN_WITH_ROYALTY_FEE_WITH_TOKEN_FALLBACK))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(SENDER))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(RECEIVER))), + BigInteger.valueOf(1L)) + .payingWith(GENESIS) + .alsoSigningWithFullPrefix(RECEIVER_SIGNATURE)))) + .then(); + } + + @Override + protected Logger getResultsLogger() { + return log; + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DelegatePrecompileSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DelegatePrecompileSuite.java index a7916189841a..3e9bacb1e70c 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DelegatePrecompileSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DelegatePrecompileSuite.java @@ -43,6 +43,7 @@ import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; import static 
com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.getNestedContractAddress; import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; @@ -60,7 +61,6 @@ import java.util.concurrent.atomic.AtomicReference; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.jetbrains.annotations.NotNull; public class DelegatePrecompileSuite extends HapiSuite { private static final Logger log = LogManager.getLogger(DelegatePrecompileSuite.class); @@ -246,11 +246,6 @@ OUTER_CONTRACT, asHeadlongAddress(getNestedContractAddress(NESTED_CONTRACT, spec getAccountBalance(TOKEN_TREASURY).hasTokenBalance(VANILLA_TOKEN, 51)); } - @NotNull - private String getNestedContractAddress(final String outerContract, final HapiSpec spec) { - return AssociatePrecompileSuite.getNestedContractAddress(outerContract, spec); - } - @Override protected Logger getResultsLogger() { return log; diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DeleteTokenPrecompileSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DeleteTokenPrecompileSuite.java index 8278a20d88ef..8fdd5b5049dd 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DeleteTokenPrecompileSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DeleteTokenPrecompileSuite.java @@ -16,57 +16,15 @@ package com.hedera.services.bdd.suites.contract.precompile; -import static com.google.protobuf.ByteString.copyFromUtf8; -import static com.hedera.services.bdd.spec.HapiPropertySource.asToken; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; -import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; -import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoUpdate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; -import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; -import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; -import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; -import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; -import static 
com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; -import static com.hedera.services.bdd.suites.contract.Utils.asHexedAddress; -import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; -import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_WAS_DELETED; -import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; -import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; - import com.hedera.services.bdd.spec.HapiSpec; import com.hedera.services.bdd.suites.HapiSuite; -import com.hederahashgraph.api.proto.java.AccountID; -import com.hederahashgraph.api.proto.java.TokenID; import java.util.List; -import java.util.concurrent.atomic.AtomicReference; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; public class DeleteTokenPrecompileSuite extends HapiSuite { private static final Logger log = LogManager.getLogger(DeleteTokenPrecompileSuite.class); - private static final long GAS_TO_OFFER = 4_000_000L; - public static final String DELETE_TOKEN_CONTRACT = "DeleteTokenContract"; - public static final String TOKEN_DELETE_FUNCTION = "tokenDelete"; - private static final String ACCOUNT = "anybody"; - private static final String MULTI_KEY = "purpose"; - private static final String DELETE_TXN = "deleteTxn"; - final AtomicReference accountID = new AtomicReference<>(); - public static void main(String... args) { new DeleteTokenPrecompileSuite().runSuiteAsync(); } @@ -78,112 +36,7 @@ public boolean canRunConcurrent() { @Override public List getSpecsInSuite() { - return List.of(deleteFungibleTokenWithNegativeCases(), deleteNftTokenWithNegativeCases()); - } - - private HapiSpec deleteFungibleTokenWithNegativeCases() { - final AtomicReference vanillaTokenID = new AtomicReference<>(); - final var tokenAlreadyDeletedTxn = "tokenAlreadyDeletedTxn"; - - return defaultHapiSpec("deleteFungibleTokenWithNegativeCases") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(ACCOUNT) - .key(MULTI_KEY) - .balance(100 * ONE_HBAR) - .exposingCreatedIdTo(accountID::set), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))) - .initialSupply(1110), - uploadInitCode(DELETE_TOKEN_CONTRACT), - contractCreate(DELETE_TOKEN_CONTRACT), - tokenAssociate(ACCOUNT, VANILLA_TOKEN), - cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT))) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - DELETE_TOKEN_CONTRACT, - TOKEN_DELETE_FUNCTION, - asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) - .gas(GAS_TO_OFFER) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via(DELETE_TXN), - getTokenInfo(VANILLA_TOKEN).isDeleted().logged(), - cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT)) - .hasKnownStatus(TOKEN_WAS_DELETED), - contractCall( - DELETE_TOKEN_CONTRACT, - TOKEN_DELETE_FUNCTION, - asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) - .gas(GAS_TO_OFFER) - .via(tokenAlreadyDeletedTxn) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - 
.hasKnownStatus(CONTRACT_REVERT_EXECUTED)))) - .then(childRecordsCheck( - tokenAlreadyDeletedTxn, - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(TOKEN_WAS_DELETED) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(TOKEN_WAS_DELETED))))); - } - - private HapiSpec deleteNftTokenWithNegativeCases() { - final AtomicReference vanillaTokenID = new AtomicReference<>(); - final var notAnAdminTxn = "notAnAdminTxn"; - - return defaultHapiSpec("deleteNftTokenWithNegativeCases") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(ACCOUNT).balance(100 * ONE_HBAR).exposingCreatedIdTo(accountID::set), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(VANILLA_TOKEN) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))) - .initialSupply(0), - mintToken(VANILLA_TOKEN, List.of(copyFromUtf8("First!"))), - uploadInitCode(DELETE_TOKEN_CONTRACT), - contractCreate(DELETE_TOKEN_CONTRACT), - tokenAssociate(ACCOUNT, VANILLA_TOKEN), - cryptoTransfer(movingUnique(VANILLA_TOKEN, 1L).between(TOKEN_TREASURY, ACCOUNT))) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - DELETE_TOKEN_CONTRACT, - TOKEN_DELETE_FUNCTION, - asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) - .gas(GAS_TO_OFFER) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via(notAnAdminTxn) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - cryptoUpdate(ACCOUNT).key(MULTI_KEY), - contractCall( - DELETE_TOKEN_CONTRACT, - TOKEN_DELETE_FUNCTION, - asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .gas(GAS_TO_OFFER), - getTokenInfo(VANILLA_TOKEN).isDeleted().logged()))) - .then(childRecordsCheck( - notAnAdminTxn, - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(INVALID_SIGNATURE) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INVALID_SIGNATURE))))); + return List.of(); } @Override diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DeleteTokenPrecompileV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DeleteTokenPrecompileV1SecurityModelSuite.java new file mode 100644 index 000000000000..e176dc5b4aca --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DeleteTokenPrecompileV1SecurityModelSuite.java @@ -0,0 +1,209 @@ +/* + * Copyright (C) 2022-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.google.protobuf.ByteString.copyFromUtf8; +import static com.hedera.services.bdd.spec.HapiPropertySource.asToken; +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoUpdate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overridingTwo; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asHexedAddress; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_WAS_DELETED; +import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; +import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; + +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hederahashgraph.api.proto.java.AccountID; +import com.hederahashgraph.api.proto.java.TokenID; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +public class DeleteTokenPrecompileV1SecurityModelSuite extends HapiSuite { + private static final Logger log = 
LogManager.getLogger(DeleteTokenPrecompileV1SecurityModelSuite.class); + + private static final long GAS_TO_OFFER = 4_000_000L; + public static final String DELETE_TOKEN_CONTRACT = "DeleteTokenContract"; + public static final String TOKEN_DELETE_FUNCTION = "tokenDelete"; + private static final String ACCOUNT = "anybody"; + private static final String MULTI_KEY = "purpose"; + private static final String DELETE_TXN = "deleteTxn"; + final AtomicReference accountID = new AtomicReference<>(); + + public static void main(String... args) { + new DeleteTokenPrecompileV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return true; + } + + @Override + public List getSpecsInSuite() { + return List.of(deleteFungibleTokenWithNegativeCases(), deleteNftTokenWithNegativeCases()); + } + + private HapiSpec deleteFungibleTokenWithNegativeCases() { + final AtomicReference vanillaTokenID = new AtomicReference<>(); + final var tokenAlreadyDeletedTxn = "tokenAlreadyDeletedTxn"; + + return propertyPreservingHapiSpec("deleteFungibleTokenWithNegativeCases") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenDelete", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(ACCOUNT) + .key(MULTI_KEY) + .balance(100 * ONE_HBAR) + .exposingCreatedIdTo(accountID::set), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))) + .initialSupply(1110), + uploadInitCode(DELETE_TOKEN_CONTRACT), + contractCreate(DELETE_TOKEN_CONTRACT), + tokenAssociate(ACCOUNT, VANILLA_TOKEN), + cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT))) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + DELETE_TOKEN_CONTRACT, + TOKEN_DELETE_FUNCTION, + asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) + .gas(GAS_TO_OFFER) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via(DELETE_TXN), + getTokenInfo(VANILLA_TOKEN).isDeleted().logged(), + cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT)) + .hasKnownStatus(TOKEN_WAS_DELETED), + contractCall( + DELETE_TOKEN_CONTRACT, + TOKEN_DELETE_FUNCTION, + asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) + .gas(GAS_TO_OFFER) + .via(tokenAlreadyDeletedTxn) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)))) + .then(childRecordsCheck( + tokenAlreadyDeletedTxn, + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(TOKEN_WAS_DELETED) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(TOKEN_WAS_DELETED))))); + } + + private HapiSpec deleteNftTokenWithNegativeCases() { + final AtomicReference vanillaTokenID = new AtomicReference<>(); + final var notAnAdminTxn = "notAnAdminTxn"; + + return propertyPreservingHapiSpec("deleteNftTokenWithNegativeCases") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenDelete", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + 
CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(ACCOUNT).balance(100 * ONE_HBAR).exposingCreatedIdTo(accountID::set), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(VANILLA_TOKEN) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))) + .initialSupply(0), + mintToken(VANILLA_TOKEN, List.of(copyFromUtf8("First!"))), + uploadInitCode(DELETE_TOKEN_CONTRACT), + contractCreate(DELETE_TOKEN_CONTRACT), + tokenAssociate(ACCOUNT, VANILLA_TOKEN), + cryptoTransfer(movingUnique(VANILLA_TOKEN, 1L).between(TOKEN_TREASURY, ACCOUNT))) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + DELETE_TOKEN_CONTRACT, + TOKEN_DELETE_FUNCTION, + asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) + .gas(GAS_TO_OFFER) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via(notAnAdminTxn) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + cryptoUpdate(ACCOUNT).key(MULTI_KEY), + contractCall( + DELETE_TOKEN_CONTRACT, + TOKEN_DELETE_FUNCTION, + asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .gas(GAS_TO_OFFER), + getTokenInfo(VANILLA_TOKEN).isDeleted().logged()))) + .then(childRecordsCheck( + notAnAdminTxn, + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_SIGNATURE) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(INVALID_SIGNATURE))))); + } + + @Override + protected Logger getResultsLogger() { + return log; + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DissociatePrecompileSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DissociatePrecompileSuite.java index 7a788317b84b..9ddfdf9964c9 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DissociatePrecompileSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DissociatePrecompileSuite.java @@ -16,69 +16,15 @@ package com.hedera.services.bdd.suites.contract.precompile; -import static com.hedera.services.bdd.spec.HapiPropertySource.asDotDelimitedLongArray; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; -import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; -import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; -import static com.hedera.services.bdd.spec.queries.crypto.ExpectedTokenRel.relationshipWith; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; -import static 
com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenDelete; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenFreeze; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenUnfreeze; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; -import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; -import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; -import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; -import static com.hedera.services.bdd.suites.contract.Utils.asAddress; -import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.FREEZABLE_TOKEN_ON_BY_DEFAULT; -import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.KNOWABLE_TOKEN; -import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.TBD_TOKEN; -import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; -import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; -import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; - -import com.esaulpaugh.headlong.abi.Address; -import com.google.protobuf.ByteString; import com.hedera.services.bdd.spec.HapiSpec; -import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; import com.hedera.services.bdd.suites.HapiSuite; -import com.hederahashgraph.api.proto.java.AccountID; -import com.hederahashgraph.api.proto.java.ResponseCodeEnum; -import com.hederahashgraph.api.proto.java.TokenID; -import com.hederahashgraph.api.proto.java.TokenType; -import java.nio.charset.StandardCharsets; import java.util.List; -import java.util.concurrent.atomic.AtomicReference; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.jetbrains.annotations.NotNull; public class DissociatePrecompileSuite extends HapiSuite { private static final Logger log = LogManager.getLogger(DissociatePrecompileSuite.class); - private static final long GAS_TO_OFFER = 2_000_000L; - - private static final long TOTAL_SUPPLY = 1_000; - private static final String TOKEN_TREASURY = "treasury"; - private static final String OUTER_CONTRACT = "NestedAssociateDissociate"; - private static final String NESTED_CONTRACT = "AssociateDissociate"; - private static final String CONTRACT = "AssociateDissociate"; - private static final String ACCOUNT = "anybody"; - private static final String MULTI_KEY = "Multi key"; - public static void main(String... 
args) { new DissociatePrecompileSuite().runSuiteAsync(); } @@ -90,303 +36,11 @@ public boolean canRunConcurrent() { @Override public List getSpecsInSuite() { - return List.of( - dissociatePrecompileHasExpectedSemanticsForDeletedTokens(), - nestedDissociateWorksAsExpected(), - multiplePrecompileDissociationWithSigsForFungibleWorks()); - } - - /* -- Not specifically required in the HTS Precompile Test Plan -- */ - public HapiSpec dissociatePrecompileHasExpectedSemanticsForDeletedTokens() { - final var tbdUniqToken = "UniqToBeDeleted"; - final var zeroBalanceFrozen = "0bFrozen"; - final var zeroBalanceUnfrozen = "0bUnfrozen"; - final var nonZeroBalanceFrozen = "1bFrozen"; - final var nonZeroBalanceUnfrozen = "1bUnfrozen"; - final var initialSupply = 100L; - final var nonZeroXfer = 10L; - final var firstMeta = ByteString.copyFrom("FIRST".getBytes(StandardCharsets.UTF_8)); - final var secondMeta = ByteString.copyFrom("SECOND".getBytes(StandardCharsets.UTF_8)); - final var thirdMeta = ByteString.copyFrom("THIRD".getBytes(StandardCharsets.UTF_8)); - - final AtomicReference accountID = new AtomicReference<>(); - final AtomicReference treasuryID = new AtomicReference<>(); - final AtomicReference zeroBalanceFrozenID = new AtomicReference<>(); - final AtomicReference zeroBalanceUnfrozenID = new AtomicReference<>(); - final AtomicReference nonZeroBalanceFrozenID = new AtomicReference<>(); - final AtomicReference nonZeroBalanceUnfrozenID = new AtomicReference<>(); - final AtomicReference tbdTokenID = new AtomicReference<>(); - final AtomicReference tbdUniqueTokenID = new AtomicReference<>(); - - return defaultHapiSpec("DissociatePrecompileHasExpectedSemanticsForDeletedTokens") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS).exposingCreatedIdTo(accountID::set), - cryptoCreate(TOKEN_TREASURY) - .balance(10 * ONE_HUNDRED_HBARS) - .exposingCreatedIdTo(treasuryID::set), - tokenCreate(TBD_TOKEN) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .initialSupply(initialSupply) - .treasury(TOKEN_TREASURY) - .freezeKey(MULTI_KEY) - .freezeDefault(true) - .exposingCreatedIdTo(id -> tbdTokenID.set(asToken(id))), - tokenCreate(tbdUniqToken) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .initialSupply(0) - .exposingCreatedIdTo(id -> tbdUniqueTokenID.set(asToken(id))), - cryptoCreate(zeroBalanceFrozen) - .balance(10 * ONE_HUNDRED_HBARS) - .exposingCreatedIdTo(zeroBalanceFrozenID::set), - cryptoCreate(zeroBalanceUnfrozen) - .balance(10 * ONE_HUNDRED_HBARS) - .exposingCreatedIdTo(zeroBalanceUnfrozenID::set), - cryptoCreate(nonZeroBalanceFrozen) - .balance(10 * ONE_HUNDRED_HBARS) - .exposingCreatedIdTo(nonZeroBalanceFrozenID::set), - cryptoCreate(nonZeroBalanceUnfrozen) - .balance(10 * ONE_HUNDRED_HBARS) - .exposingCreatedIdTo(nonZeroBalanceUnfrozenID::set), - uploadInitCode(CONTRACT), - contractCreate(CONTRACT)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - tokenAssociate(zeroBalanceFrozen, TBD_TOKEN), - tokenAssociate(zeroBalanceUnfrozen, TBD_TOKEN), - tokenAssociate(nonZeroBalanceFrozen, TBD_TOKEN), - tokenAssociate(nonZeroBalanceUnfrozen, TBD_TOKEN), - mintToken(tbdUniqToken, List.of(firstMeta, secondMeta, thirdMeta)), - getAccountInfo(TOKEN_TREASURY).hasOwnedNfts(3), - tokenUnfreeze(TBD_TOKEN, zeroBalanceUnfrozen), - tokenUnfreeze(TBD_TOKEN, nonZeroBalanceUnfrozen), - tokenUnfreeze(TBD_TOKEN, nonZeroBalanceFrozen), - cryptoTransfer(moving(nonZeroXfer, TBD_TOKEN).between(TOKEN_TREASURY, 
nonZeroBalanceFrozen)), - cryptoTransfer(moving(nonZeroXfer, TBD_TOKEN).between(TOKEN_TREASURY, nonZeroBalanceUnfrozen)), - tokenFreeze(TBD_TOKEN, nonZeroBalanceFrozen), - getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TBD_TOKEN, initialSupply - 2 * nonZeroXfer), - tokenDelete(TBD_TOKEN), - tokenDelete(tbdUniqToken), - contractCall( - CONTRACT, - "tokenDissociate", - HapiParserUtil.asHeadlongAddress(asAddress(zeroBalanceFrozenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(tbdTokenID.get()))) - .alsoSigningWithFullPrefix(zeroBalanceFrozen) - .gas(GAS_TO_OFFER) - .via("dissociateZeroBalanceFrozenTxn"), - getTxnRecord("dissociateZeroBalanceFrozenTxn") - .andAllChildRecords() - .logged(), - contractCall( - CONTRACT, - "tokenDissociate", - HapiParserUtil.asHeadlongAddress(asAddress(zeroBalanceUnfrozenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(tbdTokenID.get()))) - .alsoSigningWithFullPrefix(zeroBalanceUnfrozen) - .gas(GAS_TO_OFFER) - .via("dissociateZeroBalanceUnfrozenTxn"), - getTxnRecord("dissociateZeroBalanceUnfrozenTxn") - .andAllChildRecords() - .logged(), - contractCall( - CONTRACT, - "tokenDissociate", - HapiParserUtil.asHeadlongAddress(asAddress(nonZeroBalanceFrozenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(tbdTokenID.get()))) - .alsoSigningWithFullPrefix(nonZeroBalanceFrozen) - .gas(GAS_TO_OFFER) - .via("dissociateNonZeroBalanceFrozenTxn"), - getTxnRecord("dissociateNonZeroBalanceFrozenTxn") - .andAllChildRecords() - .logged(), - contractCall( - CONTRACT, - "tokenDissociate", - HapiParserUtil.asHeadlongAddress(asAddress(nonZeroBalanceUnfrozenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(tbdTokenID.get()))) - .alsoSigningWithFullPrefix(nonZeroBalanceUnfrozen) - .gas(GAS_TO_OFFER) - .via("dissociateNonZeroBalanceUnfrozenTxn"), - getTxnRecord("dissociateNonZeroBalanceUnfrozenTxn") - .andAllChildRecords() - .logged(), - contractCall( - CONTRACT, - "tokenDissociate", - HapiParserUtil.asHeadlongAddress(asAddress(treasuryID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(tbdUniqueTokenID.get()))) - .alsoSigningWithFullPrefix(TOKEN_TREASURY) - .gas(GAS_TO_OFFER)))) - .then( - childRecordsCheck( - "dissociateZeroBalanceFrozenTxn", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS)))), - childRecordsCheck( - "dissociateZeroBalanceUnfrozenTxn", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS)))), - childRecordsCheck( - "dissociateNonZeroBalanceFrozenTxn", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS)))), - childRecordsCheck( - "dissociateNonZeroBalanceUnfrozenTxn", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS)))), - getAccountInfo(zeroBalanceFrozen).hasNoTokenRelationship(TBD_TOKEN), - getAccountInfo(zeroBalanceUnfrozen).hasNoTokenRelationship(TBD_TOKEN), - getAccountInfo(nonZeroBalanceFrozen).hasNoTokenRelationship(TBD_TOKEN), - getAccountInfo(nonZeroBalanceUnfrozen).hasNoTokenRelationship(TBD_TOKEN), - getAccountInfo(TOKEN_TREASURY) - .hasToken(relationshipWith(TBD_TOKEN)) - .hasNoTokenRelationship(tbdUniqToken) - .hasOwnedNfts(0), - getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TBD_TOKEN, initialSupply - 2 * nonZeroXfer)); - } - - /* -- Not 
specifically required in the HTS Precompile Test Plan -- */ - private HapiSpec nestedDissociateWorksAsExpected() { - final AtomicReference accountID = new AtomicReference<>(); - final AtomicReference vanillaTokenID = new AtomicReference<>(); - - return defaultHapiSpec("nestedDissociateWorksAsExpected") - .given( - cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS).exposingCreatedIdTo(accountID::set), - cryptoCreate(TOKEN_TREASURY).balance(0L), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - uploadInitCode(OUTER_CONTRACT, NESTED_CONTRACT), - contractCreate(NESTED_CONTRACT)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCreate( - OUTER_CONTRACT, asHeadlongAddress(getNestedContractAddress(NESTED_CONTRACT, spec))), - tokenAssociate(ACCOUNT, VANILLA_TOKEN), - contractCall( - OUTER_CONTRACT, - "dissociateAssociateContractCall", - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get()))) - .alsoSigningWithFullPrefix(ACCOUNT) - .via("nestedDissociateTxn") - .gas(3_000_000L) - .hasKnownStatus(ResponseCodeEnum.SUCCESS), - getTxnRecord("nestedDissociateTxn").andAllChildRecords().logged()))) - .then( - childRecordsCheck( - "nestedDissociateTxn", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS)))), - getAccountInfo(ACCOUNT).hasToken(relationshipWith(VANILLA_TOKEN))); - } - - /* -- HSCS-PREC-007 from HTS Precompile Test Plan -- */ - public HapiSpec multiplePrecompileDissociationWithSigsForFungibleWorks() { - final AtomicReference knowableTokenTokenID = new AtomicReference<>(); - final AtomicReference vanillaTokenID = new AtomicReference<>(); - final AtomicReference accountID = new AtomicReference<>(); - final AtomicReference treasuryID = new AtomicReference<>(); - - return defaultHapiSpec("multiplePrecompileDissociationWithSigsForFungibleWorks") - .given( - cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS).exposingCreatedIdTo(accountID::set), - cryptoCreate(TOKEN_TREASURY).balance(0L).exposingCreatedIdTo(treasuryID::set), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .initialSupply(TOTAL_SUPPLY) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - tokenCreate(KNOWABLE_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .initialSupply(TOTAL_SUPPLY) - .exposingCreatedIdTo(id -> knowableTokenTokenID.set(asToken(id))), - uploadInitCode(CONTRACT), - contractCreate(CONTRACT)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - tokenAssociate(ACCOUNT, List.of(VANILLA_TOKEN, KNOWABLE_TOKEN)), - getAccountInfo(ACCOUNT).hasToken(relationshipWith(VANILLA_TOKEN)), - getAccountInfo(ACCOUNT).hasToken(relationshipWith(KNOWABLE_TOKEN)), - contractCall( - CONTRACT, - "tokensDissociate", - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), - new Address[] { - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(knowableTokenTokenID.get())) - }) - .alsoSigningWithFullPrefix(ACCOUNT) - .via("multipleDissociationTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(SUCCESS), - getTxnRecord("multipleDissociationTxn") - .andAllChildRecords() - 
.logged()))) - .then( - childRecordsCheck( - "multipleDissociationTxn", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS)))), - getAccountInfo(ACCOUNT).hasNoTokenRelationship(FREEZABLE_TOKEN_ON_BY_DEFAULT), - getAccountInfo(ACCOUNT).hasNoTokenRelationship(KNOWABLE_TOKEN)); + return List.of(); } @Override protected Logger getResultsLogger() { return log; } - - /* --- Helpers --- */ - - private static TokenID asToken(String v) { - long[] nativeParts = asDotDelimitedLongArray(v); - return TokenID.newBuilder() - .setShardNum(nativeParts[0]) - .setRealmNum(nativeParts[1]) - .setTokenNum(nativeParts[2]) - .build(); - } - - @NotNull - private String getNestedContractAddress(final String outerContract, final HapiSpec spec) { - return AssociatePrecompileSuite.getNestedContractAddress(outerContract, spec); - } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DissociatePrecompileV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DissociatePrecompileV1SecurityModelSuite.java new file mode 100644 index 000000000000..537c6ff8a853 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/DissociatePrecompileV1SecurityModelSuite.java @@ -0,0 +1,397 @@ +/* + * Copyright (C) 2021-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.hedera.services.bdd.spec.HapiPropertySource.asDotDelimitedLongArray; +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; +import static com.hedera.services.bdd.spec.queries.crypto.ExpectedTokenRel.relationshipWith; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenDelete; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenFreeze; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenUnfreeze; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overriding; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.getNestedContractAddress; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.FREEZABLE_TOKEN_ON_BY_DEFAULT; +import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.KNOWABLE_TOKEN; +import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.TBD_TOKEN; +import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; +import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; + +import com.esaulpaugh.headlong.abi.Address; +import com.google.protobuf.ByteString; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.suites.HapiSuite; +import 
com.hederahashgraph.api.proto.java.AccountID; +import com.hederahashgraph.api.proto.java.ResponseCodeEnum; +import com.hederahashgraph.api.proto.java.TokenID; +import com.hederahashgraph.api.proto.java.TokenType; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +@SuppressWarnings("java:S1192") // "string literal should not be duplicated" - this rule makes test suites worse +public class DissociatePrecompileV1SecurityModelSuite extends HapiSuite { + private static final Logger log = LogManager.getLogger(DissociatePrecompileV1SecurityModelSuite.class); + + private static final long GAS_TO_OFFER = 2_000_000L; + + private static final long TOTAL_SUPPLY = 1_000; + private static final String TOKEN_TREASURY = "treasury"; + private static final String OUTER_CONTRACT = "NestedAssociateDissociate"; + private static final String NESTED_CONTRACT = "AssociateDissociate"; + private static final String CONTRACT = "AssociateDissociate"; + private static final String ACCOUNT = "anybody"; + private static final String MULTI_KEY = "Multi key"; + + public static void main(String... args) { + new DissociatePrecompileV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + public List getSpecsInSuite() { + return List.of( + dissociatePrecompileHasExpectedSemanticsForDeletedTokens(), + nestedDissociateWorksAsExpected(), + multiplePrecompileDissociationWithSigsForFungibleWorks()); + } + + /* -- Not specifically required in the HTS Precompile Test Plan -- */ + public HapiSpec dissociatePrecompileHasExpectedSemanticsForDeletedTokens() { + final var tbdUniqToken = "UniqToBeDeleted"; + final var zeroBalanceFrozen = "0bFrozen"; + final var zeroBalanceUnfrozen = "0bUnfrozen"; + final var nonZeroBalanceFrozen = "1bFrozen"; + final var nonZeroBalanceUnfrozen = "1bUnfrozen"; + final var initialSupply = 100L; + final var nonZeroXfer = 10L; + final var firstMeta = ByteString.copyFrom("FIRST".getBytes(StandardCharsets.UTF_8)); + final var secondMeta = ByteString.copyFrom("SECOND".getBytes(StandardCharsets.UTF_8)); + final var thirdMeta = ByteString.copyFrom("THIRD".getBytes(StandardCharsets.UTF_8)); + + final AtomicReference accountID = new AtomicReference<>(); + final AtomicReference treasuryID = new AtomicReference<>(); + final AtomicReference zeroBalanceFrozenID = new AtomicReference<>(); + final AtomicReference zeroBalanceUnfrozenID = new AtomicReference<>(); + final AtomicReference nonZeroBalanceFrozenID = new AtomicReference<>(); + final AtomicReference nonZeroBalanceUnfrozenID = new AtomicReference<>(); + final AtomicReference tbdTokenID = new AtomicReference<>(); + final AtomicReference tbdUniqueTokenID = new AtomicReference<>(); + + return propertyPreservingHapiSpec("dissociatePrecompileHasExpectedSemanticsForDeletedTokens") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS).exposingCreatedIdTo(accountID::set), + cryptoCreate(TOKEN_TREASURY) + .balance(10 * ONE_HUNDRED_HBARS) + .exposingCreatedIdTo(treasuryID::set), + tokenCreate(TBD_TOKEN) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .initialSupply(initialSupply) + .treasury(TOKEN_TREASURY) + .freezeKey(MULTI_KEY) + .freezeDefault(true) + 
.exposingCreatedIdTo(id -> tbdTokenID.set(asToken(id))), + tokenCreate(tbdUniqToken) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .initialSupply(0) + .exposingCreatedIdTo(id -> tbdUniqueTokenID.set(asToken(id))), + cryptoCreate(zeroBalanceFrozen) + .balance(10 * ONE_HUNDRED_HBARS) + .exposingCreatedIdTo(zeroBalanceFrozenID::set), + cryptoCreate(zeroBalanceUnfrozen) + .balance(10 * ONE_HUNDRED_HBARS) + .exposingCreatedIdTo(zeroBalanceUnfrozenID::set), + cryptoCreate(nonZeroBalanceFrozen) + .balance(10 * ONE_HUNDRED_HBARS) + .exposingCreatedIdTo(nonZeroBalanceFrozenID::set), + cryptoCreate(nonZeroBalanceUnfrozen) + .balance(10 * ONE_HUNDRED_HBARS) + .exposingCreatedIdTo(nonZeroBalanceUnfrozenID::set), + uploadInitCode(CONTRACT), + contractCreate(CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + tokenAssociate(zeroBalanceFrozen, TBD_TOKEN), + tokenAssociate(zeroBalanceUnfrozen, TBD_TOKEN), + tokenAssociate(nonZeroBalanceFrozen, TBD_TOKEN), + tokenAssociate(nonZeroBalanceUnfrozen, TBD_TOKEN), + mintToken(tbdUniqToken, List.of(firstMeta, secondMeta, thirdMeta)), + getAccountInfo(TOKEN_TREASURY).hasOwnedNfts(3), + tokenUnfreeze(TBD_TOKEN, zeroBalanceUnfrozen), + tokenUnfreeze(TBD_TOKEN, nonZeroBalanceUnfrozen), + tokenUnfreeze(TBD_TOKEN, nonZeroBalanceFrozen), + cryptoTransfer(moving(nonZeroXfer, TBD_TOKEN).between(TOKEN_TREASURY, nonZeroBalanceFrozen)), + cryptoTransfer(moving(nonZeroXfer, TBD_TOKEN).between(TOKEN_TREASURY, nonZeroBalanceUnfrozen)), + tokenFreeze(TBD_TOKEN, nonZeroBalanceFrozen), + getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TBD_TOKEN, initialSupply - 2 * nonZeroXfer), + tokenDelete(TBD_TOKEN), + tokenDelete(tbdUniqToken), + contractCall( + CONTRACT, + "tokenDissociate", + HapiParserUtil.asHeadlongAddress(asAddress(zeroBalanceFrozenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(tbdTokenID.get()))) + .alsoSigningWithFullPrefix(zeroBalanceFrozen) + .gas(GAS_TO_OFFER) + .via("dissociateZeroBalanceFrozenTxn"), + getTxnRecord("dissociateZeroBalanceFrozenTxn") + .andAllChildRecords() + .logged(), + contractCall( + CONTRACT, + "tokenDissociate", + HapiParserUtil.asHeadlongAddress(asAddress(zeroBalanceUnfrozenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(tbdTokenID.get()))) + .alsoSigningWithFullPrefix(zeroBalanceUnfrozen) + .gas(GAS_TO_OFFER) + .via("dissociateZeroBalanceUnfrozenTxn"), + getTxnRecord("dissociateZeroBalanceUnfrozenTxn") + .andAllChildRecords() + .logged(), + contractCall( + CONTRACT, + "tokenDissociate", + HapiParserUtil.asHeadlongAddress(asAddress(nonZeroBalanceFrozenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(tbdTokenID.get()))) + .alsoSigningWithFullPrefix(nonZeroBalanceFrozen) + .gas(GAS_TO_OFFER) + .via("dissociateNonZeroBalanceFrozenTxn"), + getTxnRecord("dissociateNonZeroBalanceFrozenTxn") + .andAllChildRecords() + .logged(), + contractCall( + CONTRACT, + "tokenDissociate", + HapiParserUtil.asHeadlongAddress(asAddress(nonZeroBalanceUnfrozenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(tbdTokenID.get()))) + .alsoSigningWithFullPrefix(nonZeroBalanceUnfrozen) + .gas(GAS_TO_OFFER) + .via("dissociateNonZeroBalanceUnfrozenTxn"), + getTxnRecord("dissociateNonZeroBalanceUnfrozenTxn") + .andAllChildRecords() + .logged(), + contractCall( + CONTRACT, + "tokenDissociate", + HapiParserUtil.asHeadlongAddress(asAddress(treasuryID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(tbdUniqueTokenID.get()))) + 
.alsoSigningWithFullPrefix(TOKEN_TREASURY) + .gas(GAS_TO_OFFER)))) + .then( + childRecordsCheck( + "dissociateZeroBalanceFrozenTxn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS)))), + childRecordsCheck( + "dissociateZeroBalanceUnfrozenTxn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS)))), + childRecordsCheck( + "dissociateNonZeroBalanceFrozenTxn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS)))), + childRecordsCheck( + "dissociateNonZeroBalanceUnfrozenTxn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS)))), + getAccountInfo(zeroBalanceFrozen).hasNoTokenRelationship(TBD_TOKEN), + getAccountInfo(zeroBalanceUnfrozen).hasNoTokenRelationship(TBD_TOKEN), + getAccountInfo(nonZeroBalanceFrozen).hasNoTokenRelationship(TBD_TOKEN), + getAccountInfo(nonZeroBalanceUnfrozen).hasNoTokenRelationship(TBD_TOKEN), + getAccountInfo(TOKEN_TREASURY) + .hasToken(relationshipWith(TBD_TOKEN)) + .hasNoTokenRelationship(tbdUniqToken) + .hasOwnedNfts(0), + getAccountBalance(TOKEN_TREASURY).hasTokenBalance(TBD_TOKEN, initialSupply - 2 * nonZeroXfer)); + } + + /* -- Not specifically required in the HTS Precompile Test Plan -- */ + private HapiSpec nestedDissociateWorksAsExpected() { + final AtomicReference accountID = new AtomicReference<>(); + final AtomicReference vanillaTokenID = new AtomicReference<>(); + + return propertyPreservingHapiSpec("nestedDissociateWorksAsExpected") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS).exposingCreatedIdTo(accountID::set), + cryptoCreate(TOKEN_TREASURY).balance(0L), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + uploadInitCode(OUTER_CONTRACT, NESTED_CONTRACT), + contractCreate(NESTED_CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCreate( + OUTER_CONTRACT, asHeadlongAddress(getNestedContractAddress(NESTED_CONTRACT, spec))), + tokenAssociate(ACCOUNT, VANILLA_TOKEN), + contractCall( + OUTER_CONTRACT, + "dissociateAssociateContractCall", + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get()))) + .alsoSigningWithFullPrefix(ACCOUNT) + .via("nestedDissociateTxn") + .gas(3_000_000L) + .hasKnownStatus(ResponseCodeEnum.SUCCESS), + getTxnRecord("nestedDissociateTxn").andAllChildRecords().logged()))) + .then( + childRecordsCheck( + "nestedDissociateTxn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS)))), + getAccountInfo(ACCOUNT).hasToken(relationshipWith(VANILLA_TOKEN))); + } + + /* -- HSCS-PREC-007 from HTS Precompile Test Plan -- */ + public HapiSpec multiplePrecompileDissociationWithSigsForFungibleWorks() { + final AtomicReference knowableTokenTokenID = new AtomicReference<>(); + 
final AtomicReference vanillaTokenID = new AtomicReference<>(); + final AtomicReference accountID = new AtomicReference<>(); + final AtomicReference treasuryID = new AtomicReference<>(); + + return propertyPreservingHapiSpec("multiplePrecompileDissociationWithSigsForFungibleWorks") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(ACCOUNT).balance(10 * ONE_HUNDRED_HBARS).exposingCreatedIdTo(accountID::set), + cryptoCreate(TOKEN_TREASURY).balance(0L).exposingCreatedIdTo(treasuryID::set), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .initialSupply(TOTAL_SUPPLY) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + tokenCreate(KNOWABLE_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .initialSupply(TOTAL_SUPPLY) + .exposingCreatedIdTo(id -> knowableTokenTokenID.set(asToken(id))), + uploadInitCode(CONTRACT), + contractCreate(CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + tokenAssociate(ACCOUNT, List.of(VANILLA_TOKEN, KNOWABLE_TOKEN)), + getAccountInfo(ACCOUNT).hasToken(relationshipWith(VANILLA_TOKEN)), + getAccountInfo(ACCOUNT).hasToken(relationshipWith(KNOWABLE_TOKEN)), + contractCall( + CONTRACT, + "tokensDissociate", + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), + new Address[] { + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(knowableTokenTokenID.get())) + }) + .alsoSigningWithFullPrefix(ACCOUNT) + .via("multipleDissociationTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(SUCCESS), + getTxnRecord("multipleDissociationTxn") + .andAllChildRecords() + .logged()))) + .then( + childRecordsCheck( + "multipleDissociationTxn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS)))), + getAccountInfo(ACCOUNT).hasNoTokenRelationship(FREEZABLE_TOKEN_ON_BY_DEFAULT), + getAccountInfo(ACCOUNT).hasNoTokenRelationship(KNOWABLE_TOKEN)); + } + + @Override + protected Logger getResultsLogger() { + return log; + } + + /* --- Helpers --- */ + + private static TokenID asToken(String v) { + long[] nativeParts = asDotDelimitedLongArray(v); + return TokenID.newBuilder() + .setShardNum(nativeParts[0]) + .setRealmNum(nativeParts[1]) + .setTokenNum(nativeParts[2]) + .build(); + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ERCPrecompileSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ERCPrecompileSuite.java index 0f1326f5b8bd..3c1382717b30 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ERCPrecompileSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ERCPrecompileSuite.java @@ -16,9 +16,7 @@ package com.hedera.services.bdd.suites.contract.precompile; -import static com.hedera.services.bdd.spec.HapiPropertySource.asContractString; import static com.hedera.services.bdd.spec.HapiPropertySource.asHexedSolidityAddress; -import static com.hedera.services.bdd.spec.HapiPropertySource.contractIdFromHexedMirrorAddress; import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.assertions.AccountDetailsAsserts.accountDetailsWith; import static 
com.hedera.services.bdd.spec.assertions.AssertUtils.inOrder; @@ -59,7 +57,6 @@ import static com.hedera.services.bdd.suites.contract.Utils.asAddress; import static com.hedera.services.bdd.suites.contract.Utils.asHexedAddress; import static com.hedera.services.bdd.suites.contract.Utils.asToken; -import static com.hedera.services.bdd.suites.contract.Utils.captureChildCreate2MetaFor; import static com.hedera.services.bdd.suites.contract.Utils.eventSignatureOf; import static com.hedera.services.bdd.suites.contract.Utils.getABIFor; import static com.hedera.services.bdd.suites.contract.Utils.parsedToByteString; @@ -82,12 +79,10 @@ import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; -import com.esaulpaugh.headlong.abi.Address; import com.google.protobuf.ByteString; import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; import com.hedera.services.bdd.spec.HapiPropertySource; import com.hedera.services.bdd.spec.HapiSpec; -import com.hedera.services.bdd.spec.queries.crypto.ExpectedTokenRel; import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; import com.hedera.services.bdd.spec.transactions.token.TokenMovement; import com.hedera.services.bdd.suites.HapiSuite; @@ -199,7 +194,6 @@ List erc20() { someErc20ApproveAllowanceScenarioInOneCall(), getErc20TokenDecimalsFromErc721TokenFails(), transferErc20TokenReceiverContract(), - transferErc20TokenAliasedSender(), directCallsWorkForErc20(), erc20TransferFromAllowance(), erc20TransferFromSelf(), @@ -688,90 +682,6 @@ private HapiSpec transferErc20TokenReceiverContract() { getAccountBalance(nestedContract).hasTokenBalance(FUNGIBLE_TOKEN, 2)); } - private HapiSpec transferErc20TokenAliasedSender() { - final var aliasedTransferTxn = "aliasedTransferTxn"; - final var addLiquidityTxn = "addLiquidityTxn"; - final var create2Txn = "create2Txn"; - - final var ACCOUNT_A = "AccountA"; - final var ACCOUNT_B = "AccountB"; - - final var ALIASED_TRANSFER = "AliasedTransfer"; - final byte[][] ALIASED_ADDRESS = new byte[1][1]; - - final AtomicReference childMirror = new AtomicReference<>(); - final AtomicReference childEip1014 = new AtomicReference<>(); - - return defaultHapiSpec("transferErc20TokenAliasedSender") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(OWNER), - cryptoCreate(ACCOUNT), - cryptoCreate(ACCOUNT_A).key(MULTI_KEY).balance(ONE_MILLION_HBARS), - cryptoCreate(ACCOUNT_B).balance(ONE_MILLION_HBARS), - tokenCreate(TOKEN_NAME) - .adminKey(MULTI_KEY) - .initialSupply(10000) - .treasury(ACCOUNT_A), - tokenAssociate(ACCOUNT_B, TOKEN_NAME), - uploadInitCode(ALIASED_TRANSFER), - contractCreate(ALIASED_TRANSFER).gas(300_000), - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - ALIASED_TRANSFER, - "deployWithCREATE2", - asHeadlongAddress(asHexedAddress( - spec.registry().getTokenID(TOKEN_NAME)))) - .exposingResultTo(result -> { - final var res = (Address) result[0]; - ALIASED_ADDRESS[0] = res.value().toByteArray(); - }) - .payingWith(ACCOUNT) - .alsoSigningWithFullPrefix(MULTI_KEY) - .via(create2Txn) - .gas(GAS_TO_OFFER) - .hasKnownStatus(SUCCESS)))) - .when( - captureChildCreate2MetaFor(2, 0, "setup", create2Txn, childMirror, childEip1014), - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - ALIASED_TRANSFER, - "giveTokensToOperator", - HapiParserUtil.asHeadlongAddress(asAddress( - spec.registry().getTokenID(TOKEN_NAME))), - HapiParserUtil.asHeadlongAddress(asAddress( - 
spec.registry().getAccountID(ACCOUNT_A))), - 1500L) - .payingWith(ACCOUNT) - .alsoSigningWithFullPrefix(MULTI_KEY) - .via(addLiquidityTxn) - .gas(GAS_TO_OFFER) - .hasKnownStatus(SUCCESS))), - withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - ALIASED_TRANSFER, - TRANSFER, - HapiParserUtil.asHeadlongAddress(asAddress( - spec.registry().getAccountID(ACCOUNT_B))), - BigInteger.valueOf(1000)) - .payingWith(ACCOUNT) - .alsoSigningWithFullPrefix(MULTI_KEY) - .via(aliasedTransferTxn) - .gas(GAS_TO_OFFER) - .hasKnownStatus(SUCCESS)))) - .then( - sourcing(() -> getContractInfo( - asContractString(contractIdFromHexedMirrorAddress(childMirror.get()))) - .hasToken(ExpectedTokenRel.relationshipWith(TOKEN_NAME) - .balance(500)) - .logged()), - getAccountBalance(ACCOUNT_B).hasTokenBalance(TOKEN_NAME, 1000), - getAccountBalance(ACCOUNT_A).hasTokenBalance(TOKEN_NAME, 8500)); - } - private HapiSpec transferErc20TokenFromContractWithNoApproval() { final var transferFromOtherContractWithSignaturesTxn = "transferFromOtherContractWithSignaturesTxn"; final var nestedContract = NESTED_ERC_20_CONTRACT; diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ERCPrecompileV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ERCPrecompileV1SecurityModelSuite.java new file mode 100644 index 000000000000..b96d2a4149bc --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/ERCPrecompileV1SecurityModelSuite.java @@ -0,0 +1,181 @@ +/* + * Copyright (C) 2022-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.hedera.services.bdd.spec.HapiPropertySource.asContractString; +import static com.hedera.services.bdd.spec.HapiPropertySource.contractIdFromHexedMirrorAddress; +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractInfo; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overridingTwo; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.asHexedAddress; +import static com.hedera.services.bdd.suites.contract.Utils.captureChildCreate2MetaFor; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; + +import com.esaulpaugh.headlong.abi.Address; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.queries.crypto.ExpectedTokenRel; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.suites.HapiSuite; +import java.math.BigInteger; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +public class ERCPrecompileV1SecurityModelSuite extends HapiSuite { + private static final Logger log = LogManager.getLogger(ERCPrecompileV1SecurityModelSuite.class); + + private static final long GAS_TO_OFFER = 1_000_000L; + private static final String MULTI_KEY = "purpose"; + private static final String OWNER = "owner"; + private static final String ACCOUNT = "anybody"; + private static final String TOKEN_NAME = "TokenA"; + public static final String TRANSFER = "transfer"; + + public static void main(String... 
args) { + new ERCPrecompileV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + public List getSpecsInSuite() { + return allOf(erc20(), erc721()); + } + + List erc20() { + return List.of(transferErc20TokenAliasedSender()); + } + + List erc721() { + return List.of(); + } + + private HapiSpec transferErc20TokenAliasedSender() { + final var aliasedTransferTxn = "aliasedTransferTxn"; + final var addLiquidityTxn = "addLiquidityTxn"; + final var create2Txn = "create2Txn"; + + final var account_A = "AccountA"; + final var account_B = "AccountB"; + + final var aliasedTransfer = "AliasedTransfer"; + final byte[][] aliasedAddress = new byte[1][1]; + + final AtomicReference childMirror = new AtomicReference<>(); + final AtomicReference childEip1014 = new AtomicReference<>(); + + return propertyPreservingHapiSpec("transferErc20TokenAliasedSender") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(OWNER), + cryptoCreate(ACCOUNT), + cryptoCreate(account_A).key(MULTI_KEY).balance(ONE_MILLION_HBARS), + cryptoCreate(account_B).balance(ONE_MILLION_HBARS), + tokenCreate(TOKEN_NAME) + .adminKey(MULTI_KEY) + .initialSupply(10000) + .treasury(account_A), + tokenAssociate(account_B, TOKEN_NAME), + uploadInitCode(aliasedTransfer), + contractCreate(aliasedTransfer).gas(300_000), + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + aliasedTransfer, + "deployWithCREATE2", + asHeadlongAddress(asHexedAddress( + spec.registry().getTokenID(TOKEN_NAME)))) + .exposingResultTo(result -> { + final var res = (Address) result[0]; + aliasedAddress[0] = res.value().toByteArray(); + }) + .payingWith(ACCOUNT) + .alsoSigningWithFullPrefix(MULTI_KEY) + .via(create2Txn) + .gas(GAS_TO_OFFER) + .hasKnownStatus(SUCCESS)))) + .when( + captureChildCreate2MetaFor(2, 0, "setup", create2Txn, childMirror, childEip1014), + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + aliasedTransfer, + "giveTokensToOperator", + HapiParserUtil.asHeadlongAddress(asAddress( + spec.registry().getTokenID(TOKEN_NAME))), + HapiParserUtil.asHeadlongAddress(asAddress( + spec.registry().getAccountID(account_A))), + 1500L) + .payingWith(ACCOUNT) + .alsoSigningWithFullPrefix(MULTI_KEY) + .via(addLiquidityTxn) + .gas(GAS_TO_OFFER) + .hasKnownStatus(SUCCESS))), + withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + aliasedTransfer, + TRANSFER, + HapiParserUtil.asHeadlongAddress(asAddress( + spec.registry().getAccountID(account_B))), + BigInteger.valueOf(1000)) + .payingWith(ACCOUNT) + .alsoSigningWithFullPrefix(MULTI_KEY) + .via(aliasedTransferTxn) + .gas(GAS_TO_OFFER) + .hasKnownStatus(SUCCESS)))) + .then( + sourcing(() -> getContractInfo( + asContractString(contractIdFromHexedMirrorAddress(childMirror.get()))) + .hasToken(ExpectedTokenRel.relationshipWith(TOKEN_NAME) + .balance(500)) + .logged()), + getAccountBalance(account_B).hasTokenBalance(TOKEN_NAME, 1000), + getAccountBalance(account_A).hasTokenBalance(TOKEN_NAME, 8500)); + } + + @Override + protected Logger getResultsLogger() { + return log; + } +} diff --git 
a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/FreezeUnfreezeTokenPrecompileSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/FreezeUnfreezeTokenPrecompileSuite.java index 906eb00af878..20ef7f91b0fc 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/FreezeUnfreezeTokenPrecompileSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/FreezeUnfreezeTokenPrecompileSuite.java @@ -16,52 +16,35 @@ package com.hedera.services.bdd.suites.contract.precompile; -import static com.google.protobuf.ByteString.copyFromUtf8; import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; -import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.isLiteralResult; -import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; import static com.hedera.services.bdd.spec.queries.QueryVerbs.contractCallLocal; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountDetails; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAliasedAccountInfo; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoUpdate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; import static com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoTransfer.tinyBarsFromAccountToAlias; -import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; -import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.assertionsHold; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; import static com.hedera.services.bdd.suites.contract.Utils.asAddress; import static com.hedera.services.bdd.suites.contract.Utils.asToken; -import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.KNOWABLE_TOKEN; import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; -import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_TOKEN_ID; -import static 
com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_FREEZE_KEY; import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; -import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; -import com.hedera.node.app.hapi.utils.contracts.ParsingConstants; import com.hedera.services.bdd.spec.HapiSpec; -import com.hedera.services.bdd.spec.queries.crypto.ExpectedTokenRel; import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; import com.hedera.services.bdd.suites.HapiSuite; import com.hederahashgraph.api.proto.java.AccountID; -import com.hederahashgraph.api.proto.java.TokenFreezeStatus; import com.hederahashgraph.api.proto.java.TokenID; import java.util.List; import java.util.concurrent.atomic.AtomicReference; @@ -74,13 +57,7 @@ public class FreezeUnfreezeTokenPrecompileSuite extends HapiSuite { private static final String IS_FROZEN_FUNC = "isTokenFrozen"; public static final String TOKEN_FREEZE_FUNC = "tokenFreeze"; public static final String TOKEN_UNFREEZE_FUNC = "tokenUnfreeze"; - private static final String IS_FROZEN_TXN = "isFrozenTxn"; - private static final String ACCOUNT_HAS_NO_KEY_TXN = "accountHasNoFreezeKey"; - private static final String NO_KEY_FREEZE_TXN = "noKeyFreezeTxn"; - private static final String NO_KEY_UNFREEZE_TXN = "noKeyUnfreezeTxn"; private static final String ACCOUNT = "anybody"; - private static final String ACCOUNT_WITHOUT_KEY = "accountWithoutKey"; - private static final String TOKEN_WITHOUT_KEY = "withoutKey"; private static final String FREEZE_KEY = "freezeKey"; private static final String MULTI_KEY = "purpose"; private static final long GAS_TO_OFFER = 4_000_000L; @@ -102,12 +79,7 @@ protected Logger getResultsLogger() { @Override public List getSpecsInSuite() { - return List.of( - freezeUnfreezeFungibleWithNegativeCases(), - freezeUnfreezeNftsWithNegativeCases(), - isFrozenHappyPathWithLocalCall(), - noTokenIdReverts(), - isFrozenHappyPathWithAliasLocalCall()); + return List.of(isFrozenHappyPathWithAliasLocalCall(), noTokenIdReverts()); } private HapiSpec noTokenIdReverts() { @@ -160,244 +132,6 @@ private HapiSpec noTokenIdReverts() { recordWith().status(INVALID_TOKEN_ID))); } - private HapiSpec freezeUnfreezeFungibleWithNegativeCases() { - final AtomicReference withoutKeyID = new AtomicReference<>(); - final AtomicReference vanillaTokenID = new AtomicReference<>(); - final AtomicReference accountID = new AtomicReference<>(); - return defaultHapiSpec("freezeUnfreezeFungibleWithNegativeCases") - .given( - newKeyNamed(FREEZE_KEY), - newKeyNamed(MULTI_KEY), - cryptoCreate(ACCOUNT).balance(100 * ONE_HBAR).exposingCreatedIdTo(accountID::set), - cryptoCreate(ACCOUNT_WITHOUT_KEY), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(TOKEN_WITHOUT_KEY).exposingCreatedIdTo(id -> withoutKeyID.set(asToken(id))), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .freezeKey(FREEZE_KEY) - .initialSupply(1_000) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - uploadInitCode(FREEZE_CONTRACT), - contractCreate(FREEZE_CONTRACT), - tokenAssociate(ACCOUNT, VANILLA_TOKEN), - cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT))) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - FREEZE_CONTRACT, - TOKEN_FREEZE_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) - 
.logged() - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via(ACCOUNT_HAS_NO_KEY_TXN) - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - FREEZE_CONTRACT, - TOKEN_FREEZE_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(withoutKeyID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) - .logged() - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via(NO_KEY_FREEZE_TXN) - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - FREEZE_CONTRACT, - TOKEN_UNFREEZE_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(withoutKeyID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED) - .via(NO_KEY_UNFREEZE_TXN), - cryptoUpdate(ACCOUNT).key(FREEZE_KEY), - contractCall( - FREEZE_CONTRACT, - TOKEN_FREEZE_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) - .logged() - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .gas(GAS_TO_OFFER), - getAccountDetails(ACCOUNT) - .hasToken(ExpectedTokenRel.relationshipWith(VANILLA_TOKEN) - .freeze(TokenFreezeStatus.Frozen)), - contractCall( - FREEZE_CONTRACT, - TOKEN_UNFREEZE_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) - .logged() - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .gas(GAS_TO_OFFER), - getAccountDetails(ACCOUNT) - .hasToken(ExpectedTokenRel.relationshipWith(VANILLA_TOKEN) - .freeze(TokenFreezeStatus.Unfrozen))))) - .then( - childRecordsCheck( - ACCOUNT_HAS_NO_KEY_TXN, - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(INVALID_SIGNATURE) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INVALID_SIGNATURE)))), - childRecordsCheck( - NO_KEY_FREEZE_TXN, - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(TOKEN_HAS_NO_FREEZE_KEY) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(TOKEN_HAS_NO_FREEZE_KEY)))), - childRecordsCheck( - NO_KEY_UNFREEZE_TXN, - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(TOKEN_HAS_NO_FREEZE_KEY) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(TOKEN_HAS_NO_FREEZE_KEY))))); - } - - private HapiSpec freezeUnfreezeNftsWithNegativeCases() { - final AtomicReference vanillaTokenID = new AtomicReference<>(); - final AtomicReference accountID = new AtomicReference<>(); - return defaultHapiSpec("freezeUnfreezeNftsWithNegativeCases") - .given( - newKeyNamed(FREEZE_KEY), - newKeyNamed(MULTI_KEY), - cryptoCreate(ACCOUNT).balance(100 * ONE_HBAR).exposingCreatedIdTo(accountID::set), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(KNOWABLE_TOKEN) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(TOKEN_TREASURY) - .freezeKey(FREEZE_KEY) - .supplyKey(MULTI_KEY) - .initialSupply(0) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - mintToken(KNOWABLE_TOKEN, List.of(copyFromUtf8("First!"))), - uploadInitCode(FREEZE_CONTRACT), - contractCreate(FREEZE_CONTRACT), - tokenAssociate(ACCOUNT, KNOWABLE_TOKEN), - cryptoTransfer(movingUnique(KNOWABLE_TOKEN, 1L).between(TOKEN_TREASURY, ACCOUNT))) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - FREEZE_CONTRACT, - TOKEN_UNFREEZE_FUNC, - 
HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .gas(GAS_TO_OFFER) - .via(ACCOUNT_HAS_NO_KEY_TXN) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - cryptoUpdate(ACCOUNT).key(FREEZE_KEY), - contractCall( - FREEZE_CONTRACT, - TOKEN_FREEZE_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .gas(GAS_TO_OFFER), - getAccountDetails(ACCOUNT) - .hasToken(ExpectedTokenRel.relationshipWith(KNOWABLE_TOKEN) - .freeze(TokenFreezeStatus.Frozen)), - contractCall( - FREEZE_CONTRACT, - TOKEN_UNFREEZE_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .gas(GAS_TO_OFFER), - contractCall( - FREEZE_CONTRACT, - IS_FROZEN_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) - .logged() - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via(IS_FROZEN_TXN) - .gas(GAS_TO_OFFER)))) - .then( - childRecordsCheck( - ACCOUNT_HAS_NO_KEY_TXN, - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(INVALID_SIGNATURE) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INVALID_SIGNATURE)))), - childRecordsCheck( - IS_FROZEN_TXN, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(ParsingConstants.FunctionType.HAPI_IS_FROZEN) - .withStatus(SUCCESS) - .withIsFrozen(false))))); - } - - private HapiSpec isFrozenHappyPathWithLocalCall() { - final AtomicReference accountID = new AtomicReference<>(); - final AtomicReference vanillaTokenID = new AtomicReference<>(); - return defaultHapiSpec("isFrozenHappyPathWithLocalCall") - .given( - newKeyNamed(FREEZE_KEY), - newKeyNamed(MULTI_KEY), - cryptoCreate(ACCOUNT) - .balance(100 * ONE_HBAR) - .key(FREEZE_KEY) - .exposingCreatedIdTo(accountID::set), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .freezeKey(FREEZE_KEY) - .initialSupply(1_000) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - uploadInitCode(FREEZE_CONTRACT), - contractCreate(FREEZE_CONTRACT), - tokenAssociate(ACCOUNT, VANILLA_TOKEN), - cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT))) - .when(assertionsHold((spec, ctxLog) -> { - final var freezeCall = contractCall( - FREEZE_CONTRACT, - TOKEN_FREEZE_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) - .logged() - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .gas(GAS_TO_OFFER); - final var isFrozenLocalCall = contractCallLocal( - FREEZE_CONTRACT, - IS_FROZEN_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) - .has(resultWith() - .resultViaFunctionName( - IS_FROZEN_FUNC, FREEZE_CONTRACT, isLiteralResult(new Object[] {Boolean.TRUE - }))); - allRunFor(spec, freezeCall, isFrozenLocalCall); - })) - .then(); - } - private HapiSpec isFrozenHappyPathWithAliasLocalCall() { final 
AtomicReference vanillaTokenID = new AtomicReference<>(); final AtomicReference autoCreatedAccountId = new AtomicReference<>(); diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/FreezeUnfreezeTokenPrecompileV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/FreezeUnfreezeTokenPrecompileV1SecurityModelSuite.java new file mode 100644 index 000000000000..0831799457cf --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/FreezeUnfreezeTokenPrecompileV1SecurityModelSuite.java @@ -0,0 +1,365 @@ +/* + * Copyright (C) 2021-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.google.protobuf.ByteString.copyFromUtf8; +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.isLiteralResult; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.contractCallLocal; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountDetails; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoUpdate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.assertionsHold; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overridingTwo; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.asToken; +import static 
com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.KNOWABLE_TOKEN; +import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_FREEZE_KEY; +import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; +import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; + +import com.hedera.node.app.hapi.utils.contracts.ParsingConstants; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.queries.crypto.ExpectedTokenRel; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hederahashgraph.api.proto.java.AccountID; +import com.hederahashgraph.api.proto.java.TokenFreezeStatus; +import com.hederahashgraph.api.proto.java.TokenID; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +public class FreezeUnfreezeTokenPrecompileV1SecurityModelSuite extends HapiSuite { + private static final Logger log = LogManager.getLogger(FreezeUnfreezeTokenPrecompileV1SecurityModelSuite.class); + public static final String FREEZE_CONTRACT = "FreezeUnfreezeContract"; + private static final String IS_FROZEN_FUNC = "isTokenFrozen"; + public static final String TOKEN_FREEZE_FUNC = "tokenFreeze"; + public static final String TOKEN_UNFREEZE_FUNC = "tokenUnfreeze"; + private static final String IS_FROZEN_TXN = "isFrozenTxn"; + private static final String ACCOUNT_HAS_NO_KEY_TXN = "accountHasNoFreezeKey"; + private static final String NO_KEY_FREEZE_TXN = "noKeyFreezeTxn"; + private static final String NO_KEY_UNFREEZE_TXN = "noKeyUnfreezeTxn"; + private static final String ACCOUNT = "anybody"; + private static final String ACCOUNT_WITHOUT_KEY = "accountWithoutKey"; + private static final String TOKEN_WITHOUT_KEY = "withoutKey"; + private static final String FREEZE_KEY = "freezeKey"; + private static final String MULTI_KEY = "purpose"; + private static final long GAS_TO_OFFER = 4_000_000L; + + public static void main(String... 
args) { + new FreezeUnfreezeTokenPrecompileV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + protected Logger getResultsLogger() { + return log; + } + + @Override + public List getSpecsInSuite() { + return List.of( + freezeUnfreezeFungibleWithNegativeCases(), + freezeUnfreezeNftsWithNegativeCases(), + isFrozenHappyPathWithLocalCall()); + } + + private HapiSpec freezeUnfreezeFungibleWithNegativeCases() { + final AtomicReference withoutKeyID = new AtomicReference<>(); + final AtomicReference vanillaTokenID = new AtomicReference<>(); + final AtomicReference accountID = new AtomicReference<>(); + return propertyPreservingHapiSpec("freezeUnfreezeFungibleWithNegativeCases") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenFreezeAccount,TokenUnfreezeAccount", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(FREEZE_KEY), + newKeyNamed(MULTI_KEY), + cryptoCreate(ACCOUNT).balance(100 * ONE_HBAR).exposingCreatedIdTo(accountID::set), + cryptoCreate(ACCOUNT_WITHOUT_KEY), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(TOKEN_WITHOUT_KEY).exposingCreatedIdTo(id -> withoutKeyID.set(asToken(id))), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .freezeKey(FREEZE_KEY) + .initialSupply(1_000) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + uploadInitCode(FREEZE_CONTRACT), + contractCreate(FREEZE_CONTRACT), + tokenAssociate(ACCOUNT, VANILLA_TOKEN), + cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT))) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + FREEZE_CONTRACT, + TOKEN_FREEZE_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) + .logged() + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via(ACCOUNT_HAS_NO_KEY_TXN) + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + FREEZE_CONTRACT, + TOKEN_FREEZE_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(withoutKeyID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) + .logged() + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via(NO_KEY_FREEZE_TXN) + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + FREEZE_CONTRACT, + TOKEN_UNFREEZE_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(withoutKeyID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED) + .via(NO_KEY_UNFREEZE_TXN), + cryptoUpdate(ACCOUNT).key(FREEZE_KEY), + contractCall( + FREEZE_CONTRACT, + TOKEN_FREEZE_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) + .logged() + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .gas(GAS_TO_OFFER), + getAccountDetails(ACCOUNT) + .hasToken(ExpectedTokenRel.relationshipWith(VANILLA_TOKEN) + .freeze(TokenFreezeStatus.Frozen)), + contractCall( + FREEZE_CONTRACT, + TOKEN_UNFREEZE_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + 
HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) + .logged() + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .gas(GAS_TO_OFFER), + getAccountDetails(ACCOUNT) + .hasToken(ExpectedTokenRel.relationshipWith(VANILLA_TOKEN) + .freeze(TokenFreezeStatus.Unfrozen))))) + .then( + childRecordsCheck( + ACCOUNT_HAS_NO_KEY_TXN, + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_SIGNATURE) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(INVALID_SIGNATURE)))), + childRecordsCheck( + NO_KEY_FREEZE_TXN, + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(TOKEN_HAS_NO_FREEZE_KEY) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(TOKEN_HAS_NO_FREEZE_KEY)))), + childRecordsCheck( + NO_KEY_UNFREEZE_TXN, + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(TOKEN_HAS_NO_FREEZE_KEY) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(TOKEN_HAS_NO_FREEZE_KEY))))); + } + + private HapiSpec freezeUnfreezeNftsWithNegativeCases() { + final AtomicReference vanillaTokenID = new AtomicReference<>(); + final AtomicReference accountID = new AtomicReference<>(); + return propertyPreservingHapiSpec("freezeUnfreezeNftsWithNegativeCases") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenFreezeAccount,TokenUnfreezeAccount", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(FREEZE_KEY), + newKeyNamed(MULTI_KEY), + cryptoCreate(ACCOUNT).balance(100 * ONE_HBAR).exposingCreatedIdTo(accountID::set), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(KNOWABLE_TOKEN) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(TOKEN_TREASURY) + .freezeKey(FREEZE_KEY) + .supplyKey(MULTI_KEY) + .initialSupply(0) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + mintToken(KNOWABLE_TOKEN, List.of(copyFromUtf8("First!"))), + uploadInitCode(FREEZE_CONTRACT), + contractCreate(FREEZE_CONTRACT), + tokenAssociate(ACCOUNT, KNOWABLE_TOKEN), + cryptoTransfer(movingUnique(KNOWABLE_TOKEN, 1L).between(TOKEN_TREASURY, ACCOUNT))) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + FREEZE_CONTRACT, + TOKEN_UNFREEZE_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .gas(GAS_TO_OFFER) + .via(ACCOUNT_HAS_NO_KEY_TXN) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + cryptoUpdate(ACCOUNT).key(FREEZE_KEY), + contractCall( + FREEZE_CONTRACT, + TOKEN_FREEZE_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .gas(GAS_TO_OFFER), + getAccountDetails(ACCOUNT) + .hasToken(ExpectedTokenRel.relationshipWith(KNOWABLE_TOKEN) + .freeze(TokenFreezeStatus.Frozen)), + contractCall( + FREEZE_CONTRACT, + TOKEN_UNFREEZE_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .gas(GAS_TO_OFFER), + contractCall( + FREEZE_CONTRACT, + IS_FROZEN_FUNC, + 
HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) + .logged() + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via(IS_FROZEN_TXN) + .gas(GAS_TO_OFFER)))) + .then( + childRecordsCheck( + ACCOUNT_HAS_NO_KEY_TXN, + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_SIGNATURE) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(INVALID_SIGNATURE)))), + childRecordsCheck( + IS_FROZEN_TXN, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(ParsingConstants.FunctionType.HAPI_IS_FROZEN) + .withStatus(SUCCESS) + .withIsFrozen(false))))); + } + + private HapiSpec isFrozenHappyPathWithLocalCall() { + final AtomicReference accountID = new AtomicReference<>(); + final AtomicReference vanillaTokenID = new AtomicReference<>(); + return propertyPreservingHapiSpec("isFrozenHappyPathWithLocalCall") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenFreezeAccount", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(FREEZE_KEY), + newKeyNamed(MULTI_KEY), + cryptoCreate(ACCOUNT) + .balance(100 * ONE_HBAR) + .key(FREEZE_KEY) + .exposingCreatedIdTo(accountID::set), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .freezeKey(FREEZE_KEY) + .initialSupply(1_000) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + uploadInitCode(FREEZE_CONTRACT), + contractCreate(FREEZE_CONTRACT), + tokenAssociate(ACCOUNT, VANILLA_TOKEN), + cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT))) + .when(assertionsHold((spec, ctxLog) -> { + final var freezeCall = contractCall( + FREEZE_CONTRACT, + TOKEN_FREEZE_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) + .logged() + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .gas(GAS_TO_OFFER); + final var isFrozenLocalCall = contractCallLocal( + FREEZE_CONTRACT, + IS_FROZEN_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get()))) + .has(resultWith() + .resultViaFunctionName( + IS_FROZEN_FUNC, FREEZE_CONTRACT, isLiteralResult(new Object[] {Boolean.TRUE + }))); + allRunFor(spec, freezeCall, isFrozenLocalCall); + })) + .then(); + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/GrantRevokeKycSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/GrantRevokeKycSuite.java index 1354da9455c7..f61bda1882d3 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/GrantRevokeKycSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/GrantRevokeKycSuite.java @@ -20,7 +20,6 @@ import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; import static com.hedera.services.bdd.spec.queries.QueryVerbs.contractCallLocal; 
-import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountDetails; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAliasedAccountInfo; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; @@ -42,18 +41,14 @@ import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_TOKEN_ID; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_KYC_KEY; import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; -import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; import com.hedera.services.bdd.spec.HapiSpec; -import com.hedera.services.bdd.spec.queries.crypto.ExpectedTokenRel; import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; import com.hedera.services.bdd.suites.HapiSuite; import com.hederahashgraph.api.proto.java.AccountID; import com.hederahashgraph.api.proto.java.TokenID; -import com.hederahashgraph.api.proto.java.TokenKycStatus; import java.util.List; import java.util.concurrent.atomic.AtomicReference; import org.apache.logging.log4j.LogManager; @@ -97,7 +92,7 @@ List negativeSpecs() { } List positiveSpecs() { - return List.of(grantRevokeKycSpec(), grantRevokeKycSpecWithAliasLocalCall()); + return List.of(grantRevokeKycSpecWithAliasLocalCall()); } private HapiSpec grantRevokeKycFail() { @@ -268,96 +263,6 @@ private HapiSpec grantRevokeKycFail() { htsPrecompileResult().withStatus(INVALID_TOKEN_ID))))); } - private HapiSpec grantRevokeKycSpec() { - final AtomicReference vanillaTokenID = new AtomicReference<>(); - final AtomicReference accountID = new AtomicReference<>(); - final AtomicReference secondAccountID = new AtomicReference<>(); - - return defaultHapiSpec("GrantRevokeKycSpec") - .given( - newKeyNamed(KYC_KEY), - cryptoCreate(ACCOUNT) - .balance(100 * ONE_HBAR) - .key(KYC_KEY) - .exposingCreatedIdTo(accountID::set), - cryptoCreate(SECOND_ACCOUNT).exposingCreatedIdTo(secondAccountID::set), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .kycKey(KYC_KEY) - .initialSupply(1_000) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - uploadInitCode(GRANT_REVOKE_KYC_CONTRACT), - contractCreate(GRANT_REVOKE_KYC_CONTRACT), - tokenAssociate(ACCOUNT, VANILLA_TOKEN), - tokenAssociate(SECOND_ACCOUNT, VANILLA_TOKEN)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - GRANT_REVOKE_KYC_CONTRACT, - TOKEN_GRANT_KYC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(secondAccountID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via("GrantKycTx") - .gas(GAS_TO_OFFER), - getAccountDetails(SECOND_ACCOUNT) - .hasToken(ExpectedTokenRel.relationshipWith(VANILLA_TOKEN) - .kyc(TokenKycStatus.Granted)), - contractCallLocal( - GRANT_REVOKE_KYC_CONTRACT, - IS_KYC_GRANTED, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(secondAccountID.get()))), - contractCall( - GRANT_REVOKE_KYC_CONTRACT, - TOKEN_REVOKE_KYC, - 
HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(secondAccountID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via("RevokeKycTx") - .gas(GAS_TO_OFFER), - contractCall( - GRANT_REVOKE_KYC_CONTRACT, - IS_KYC_GRANTED, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(secondAccountID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via("IsKycTx") - .gas(GAS_TO_OFFER)))) - .then( - childRecordsCheck( - "GrantKycTx", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS)))), - childRecordsCheck( - "RevokeKycTx", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS)))), - childRecordsCheck( - "IsKycTx", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_IS_KYC) - .withIsKyc(false) - .withStatus(SUCCESS))))); - } - private HapiSpec grantRevokeKycSpecWithAliasLocalCall() { final AtomicReference vanillaTokenID = new AtomicReference<>(); final AtomicReference autoCreatedAccountId = new AtomicReference<>(); diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/GrantRevokeKycV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/GrantRevokeKycV1SecurityModelSuite.java new file mode 100644 index 000000000000..b4c53f1fe760 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/GrantRevokeKycV1SecurityModelSuite.java @@ -0,0 +1,188 @@ +/* + * Copyright (C) 2021-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.contractCallLocal; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountDetails; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overriding; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.asToken; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; +import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; + +import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.queries.crypto.ExpectedTokenRel; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hederahashgraph.api.proto.java.AccountID; +import com.hederahashgraph.api.proto.java.TokenID; +import com.hederahashgraph.api.proto.java.TokenKycStatus; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +@SuppressWarnings("java:S1192") // "string literal should not be duplicated" - this rule makes test suites worse +public class GrantRevokeKycV1SecurityModelSuite extends HapiSuite { + private static final Logger log = LogManager.getLogger(GrantRevokeKycV1SecurityModelSuite.class); + public static final String GRANT_REVOKE_KYC_CONTRACT = "GrantRevokeKyc"; + private static final String IS_KYC_GRANTED = "isKycGranted"; + public static final String TOKEN_GRANT_KYC = "tokenGrantKyc"; + public static final String TOKEN_REVOKE_KYC = "tokenRevokeKyc"; + + private static final long GAS_TO_OFFER = 4_000_000L; + private static final String ACCOUNT = "anybody"; + public static final String SECOND_ACCOUNT = "anybodySecond"; + private static final String KYC_KEY = 
"kycKey"; + + public static void main(String... args) { + new GrantRevokeKycV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + protected Logger getResultsLogger() { + return log; + } + + @Override + public List getSpecsInSuite() { + return allOf(positiveSpecs(), negativeSpecs()); + } + + List negativeSpecs() { + return List.of(); + } + + List positiveSpecs() { + return List.of(grantRevokeKycSpec()); + } + + private HapiSpec grantRevokeKycSpec() { + final AtomicReference vanillaTokenID = new AtomicReference<>(); + final AtomicReference accountID = new AtomicReference<>(); + final AtomicReference secondAccountID = new AtomicReference<>(); + + return propertyPreservingHapiSpec("grantRevokeKycSpec") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(KYC_KEY), + cryptoCreate(ACCOUNT) + .balance(100 * ONE_HBAR) + .key(KYC_KEY) + .exposingCreatedIdTo(accountID::set), + cryptoCreate(SECOND_ACCOUNT).exposingCreatedIdTo(secondAccountID::set), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .kycKey(KYC_KEY) + .initialSupply(1_000) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + uploadInitCode(GRANT_REVOKE_KYC_CONTRACT), + contractCreate(GRANT_REVOKE_KYC_CONTRACT), + tokenAssociate(ACCOUNT, VANILLA_TOKEN), + tokenAssociate(SECOND_ACCOUNT, VANILLA_TOKEN)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + GRANT_REVOKE_KYC_CONTRACT, + TOKEN_GRANT_KYC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(secondAccountID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via("GrantKycTx") + .gas(GAS_TO_OFFER), + getAccountDetails(SECOND_ACCOUNT) + .hasToken(ExpectedTokenRel.relationshipWith(VANILLA_TOKEN) + .kyc(TokenKycStatus.Granted)), + contractCallLocal( + GRANT_REVOKE_KYC_CONTRACT, + IS_KYC_GRANTED, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(secondAccountID.get()))), + contractCall( + GRANT_REVOKE_KYC_CONTRACT, + TOKEN_REVOKE_KYC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(secondAccountID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via("RevokeKycTx") + .gas(GAS_TO_OFFER), + contractCall( + GRANT_REVOKE_KYC_CONTRACT, + IS_KYC_GRANTED, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(secondAccountID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via("IsKycTx") + .gas(GAS_TO_OFFER)))) + .then( + childRecordsCheck( + "GrantKycTx", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS)))), + childRecordsCheck( + "RevokeKycTx", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS)))), + childRecordsCheck( + "IsKycTx", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_IS_KYC) + .withIsKyc(false) + .withStatus(SUCCESS))))); + } +} diff --git 
a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/LazyCreateThroughPrecompileSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/LazyCreateThroughPrecompileSuite.java index 8efcacf25753..4904057441e7 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/LazyCreateThroughPrecompileSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/LazyCreateThroughPrecompileSuite.java @@ -24,14 +24,11 @@ import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; import static com.hedera.services.bdd.spec.keys.KeyShape.ED25519; -import static com.hedera.services.bdd.spec.keys.SigControl.SECP256K1_ON; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountDetails; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAliasedAccountBalance; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAliasedAccountInfo; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractInfo; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getLiteralAliasAccountInfo; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenNftInfo; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoApproveAllowance; @@ -41,23 +38,21 @@ import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; -import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.fractionalFee; import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.accountAmount; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.accountAmountAlias; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.emptyChildRecordsCheck; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.inParallel; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.nftTransferToAlias; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.tokenTransferList; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.tokenTransferLists; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.transferList; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.headlongFromHexed; +import static com.hedera.services.bdd.suites.contract.Utils.mirrorAddrWith; +import static 
com.hedera.services.bdd.suites.contract.Utils.nCopiesOfSender; +import static com.hedera.services.bdd.suites.contract.Utils.nNonMirrorAddressFrom; +import static com.hedera.services.bdd.suites.contract.Utils.nonMirrorAddrWith; import static com.hedera.services.bdd.suites.crypto.AutoAccountCreationSuite.LAZY_MEMO; import static com.hedera.services.bdd.suites.file.FileUpdateSuite.CIVILIAN; import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; @@ -65,15 +60,11 @@ import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INSUFFICIENT_GAS; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_ACCOUNT_ID; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_ALIAS_KEY; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.MAX_CHILD_RECORDS_EXCEEDED; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.NO_REMAINING_AUTOMATIC_ASSOCIATIONS; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.REVERTED_SUCCESS; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; import static com.swirlds.common.utility.CommonUtils.hex; -import com.esaulpaugh.headlong.abi.Address; -import com.esaulpaugh.headlong.abi.Tuple; import com.google.protobuf.ByteString; import com.hedera.node.app.hapi.utils.ByteStringUtils; import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; @@ -81,18 +72,13 @@ import com.hedera.services.bdd.spec.HapiSpec; import com.hedera.services.bdd.spec.HapiSpecOperation; import com.hedera.services.bdd.spec.assertions.AccountInfoAsserts; -import com.hedera.services.bdd.spec.assertions.ContractInfoAsserts; import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; -import com.hedera.services.bdd.spec.transactions.token.TokenMovement; -import com.hedera.services.bdd.spec.utilops.UtilVerbs; import com.hedera.services.bdd.suites.HapiSuite; import com.hederahashgraph.api.proto.java.TokenSupplyType; import com.hederahashgraph.api.proto.java.TokenType; import java.math.BigInteger; import java.nio.charset.StandardCharsets; -import java.util.Collections; import java.util.List; -import java.util.OptionalLong; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.IntStream; @@ -100,7 +86,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.tuweni.bytes.Bytes; -import org.junit.jupiter.api.Assertions; public class LazyCreateThroughPrecompileSuite extends HapiSuite { private static final Logger log = LogManager.getLogger(LazyCreateThroughPrecompileSuite.class); @@ -112,17 +97,9 @@ public class LazyCreateThroughPrecompileSuite extends HapiSuite { private static final String FIRST = "FIRST"; public static final ByteString FIRST_META = ByteString.copyFrom(FIRST.getBytes(StandardCharsets.UTF_8)); public static final ByteString SECOND_META = ByteString.copyFrom(FIRST.getBytes(StandardCharsets.UTF_8)); - private static final String TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT = "PrecompileAliasXfer"; private static final String SPENDER = "spender"; - private static final String TRANSFER_TOKEN_TXN = "transferTokenTxn"; - private static final String TRANSFER_TOKENS_TXN = "transferTokensTxn"; - private static final String TRANSFER_NFT_TXN = "transferNFTTxn"; - private static final String 
TRANSFER_NFTS_TXN = "transferNFTsTxn"; - private static final String SENDER = "sender"; - private static final long TOTAL_SUPPLY = 1_000; private static final String NFT_TOKEN = "Token_NFT"; private static final String TRANSFER_TXN = "transferTxn"; - private static final String TRANSFER_TXN2 = "transferTxn2"; private static final String NFT_KEY = "nftKey"; private static final String AUTO_CREATION_MODES = "AutoCreationModes"; private static final String CREATION_ATTEMPT = "creationAttempt"; @@ -144,8 +121,6 @@ public class LazyCreateThroughPrecompileSuite extends HapiSuite { private static final String HTS_TRANSFER_FROM = "htsTransferFrom"; private static final String HTS_TRANSFER_FROM_NFT = "htsTransferFromNFT"; private static final String BASE_APPROVE_TXN = "baseApproveTxn"; - private static final String TRANSFER_TXN3 = "transferTxn3"; - private static final Tuple[] EMPTY_TUPLE_ARRAY = new Tuple[] {}; private static final String RECIPIENT = "recipient"; private static final String NOT_ENOUGH_GAS_TXN = "NOT_ENOUGH_GAS_TXN"; private static final String ECDSA_KEY = "abcdECDSAkey"; @@ -167,84 +142,13 @@ protected Logger getResultsLogger() { @Override public List getSpecsInSuite() { return List.of( - cryptoTransferV1LazyCreate(), - cryptoTransferV2LazyCreate(), - transferTokenLazyCreate(), - transferTokensToEVMAddressAliasRevertAndTransferAgainSuccessfully(), - transferNftLazyCreate(), - transferNftsLazyCreate(), erc20TransferLazyCreate(), erc20TransferFromLazyCreate(), erc721TransferFromLazyCreate(), htsTransferFromFungibleTokenLazyCreate(), - htsTransferFromFungibleTokenLazyCreate(), htsTransferFromForNFTLazyCreate(), - hollowAccountSigningReqsStillEnforced(), resourceLimitExceededRevertsAllRecords(), - autoCreationFailsWithMirrorAddress(), - revertedAutoCreationRollsBackEvenIfTopLevelSucceeds(), - canCreateMultipleHollows(), - canCreateViaFungibleWithFractionalFee()); - } - - HapiSpec hollowAccountSigningReqsStillEnforced() { - final var nft = "nft"; - final var nftKey = NFT_KEY; - final var creationAttempt = CREATION_ATTEMPT; - final var creationReversal = "creationReversal"; - final AtomicLong civilianId = new AtomicLong(); - final AtomicReference nftMirrorAddr = new AtomicReference<>(); - - return defaultHapiSpec("HollowAccountSigningReqsStillEnforced") - .given( - newKeyNamed(nftKey), - uploadInitCode(AUTO_CREATION_MODES), - contractCreate(AUTO_CREATION_MODES), - cryptoCreate(CIVILIAN) - .keyShape(ED25519) - .exposingCreatedIdTo(id -> civilianId.set(id.getAccountNum())), - tokenCreate(nft) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .supplyKey(nftKey) - .initialSupply(0) - .treasury(CIVILIAN) - .exposingCreatedIdTo( - idLit -> nftMirrorAddr.set(asHexedSolidityAddress(asToken(idLit)))), - mintToken(nft, List.of(ByteString.copyFromUtf8(ONE_TIME)))) - .when(sourcing(() -> contractCall( - AUTO_CREATION_MODES, - CREATE_DIRECTLY, - headlongFromHexed(nftMirrorAddr.get()), - mirrorAddrWith(civilianId.get()), - nonMirrorAddrWith(civilianId.get() + 4_000_000), - 1L, - false) - .via(creationAttempt) - .gas(GAS_TO_OFFER) - .alsoSigningWithFullPrefix(CIVILIAN))) - .then( - getTxnRecord(creationAttempt).andAllChildRecords().logged(), - sourcing(() -> getLiteralAliasAccountInfo( - hex(Bytes.fromHexString(nonMirrorAddrWith(civilianId.get() + 4_000_000) - .toString()) - .toArray())) - .logged()), - // Now try to reverse the transfer and take the hollow account's NFT - sourcing(() -> contractCall( - AUTO_CREATION_MODES, - CREATE_DIRECTLY, - headlongFromHexed(nftMirrorAddr.get()), - 
nonMirrorAddrWith(civilianId.get() + 4_000_000), - mirrorAddrWith(civilianId.get()), - 1L, - false) - .via(creationReversal) - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED)), - sourcing(() -> childRecordsCheck( - creationReversal, - CONTRACT_REVERT_EXECUTED, - recordWith().status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))); + autoCreationFailsWithMirrorAddress()); } HapiSpec resourceLimitExceededRevertsAllRecords() { @@ -335,777 +239,6 @@ HapiSpec autoCreationFailsWithMirrorAddress() { creationAttempt, CONTRACT_REVERT_EXECUTED, recordWith().status(INVALID_ALIAS_KEY))); } - HapiSpec revertedAutoCreationRollsBackEvenIfTopLevelSucceeds() { - final var nft = "nft"; - final var nftKey = NFT_KEY; - final var creationAttempt = CREATION_ATTEMPT; - final AtomicLong civilianId = new AtomicLong(); - final AtomicReference nftMirrorAddr = new AtomicReference<>(); - - return defaultHapiSpec("RevertedAutoCreationRollsBackEvenIfTopLevelSucceeds") - .given( - newKeyNamed(nftKey), - uploadInitCode(AUTO_CREATION_MODES), - contractCreate(AUTO_CREATION_MODES), - cryptoCreate(CIVILIAN) - .keyShape(ED25519) - .exposingCreatedIdTo(id -> civilianId.set(id.getAccountNum())), - tokenCreate(nft) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .supplyKey(nftKey) - .initialSupply(0) - .treasury(CIVILIAN) - .exposingCreatedIdTo( - idLit -> nftMirrorAddr.set(asHexedSolidityAddress(asToken(idLit)))), - mintToken(nft, List.of(ByteString.copyFromUtf8(ONE_TIME)))) - .when(sourcing(() -> contractCall( - AUTO_CREATION_MODES, - "createIndirectlyRevertingAndRecover", - headlongFromHexed(nftMirrorAddr.get()), - mirrorAddrWith(civilianId.get()), - nonMirrorAddrWith(civilianId.get() + 8_000_000), - 1L) - .via(creationAttempt) - .gas(GAS_TO_OFFER) - .alsoSigningWithFullPrefix(CIVILIAN) - .hasKnownStatus(SUCCESS))) - .then(childRecordsCheck( - creationAttempt, - SUCCESS, - recordWith() - .status(REVERTED_SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS))))); - } - - @SuppressWarnings("java:S5960") - HapiSpec canCreateViaFungibleWithFractionalFee() { - final var ft = "ft"; - final var ftKey = NFT_KEY; - final var creationAttempt = CREATION_ATTEMPT; - final AtomicLong civilianId = new AtomicLong(); - final AtomicReference ftMirrorAddr = new AtomicReference<>(); - final long supply = 100_000_000; - - return defaultHapiSpec("CanCreateViaFungibleWithFractionalFee") - .given( - newKeyNamed(ftKey), - uploadInitCode(AUTO_CREATION_MODES), - contractCreate(AUTO_CREATION_MODES), - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(CIVILIAN) - .maxAutomaticTokenAssociations(1) - .keyShape(ED25519) - .exposingCreatedIdTo(id -> civilianId.set(id.getAccountNum())), - // If running locally, ensures the entity 0.0. 
is an account w/ EVM address - cryptoCreate("somebody").keyShape(SECP256K1_ON).withMatchingEvmAddress(), - tokenCreate(ft) - .tokenType(TokenType.FUNGIBLE_COMMON) - .supplyKey(ftKey) - .initialSupply(supply) - .withCustom(fractionalFee(1L, 20L, 0L, OptionalLong.of(0L), TOKEN_TREASURY)) - .treasury(TOKEN_TREASURY) - .exposingCreatedIdTo(idLit -> ftMirrorAddr.set(asHexedSolidityAddress(asToken(idLit)))), - cryptoTransfer(TokenMovement.moving(supply, ft).between(TOKEN_TREASURY, CIVILIAN))) - .when(withOpContext((spec, opLog) -> { - final var op = contractCall( - AUTO_CREATION_MODES, - "createDirectlyViaFungible", - headlongFromHexed(ftMirrorAddr.get()), - mirrorAddrWith(civilianId.get()), - nonMirrorAddrWith(123, civilianId.get() + 1), - supply) - .via(creationAttempt) - .gas(GAS_TO_OFFER) - .alsoSigningWithFullPrefix(CIVILIAN) - .hasKnownStatusFrom(SUCCESS, CONTRACT_REVERT_EXECUTED); - allRunFor(spec, op); - // If this ContractCall was converted to an EthereumTransaction, then it will - // not be tracking the last receipt and we can't do this extra logging; this is - // fine for now, since the _Eth spec hasn't been flaky - if (op.hasActualStatus() && op.getActualStatus() == CONTRACT_REVERT_EXECUTED) { - final var lookup = getTxnRecord(creationAttempt).andAllChildRecords(); - allRunFor(spec, lookup); - Assertions.fail("canCreateViaFungibleWithFractionalFee() failed w/ record " - + lookup.getResponseRecord() - + " and child records " - + lookup.getChildRecords()); - } - })) - .then(); - } - - HapiSpec canCreateMultipleHollows() { - final var n = 3; - final var nft = "nft"; - final var nftKey = NFT_KEY; - final var creationAttempt = CREATION_ATTEMPT; - final AtomicLong civilianId = new AtomicLong(); - final AtomicReference nftMirrorAddr = new AtomicReference<>(); - - return defaultHapiSpec("CanCreateMultipleHollows") - .given( - newKeyNamed(nftKey), - uploadInitCode(AUTO_CREATION_MODES), - contractCreate(AUTO_CREATION_MODES), - cryptoCreate(CIVILIAN) - .keyShape(ED25519) - .exposingCreatedIdTo(id -> civilianId.set(id.getAccountNum())), - tokenCreate(nft) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .supplyKey(nftKey) - .initialSupply(0) - .treasury(CIVILIAN) - .exposingCreatedIdTo( - idLit -> nftMirrorAddr.set(asHexedSolidityAddress(asToken(idLit)))), - mintToken( - nft, - IntStream.range(0, n) - .mapToObj(i -> ByteString.copyFromUtf8(ONE_TIME + i)) - .toList())) - .when(sourcing(() -> contractCall( - AUTO_CREATION_MODES, - "createSeveralDirectly", - headlongFromHexed(nftMirrorAddr.get()), - nCopiesOfSender(n, mirrorAddrWith(civilianId.get())), - nNonMirrorAddressFrom(n, civilianId.get() + 1_234_567_890L), - LongStream.iterate(1L, l -> l + 1).limit(n).toArray()) - .via(creationAttempt) - .gas(GAS_TO_OFFER) - .alsoSigningWithFullPrefix(CIVILIAN) - .hasKnownStatus(SUCCESS))) - .then(getTxnRecord(creationAttempt).andAllChildRecords().logged()); - } - - private Address[] nCopiesOfSender(final int n, final Address mirrorAddr) { - return Collections.nCopies(n, mirrorAddr).toArray(Address[]::new); - } - - private Address[] nNonMirrorAddressFrom(final int n, final long m) { - return LongStream.range(m, m + n).mapToObj(this::nonMirrorAddrWith).toArray(Address[]::new); - } - - private Address headlongFromHexed(final String addr) { - return Address.wrap(Address.toChecksumAddress("0x" + addr)); - } - - public static Address mirrorAddrWith(final long num) { - return Address.wrap( - Address.toChecksumAddress(new BigInteger(1, HapiPropertySource.asSolidityAddress(0, 0, num)))); - } - - private Address 
nonMirrorAddrWith(final long num) { - return nonMirrorAddrWith(666, num); - } - - private Address nonMirrorAddrWith(final long seed, final long num) { - return Address.wrap(Address.toChecksumAddress( - new BigInteger(1, HapiPropertySource.asSolidityAddress((int) seed, seed, num)))); - } - - private HapiSpec cryptoTransferV1LazyCreate() { - final var NESTED_LAZY_PRECOMPILE_CONTRACT = "LazyPrecompileTransfers"; - final var FUNGIBLE_TOKEN_2 = "ftnt"; - return defaultHapiSpec("cryptoTransferV1LazyCreate") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(SENDER) - .balance(10 * ONE_HUNDRED_HBARS) - .key(MULTI_KEY) - .maxAutomaticTokenAssociations(5), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(FUNGIBLE_TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(TOTAL_SUPPLY) - .treasury(TOKEN_TREASURY), - tokenCreate(FUNGIBLE_TOKEN_2) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(TOTAL_SUPPLY) - .treasury(TOKEN_TREASURY), - tokenCreate(NFT_TOKEN) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasury(SENDER) - .initialSupply(0L) - .supplyKey(MULTI_KEY), - mintToken(NFT_TOKEN, List.of(META1, META2)), - newKeyNamed(ECDSA_KEY).shape(SECP_256K1_SHAPE), - cryptoTransfer(moving(500, FUNGIBLE_TOKEN).between(TOKEN_TREASURY, SENDER)), - cryptoTransfer(moving(500, FUNGIBLE_TOKEN_2).between(TOKEN_TREASURY, SENDER)), - uploadInitCode(NESTED_LAZY_PRECOMPILE_CONTRACT), - contractCreate(NESTED_LAZY_PRECOMPILE_CONTRACT).maxAutomaticTokenAssociations(1), - getContractInfo(NESTED_LAZY_PRECOMPILE_CONTRACT) - .has(ContractInfoAsserts.contractWith().maxAutoAssociations(1)) - .logged()) - .when(withOpContext((spec, opLog) -> { - final var token = spec.registry().getTokenID(FUNGIBLE_TOKEN); - final var token2 = spec.registry().getTokenID(FUNGIBLE_TOKEN_2); - final var nftToken = spec.registry().getTokenID(NFT_TOKEN); - final var sender = spec.registry().getAccountID(SENDER); - final var ecdsaKey = spec.registry().getKey(ECDSA_KEY); - final var evmAddressBytes = ByteString.copyFrom(recoverAddressFromPubKey( - ecdsaKey.getECDSASecp256K1().toByteArray())); - final var amountToBeSent = 50L; - final var transferFn = "cryptoTransferV1LazyCreate"; - allRunFor( - spec, - contractCall( - NESTED_LAZY_PRECOMPILE_CONTRACT, - transferFn, - tokenTransferLists() - .withTokenTransferList( - tokenTransferList() - .forToken(token) - .withAccountAmounts( - accountAmount(sender, -amountToBeSent), - accountAmountAlias( - recoverAddressFromPubKey( - ecdsaKey.getECDSASecp256K1() - .toByteArray()), - amountToBeSent)) - .build(), - tokenTransferList() - .forToken(nftToken) - .withNftTransfers(nftTransferToAlias( - sender, - recoverAddressFromPubKey( - ecdsaKey.getECDSASecp256K1() - .toByteArray()), - 1L)) - .build()) - .build(), - tokenTransferLists() - .withTokenTransferList(tokenTransferList() - .forToken(token) - .withAccountAmounts( - accountAmount(sender, -amountToBeSent), - accountAmountAlias( - recoverAddressFromPubKey( - ecdsaKey.getECDSASecp256K1() - .toByteArray()), - amountToBeSent)) - .build()) - .build()) - .payingWith(GENESIS) - .via(TRANSFER_TXN) - .signedBy(GENESIS, MULTI_KEY) - .alsoSigningWithFullPrefix(MULTI_KEY) - .gas(GAS_TO_OFFER), - contractCall( - NESTED_LAZY_PRECOMPILE_CONTRACT, - transferFn, - tokenTransferLists() - .withTokenTransferList(tokenTransferList() - .forToken(token) - .withAccountAmounts( - accountAmount(sender, -1L), - accountAmountAlias( - recoverAddressFromPubKey( - ecdsaKey.getECDSASecp256K1() - .toByteArray()), - 1L)) - .build()) - .build(), - tokenTransferLists() - 
.withTokenTransferList(tokenTransferList() - .forToken(token) - .withAccountAmounts( - accountAmount(sender, -1L), - accountAmountAlias( - recoverAddressFromPubKey( - ecdsaKey.getECDSASecp256K1() - .toByteArray()), - 1L)) - .build()) - .build()) - .payingWith(GENESIS) - .signedBy(GENESIS, MULTI_KEY) - .alsoSigningWithFullPrefix(MULTI_KEY) - .via(TRANSFER_TXN2) - .gas(GAS_TO_OFFER), - contractCall( - NESTED_LAZY_PRECOMPILE_CONTRACT, - transferFn, - tokenTransferLists() - .withTokenTransferList(tokenTransferList() - .forToken(token2) - .withAccountAmounts( - accountAmount(sender, -1L), - accountAmountAlias( - recoverAddressFromPubKey( - ecdsaKey.getECDSASecp256K1() - .toByteArray()), - 1L)) - .build()) - .build(), - tokenTransferLists() - .withTokenTransferList(tokenTransferList() - .forToken(token2) - .withAccountAmounts( - accountAmount(sender, -1L), - accountAmountAlias( - recoverAddressFromPubKey( - ecdsaKey.getECDSASecp256K1() - .toByteArray()), - 1L)) - .build()) - .build()) - .payingWith(GENESIS) - .signedBy(GENESIS, MULTI_KEY) - .alsoSigningWithFullPrefix(MULTI_KEY) - .via(TRANSFER_TXN3) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED) - .gas(GAS_TO_OFFER), - childRecordsCheck( - TRANSFER_TXN, - SUCCESS, - recordWith().status(SUCCESS), - recordWith().status(SUCCESS), - recordWith().status(SUCCESS)), - childRecordsCheck( - TRANSFER_TXN2, - SUCCESS, - recordWith().status(SUCCESS), - recordWith().status(SUCCESS)), - childRecordsCheck( - TRANSFER_TXN3, - CONTRACT_REVERT_EXECUTED, - recordWith().status(NO_REMAINING_AUTOMATIC_ASSOCIATIONS)), - getAliasedAccountInfo(ECDSA_KEY) - .has(AccountInfoAsserts.accountWith() - .key(EMPTY_KEY) - .autoRenew(THREE_MONTHS_IN_SECONDS) - .receiverSigReq(false) - .memo(LAZY_MEMO)), - getAliasedAccountBalance(evmAddressBytes) - .hasTokenBalance(FUNGIBLE_TOKEN, amountToBeSent * 2 + 2) - .hasTokenBalance(NFT_TOKEN, 1) - .logged()); - })) - .then(); - } - - private HapiSpec cryptoTransferV2LazyCreate() { - final var NESTED_LAZY_PRECOMPILE_CONTRACT = "LazyPrecompileTransfersAtomic"; - final var FUNGIBLE_TOKEN_2 = "ftnt"; - final var INIT_BALANCE = 10 * ONE_HUNDRED_HBARS; - return defaultHapiSpec("cryptoTransferV2LazyCreate") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(SENDER) - .balance(INIT_BALANCE) - .key(MULTI_KEY) - .maxAutomaticTokenAssociations(5), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(FUNGIBLE_TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(TOTAL_SUPPLY) - .treasury(TOKEN_TREASURY), - tokenCreate(FUNGIBLE_TOKEN_2) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(TOTAL_SUPPLY) - .treasury(TOKEN_TREASURY), - tokenCreate(NFT_TOKEN) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasury(SENDER) - .initialSupply(0L) - .supplyKey(MULTI_KEY), - mintToken(NFT_TOKEN, List.of(META1, META2)), - newKeyNamed(ECDSA_KEY).shape(SECP_256K1_SHAPE), - cryptoTransfer(moving(500, FUNGIBLE_TOKEN).between(TOKEN_TREASURY, SENDER)), - cryptoTransfer(moving(500, FUNGIBLE_TOKEN_2).between(TOKEN_TREASURY, SENDER)), - uploadInitCode(NESTED_LAZY_PRECOMPILE_CONTRACT), - contractCreate(NESTED_LAZY_PRECOMPILE_CONTRACT).maxAutomaticTokenAssociations(1), - getContractInfo(NESTED_LAZY_PRECOMPILE_CONTRACT) - .has(ContractInfoAsserts.contractWith().maxAutoAssociations(1)) - .logged()) - .when(withOpContext((spec, opLog) -> { - final var ecdsaKey = spec.registry().getKey(ECDSA_KEY); - final var tmp = ecdsaKey.getECDSASecp256K1().toByteArray(); - final var addressBytes = recoverAddressFromPubKey(tmp); - final var evmAddressBytes = 
ByteString.copyFrom(addressBytes); - final var token = spec.registry().getTokenID(FUNGIBLE_TOKEN); - final var token2 = spec.registry().getTokenID(FUNGIBLE_TOKEN_2); - final var nftToken = spec.registry().getTokenID(NFT_TOKEN); - final var sender = spec.registry().getAccountID(SENDER); - final var amountToBeSent = 50L; - - final var cryptoTransferV2LazyCreateFn = "cryptoTransferV2LazyCreate"; - allRunFor( - spec, - contractCall( - NESTED_LAZY_PRECOMPILE_CONTRACT, - cryptoTransferV2LazyCreateFn, - transferList() - .withAccountAmounts( - accountAmount(sender, -amountToBeSent, false), - UtilVerbs.accountAmountAlias( - addressBytes, amountToBeSent, false)) - .build(), - tokenTransferLists() - .withTokenTransferList( - tokenTransferList() - .forToken(token) - .withAccountAmounts( - accountAmount( - sender, -amountToBeSent, false), - UtilVerbs.accountAmountAlias( - addressBytes, - amountToBeSent, - false)) - .build(), - tokenTransferList() - .forToken(nftToken) - .withNftTransfers(UtilVerbs.nftTransferToAlias( - sender, addressBytes, 1L, false)) - .build()) - .build(), - transferList() - .withAccountAmounts( - accountAmount(sender, -amountToBeSent, false), - UtilVerbs.accountAmountAlias( - addressBytes, amountToBeSent, false)) - .build(), - tokenTransferLists() - .withTokenTransferList(tokenTransferList() - .forToken(token) - .withAccountAmounts( - accountAmount(sender, -amountToBeSent, false), - UtilVerbs.accountAmountAlias( - addressBytes, amountToBeSent, false)) - .build()) - .build()) - .payingWith(GENESIS) - .via(TRANSFER_TXN) - .signedBy(GENESIS, MULTI_KEY) - .alsoSigningWithFullPrefix(MULTI_KEY) - .gas(GAS_TO_OFFER), - contractCall( - NESTED_LAZY_PRECOMPILE_CONTRACT, - cryptoTransferV2LazyCreateFn, - transferList() - .withAccountAmounts( - accountAmount(sender, -amountToBeSent, false), - UtilVerbs.accountAmountAlias( - addressBytes, amountToBeSent, false)) - .build(), - EMPTY_TUPLE_ARRAY, - transferList() - .withAccountAmounts( - accountAmount(sender, -amountToBeSent, false), - UtilVerbs.accountAmountAlias( - addressBytes, amountToBeSent, false)) - .build(), - EMPTY_TUPLE_ARRAY) - .payingWith(GENESIS) - .signedBy(GENESIS, MULTI_KEY) - .alsoSigningWithFullPrefix(MULTI_KEY) - .via(TRANSFER_TXN2) - .gas(GAS_TO_OFFER), - contractCall( - NESTED_LAZY_PRECOMPILE_CONTRACT, - cryptoTransferV2LazyCreateFn, - transferList() - .withAccountAmounts(EMPTY_TUPLE_ARRAY) - .build(), - tokenTransferLists() - .withTokenTransferList(tokenTransferList() - .forToken(token2) - .withAccountAmounts( - accountAmount(sender, -amountToBeSent, false), - UtilVerbs.accountAmountAlias( - addressBytes, amountToBeSent, false)) - .build()) - .build(), - transferList() - .withAccountAmounts(EMPTY_TUPLE_ARRAY) - .build(), - EMPTY_TUPLE_ARRAY) - .payingWith(GENESIS) - .signedBy(GENESIS, MULTI_KEY) - .alsoSigningWithFullPrefix(MULTI_KEY) - .via(TRANSFER_TXN3) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED) - .gas(GAS_TO_OFFER), - childRecordsCheck( - TRANSFER_TXN, - SUCCESS, - recordWith().status(SUCCESS), - recordWith().status(SUCCESS), - recordWith().status(SUCCESS)), - childRecordsCheck( - TRANSFER_TXN2, - SUCCESS, - recordWith().status(SUCCESS), - recordWith().status(SUCCESS)), - childRecordsCheck( - TRANSFER_TXN3, - CONTRACT_REVERT_EXECUTED, - recordWith().status(NO_REMAINING_AUTOMATIC_ASSOCIATIONS)), - getAliasedAccountInfo(ECDSA_KEY) - .has(AccountInfoAsserts.accountWith() - .key(EMPTY_KEY) - .autoRenew(THREE_MONTHS_IN_SECONDS) - .receiverSigReq(false) - .memo(LAZY_MEMO)), - getAliasedAccountBalance(evmAddressBytes) - 
.hasTinyBars(4 * amountToBeSent) - .hasTokenBalance(FUNGIBLE_TOKEN, amountToBeSent * 2) - .hasTokenBalance(NFT_TOKEN, 1) - .logged()); - })) - .then(); - } - - private HapiSpec transferTokenLazyCreate() { - final AtomicReference tokenAddr = new AtomicReference<>(); - - return defaultHapiSpec("transferTokenLazyCreate") - .given( - newKeyNamed(ECDSA_KEY).shape(SECP_256K1_SHAPE), - newKeyNamed(MULTI_KEY), - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(OWNER).balance(100 * ONE_HUNDRED_HBARS), - tokenCreate(FUNGIBLE_TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(5) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .exposingCreatedIdTo(id -> tokenAddr.set( - HapiPropertySource.asHexedSolidityAddress(HapiPropertySource.asToken(id)))), - uploadInitCode(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), - contractCreate(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), - tokenAssociate(OWNER, List.of(FUNGIBLE_TOKEN)), - cryptoTransfer(moving(5, FUNGIBLE_TOKEN).between(TOKEN_TREASURY, OWNER))) - .when(withOpContext((spec, opLog) -> { - final var ecdsaKey = spec.registry().getKey(ECDSA_KEY); - final var tmp = ecdsaKey.getECDSASecp256K1().toByteArray(); - final var addressBytes = recoverAddressFromPubKey(tmp); - final var alias = ByteStringUtils.wrapUnsafely(addressBytes); - allRunFor( - spec, - contractCall( - TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT, - "transferTokenCallNestedThenAgain", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(OWNER))), - HapiParserUtil.asHeadlongAddress(addressBytes), - 2L, - 2L) - .via(TRANSFER_TOKEN_TXN) - .alsoSigningWithFullPrefix(OWNER) - .gas(GAS_TO_OFFER) - .hasKnownStatus(SUCCESS), - getAliasedAccountInfo(ECDSA_KEY) - .has(AccountInfoAsserts.accountWith() - .key(EMPTY_KEY) - .autoRenew(THREE_MONTHS_IN_SECONDS) - .receiverSigReq(false) - .memo(LAZY_MEMO)), - getAliasedAccountBalance(alias) - .hasTokenBalance(FUNGIBLE_TOKEN, 4) - .logged(), - childRecordsCheck( - TRANSFER_TOKEN_TXN, - SUCCESS, - recordWith().status(SUCCESS), - recordWith().status(SUCCESS), - recordWith().status(SUCCESS))); - })) - .then(); - } - - private HapiSpec transferTokensToEVMAddressAliasRevertAndTransferAgainSuccessfully() { - final AtomicReference tokenAddr = new AtomicReference<>(); - - return defaultHapiSpec("transferTokensToEVMAddressAliasRevertAndTransferAgainSuccessfully") - .given( - newKeyNamed(ECDSA_KEY).shape(SECP_256K1_SHAPE), - newKeyNamed(MULTI_KEY), - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(OWNER).balance(100 * ONE_HUNDRED_HBARS), - tokenCreate(FUNGIBLE_TOKEN) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(5) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .exposingCreatedIdTo(id -> tokenAddr.set( - HapiPropertySource.asHexedSolidityAddress(HapiPropertySource.asToken(id)))), - uploadInitCode(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), - contractCreate(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), - tokenAssociate(OWNER, List.of(FUNGIBLE_TOKEN)), - cryptoTransfer(moving(5, FUNGIBLE_TOKEN).between(TOKEN_TREASURY, OWNER))) - .when(withOpContext((spec, opLog) -> { - final var ecdsaKey = spec.registry().getKey(ECDSA_KEY); - final var tmp = ecdsaKey.getECDSASecp256K1().toByteArray(); - final var addressBytes = recoverAddressFromPubKey(tmp); - final var alias = ByteStringUtils.wrapUnsafely(addressBytes); - assert addressBytes != null; - allRunFor( - spec, - contractCall( - TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT, - 
"transferTokensCallNestedThenAgain", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN))), - new Address[] { - HapiParserUtil.asHeadlongAddress(asAddress( - spec.registry().getAccountID(OWNER))), - HapiParserUtil.asHeadlongAddress(addressBytes) - }, - new long[] {-2L, 2L}, - new long[] {-2L, 2L}) - .via(TRANSFER_TOKENS_TXN) - .gas(GAS_TO_OFFER) - .alsoSigningWithFullPrefix(OWNER) - .hasKnownStatus(SUCCESS), - getAliasedAccountInfo(ECDSA_KEY) - .has(AccountInfoAsserts.accountWith() - .key(EMPTY_KEY) - .autoRenew(THREE_MONTHS_IN_SECONDS) - .receiverSigReq(false) - .memo(LAZY_MEMO)), - getAliasedAccountBalance(alias) - .hasTokenBalance(FUNGIBLE_TOKEN, 4) - .logged(), - childRecordsCheck( - TRANSFER_TOKENS_TXN, - SUCCESS, - recordWith().status(SUCCESS), - recordWith().status(SUCCESS), - recordWith().status(SUCCESS))); - })) - .then(); - } - - private HapiSpec transferNftLazyCreate() { - return defaultHapiSpec("transferNftLazyCreate") - .given( - newKeyNamed(ECDSA_KEY).shape(SECP_256K1_SHAPE), - newKeyNamed(MULTI_KEY), - cryptoCreate(OWNER).balance(100 * ONE_HUNDRED_HBARS), - cryptoCreate(SPENDER), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(NON_FUNGIBLE_TOKEN) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .initialSupply(0) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY), - uploadInitCode(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), - contractCreate(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), - tokenAssociate(OWNER, NON_FUNGIBLE_TOKEN), - tokenAssociate(SPENDER, NON_FUNGIBLE_TOKEN), - tokenAssociate(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT, NON_FUNGIBLE_TOKEN), - mintToken(NON_FUNGIBLE_TOKEN, List.of(FIRST_META, SECOND_META)), - cryptoTransfer(movingUnique(NON_FUNGIBLE_TOKEN, 1L, 2L).between(TOKEN_TREASURY, OWNER))) - .when(withOpContext((spec, opLog) -> { - final var ecdsaKey = spec.registry().getKey(ECDSA_KEY); - final var tmp = ecdsaKey.getECDSASecp256K1().toByteArray(); - final var addressBytes = recoverAddressFromPubKey(tmp); - final var alias = ByteStringUtils.wrapUnsafely(addressBytes); - allRunFor( - spec, - contractCall( - TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT, - "transferNFTCallNestedThenAgain", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(NON_FUNGIBLE_TOKEN))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(OWNER))), - HapiParserUtil.asHeadlongAddress(addressBytes), - 1L, - 2L) - .via(TRANSFER_NFT_TXN) - .alsoSigningWithFullPrefix(OWNER) - .gas(GAS_TO_OFFER) - .hasKnownStatus(SUCCESS), - getAliasedAccountInfo(ECDSA_KEY) - .has(AccountInfoAsserts.accountWith() - .key(EMPTY_KEY) - .autoRenew(THREE_MONTHS_IN_SECONDS) - .receiverSigReq(false) - .memo(LAZY_MEMO)), - getAliasedAccountBalance(alias) - .hasTokenBalance(NON_FUNGIBLE_TOKEN, 2) - .logged(), - childRecordsCheck( - TRANSFER_NFT_TXN, - SUCCESS, - recordWith().status(SUCCESS), - recordWith().status(SUCCESS), - recordWith().status(SUCCESS))); - })) - .then(); - } - - private HapiSpec transferNftsLazyCreate() { - return defaultHapiSpec("transferNftsLazyCreate") - .given( - newKeyNamed(ECDSA_KEY).shape(SECP_256K1_SHAPE), - newKeyNamed(MULTI_KEY), - cryptoCreate(OWNER).balance(100 * ONE_HUNDRED_HBARS), - cryptoCreate(SPENDER), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(NON_FUNGIBLE_TOKEN) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .initialSupply(0) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY), - uploadInitCode(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), - 
contractCreate(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), - tokenAssociate(OWNER, NON_FUNGIBLE_TOKEN), - tokenAssociate(SPENDER, NON_FUNGIBLE_TOKEN), - tokenAssociate(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT, NON_FUNGIBLE_TOKEN), - mintToken(NON_FUNGIBLE_TOKEN, List.of(FIRST_META, SECOND_META)), - cryptoTransfer(movingUnique(NON_FUNGIBLE_TOKEN, 1L, 2L).between(TOKEN_TREASURY, OWNER))) - .when(withOpContext((spec, opLog) -> { - final var ecdsaKey = spec.registry().getKey(ECDSA_KEY); - final var tmp = ecdsaKey.getECDSASecp256K1().toByteArray(); - final var addressBytes = recoverAddressFromPubKey(tmp); - final var alias = ByteStringUtils.wrapUnsafely(addressBytes); - assert addressBytes != null; - allRunFor( - spec, - contractCall( - TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT, - "transferNFTsCallNestedThenAgain", - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(NON_FUNGIBLE_TOKEN))), - new Address[] { - HapiParserUtil.asHeadlongAddress(asAddress( - spec.registry().getAccountID(OWNER))) - }, - new Address[] {HapiParserUtil.asHeadlongAddress(addressBytes)}, - new long[] {1L}, - new long[] {2L}) - .via(TRANSFER_NFTS_TXN) - .alsoSigningWithFullPrefix(OWNER) - .gas(GAS_TO_OFFER), - getAliasedAccountInfo(ECDSA_KEY) - .has(AccountInfoAsserts.accountWith() - .key(EMPTY_KEY) - .autoRenew(THREE_MONTHS_IN_SECONDS) - .receiverSigReq(false) - .memo(LAZY_MEMO)), - getAliasedAccountBalance(alias) - .hasTokenBalance(NON_FUNGIBLE_TOKEN, 2) - .logged(), - childRecordsCheck( - TRANSFER_NFTS_TXN, - SUCCESS, - recordWith().status(SUCCESS), - recordWith().status(SUCCESS), - recordWith().status(SUCCESS))); - })) - .then(); - } - private HapiSpec erc20TransferLazyCreate() { final AtomicReference tokenAddr = new AtomicReference<>(); diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/LazyCreateThroughPrecompileV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/LazyCreateThroughPrecompileV1SecurityModelSuite.java new file mode 100644 index 000000000000..aae9dee2afcb --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/LazyCreateThroughPrecompileV1SecurityModelSuite.java @@ -0,0 +1,1069 @@ +/* + * Copyright (C) 2022-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.hedera.node.app.service.evm.utils.EthSigsUtils.recoverAddressFromPubKey; +import static com.hedera.services.bdd.spec.HapiPropertySource.asHexedSolidityAddress; +import static com.hedera.services.bdd.spec.HapiPropertySource.asToken; +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.AccountDetailsAsserts.accountDetailsWith; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.keys.KeyShape.ED25519; +import static com.hedera.services.bdd.spec.keys.SigControl.SECP256K1_ON; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountDetails; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAliasedAccountBalance; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAliasedAccountInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getLiteralAliasAccountInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoApproveAllowance; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.fractionalFee; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.accountAmount; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.accountAmountAlias; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.nftTransferToAlias; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overriding; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.tokenTransferList; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.tokenTransferLists; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.transferList; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.headlongFromHexed; +import static com.hedera.services.bdd.suites.contract.Utils.mirrorAddrWith; +import static 
com.hedera.services.bdd.suites.contract.Utils.nCopiesOfSender; +import static com.hedera.services.bdd.suites.contract.Utils.nNonMirrorAddressFrom; +import static com.hedera.services.bdd.suites.contract.Utils.nonMirrorAddrWith; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.crypto.AutoAccountCreationSuite.LAZY_MEMO; +import static com.hedera.services.bdd.suites.file.FileUpdateSuite.CIVILIAN; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.NO_REMAINING_AUTOMATIC_ASSOCIATIONS; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.REVERTED_SUCCESS; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; +import static com.swirlds.common.utility.CommonUtils.hex; + +import com.esaulpaugh.headlong.abi.Address; +import com.esaulpaugh.headlong.abi.Tuple; +import com.google.protobuf.ByteString; +import com.hedera.node.app.hapi.utils.ByteStringUtils; +import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; +import com.hedera.services.bdd.spec.HapiPropertySource; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.assertions.AccountInfoAsserts; +import com.hedera.services.bdd.spec.assertions.ContractInfoAsserts; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.spec.transactions.token.TokenMovement; +import com.hedera.services.bdd.spec.utilops.UtilVerbs; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hederahashgraph.api.proto.java.TokenSupplyType; +import com.hederahashgraph.api.proto.java.TokenType; +import java.math.BigInteger; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.OptionalLong; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.IntStream; +import java.util.stream.LongStream; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.tuweni.bytes.Bytes; +import org.junit.jupiter.api.Assertions; + +@SuppressWarnings("java:S1192") // "string literal should not be duplicated" - this rule makes test suites worse +public class LazyCreateThroughPrecompileV1SecurityModelSuite extends HapiSuite { + private static final Logger log = LogManager.getLogger(LazyCreateThroughPrecompileV1SecurityModelSuite.class); + private static final long GAS_TO_OFFER = 4_000_000L; + private static final String FUNGIBLE_TOKEN = "fungibleToken"; + private static final String NON_FUNGIBLE_TOKEN = "nonFungibleToken"; + private static final String MULTI_KEY = "purpose"; + private static final String OWNER = "owner"; + private static final String FIRST = "FIRST"; + public static final ByteString FIRST_META = ByteString.copyFrom(FIRST.getBytes(StandardCharsets.UTF_8)); + public static final ByteString SECOND_META = ByteString.copyFrom(FIRST.getBytes(StandardCharsets.UTF_8)); + private static final String 
TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT = "PrecompileAliasXfer"; + private static final String SPENDER = "spender"; + private static final String TRANSFER_TOKEN_TXN = "transferTokenTxn"; + private static final String TRANSFER_TOKENS_TXN = "transferTokensTxn"; + private static final String TRANSFER_NFT_TXN = "transferNFTTxn"; + private static final String TRANSFER_NFTS_TXN = "transferNFTsTxn"; + private static final String SENDER = "sender"; + private static final long TOTAL_SUPPLY = 1_000; + private static final String NFT_TOKEN = "Token_NFT"; + private static final String TRANSFER_TXN = "transferTxn"; + private static final String TRANSFER_TXN2 = "transferTxn2"; + private static final String NFT_KEY = "nftKey"; + private static final String AUTO_CREATION_MODES = "AutoCreationModes"; + private static final String CREATION_ATTEMPT = "creationAttempt"; + private static final String ONE_TIME = "ONE TIME"; + private static final String CREATE_DIRECTLY = "createDirectly"; + private static final String HTS_TRANSFER_FROM_CONTRACT = "HtsTransferFrom"; + private static final ByteString META1 = ByteStringUtils.wrapUnsafely("meta1".getBytes()); + private static final ByteString META2 = ByteStringUtils.wrapUnsafely("meta2".getBytes()); + private static final String TOKEN_TREASURY = "treasury"; + private static final String HTS_TRANSFER_FROM = "htsTransferFrom"; + private static final String BASE_APPROVE_TXN = "baseApproveTxn"; + private static final String TRANSFER_TXN3 = "transferTxn3"; + private static final Tuple[] EMPTY_TUPLE_ARRAY = new Tuple[] {}; + private static final String ECDSA_KEY = "abcdECDSAkey"; + + public static void main(String... args) { + new LazyCreateThroughPrecompileV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + protected Logger getResultsLogger() { + return log; + } + + @Override + public List getSpecsInSuite() { + return List.of( + cryptoTransferV1LazyCreate(), + cryptoTransferV2LazyCreate(), + transferTokenLazyCreate(), + transferTokensToEVMAddressAliasRevertAndTransferAgainSuccessfully(), + transferNftLazyCreate(), + transferNftsLazyCreate(), + htsTransferFromFungibleTokenLazyCreate(), + hollowAccountSigningReqsStillEnforced(), + revertedAutoCreationRollsBackEvenIfTopLevelSucceeds(), + canCreateMultipleHollows(), + canCreateViaFungibleWithFractionalFee()); + } + + HapiSpec hollowAccountSigningReqsStillEnforced() { + final var nft = "nft"; + final var nftKey = NFT_KEY; + final var creationAttempt = CREATION_ATTEMPT; + final var creationReversal = "creationReversal"; + final AtomicLong civilianId = new AtomicLong(); + final AtomicReference nftMirrorAddr = new AtomicReference<>(); + + return propertyPreservingHapiSpec("hollowAccountSigningReqsStillEnforced") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(nftKey), + uploadInitCode(AUTO_CREATION_MODES), + contractCreate(AUTO_CREATION_MODES), + cryptoCreate(CIVILIAN) + .keyShape(ED25519) + .exposingCreatedIdTo(id -> civilianId.set(id.getAccountNum())), + tokenCreate(nft) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .supplyKey(nftKey) + .initialSupply(0) + .treasury(CIVILIAN) + .exposingCreatedIdTo( + idLit -> nftMirrorAddr.set(asHexedSolidityAddress(asToken(idLit)))), + mintToken(nft, List.of(ByteString.copyFromUtf8(ONE_TIME)))) + .when(sourcing(() -> contractCall( + AUTO_CREATION_MODES, + CREATE_DIRECTLY, + 
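+                        // Args (as passed below): the NFT's address, the civilian owner's mirror address, a never-before-seen
+                        // alias address (forcing a hollow-account creation), the serial number, and a boolean flag whose
+                        // meaning is defined by the AutoCreationModes contract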
headlongFromHexed(nftMirrorAddr.get()), + mirrorAddrWith(civilianId.get()), + nonMirrorAddrWith(civilianId.get() + 4_000_000), + 1L, + false) + .via(creationAttempt) + .gas(GAS_TO_OFFER) + .alsoSigningWithFullPrefix(CIVILIAN))) + .then( + getTxnRecord(creationAttempt).andAllChildRecords().logged(), + sourcing(() -> getLiteralAliasAccountInfo( + hex(Bytes.fromHexString(nonMirrorAddrWith(civilianId.get() + 4_000_000) + .toString()) + .toArray())) + .logged()), + // Now try to reverse the transfer and take the hollow account's NFT + sourcing(() -> contractCall( + AUTO_CREATION_MODES, + CREATE_DIRECTLY, + headlongFromHexed(nftMirrorAddr.get()), + nonMirrorAddrWith(civilianId.get() + 4_000_000), + mirrorAddrWith(civilianId.get()), + 1L, + false) + .via(creationReversal) + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)), + sourcing(() -> childRecordsCheck( + creationReversal, + CONTRACT_REVERT_EXECUTED, + recordWith().status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)))); + } + + HapiSpec revertedAutoCreationRollsBackEvenIfTopLevelSucceeds() { + final var nft = "nft"; + final var nftKey = NFT_KEY; + final var creationAttempt = CREATION_ATTEMPT; + final AtomicLong civilianId = new AtomicLong(); + final AtomicReference nftMirrorAddr = new AtomicReference<>(); + + return propertyPreservingHapiSpec("revertedAutoCreationRollsBackEvenIfTopLevelSucceeds") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(nftKey), + uploadInitCode(AUTO_CREATION_MODES), + contractCreate(AUTO_CREATION_MODES), + cryptoCreate(CIVILIAN) + .keyShape(ED25519) + .exposingCreatedIdTo(id -> civilianId.set(id.getAccountNum())), + tokenCreate(nft) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .supplyKey(nftKey) + .initialSupply(0) + .treasury(CIVILIAN) + .exposingCreatedIdTo( + idLit -> nftMirrorAddr.set(asHexedSolidityAddress(asToken(idLit)))), + mintToken(nft, List.of(ByteString.copyFromUtf8(ONE_TIME)))) + .when(sourcing(() -> contractCall( + AUTO_CREATION_MODES, + "createIndirectlyRevertingAndRecover", + headlongFromHexed(nftMirrorAddr.get()), + mirrorAddrWith(civilianId.get()), + nonMirrorAddrWith(civilianId.get() + 8_000_000), + 1L) + .via(creationAttempt) + .gas(GAS_TO_OFFER) + .alsoSigningWithFullPrefix(CIVILIAN) + .hasKnownStatus(SUCCESS))) + .then(childRecordsCheck( + creationAttempt, + SUCCESS, + recordWith() + .status(REVERTED_SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS))))); + } + + @SuppressWarnings("java:S5960") + HapiSpec canCreateViaFungibleWithFractionalFee() { + final var ft = "ft"; + final var ftKey = NFT_KEY; + final var creationAttempt = CREATION_ATTEMPT; + final AtomicLong civilianId = new AtomicLong(); + final AtomicReference ftMirrorAddr = new AtomicReference<>(); + final long supply = 100_000_000; + + return propertyPreservingHapiSpec("canCreateViaFungibleWithFractionalFee") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ftKey), + uploadInitCode(AUTO_CREATION_MODES), + contractCreate(AUTO_CREATION_MODES), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(CIVILIAN) + .maxAutomaticTokenAssociations(1) + .keyShape(ED25519) + .exposingCreatedIdTo(id -> civilianId.set(id.getAccountNum())), + // If running locally, ensures the entity 0.0. 
is an account w/ EVM address + cryptoCreate("somebody").keyShape(SECP256K1_ON).withMatchingEvmAddress(), + tokenCreate(ft) + .tokenType(TokenType.FUNGIBLE_COMMON) + .supplyKey(ftKey) + .initialSupply(supply) + .withCustom(fractionalFee(1L, 20L, 0L, OptionalLong.of(0L), TOKEN_TREASURY)) + .treasury(TOKEN_TREASURY) + .exposingCreatedIdTo(idLit -> ftMirrorAddr.set(asHexedSolidityAddress(asToken(idLit)))), + cryptoTransfer(TokenMovement.moving(supply, ft).between(TOKEN_TREASURY, CIVILIAN))) + .when(withOpContext((spec, opLog) -> { + final var op = contractCall( + AUTO_CREATION_MODES, + "createDirectlyViaFungible", + headlongFromHexed(ftMirrorAddr.get()), + mirrorAddrWith(civilianId.get()), + nonMirrorAddrWith(123, civilianId.get() + 1), + supply) + .via(creationAttempt) + .gas(GAS_TO_OFFER) + .alsoSigningWithFullPrefix(CIVILIAN) + .hasKnownStatusFrom(SUCCESS, CONTRACT_REVERT_EXECUTED); + allRunFor(spec, op); + // If this ContractCall was converted to an EthereumTransaction, then it will + // not be tracking the last receipt and we can't do this extra logging; this is + // fine for now, since the _Eth spec hasn't been flaky + if (op.hasActualStatus() && op.getActualStatus() == CONTRACT_REVERT_EXECUTED) { + final var lookup = getTxnRecord(creationAttempt).andAllChildRecords(); + allRunFor(spec, lookup); + Assertions.fail("canCreateViaFungibleWithFractionalFee() failed w/ record " + + lookup.getResponseRecord() + + " and child records " + + lookup.getChildRecords()); + } + })) + .then(); + } + + HapiSpec canCreateMultipleHollows() { + final var n = 3; + final var nft = "nft"; + final var nftKey = NFT_KEY; + final var creationAttempt = CREATION_ATTEMPT; + final AtomicLong civilianId = new AtomicLong(); + final AtomicReference nftMirrorAddr = new AtomicReference<>(); + + return propertyPreservingHapiSpec("canCreateMultipleHollows") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(nftKey), + uploadInitCode(AUTO_CREATION_MODES), + contractCreate(AUTO_CREATION_MODES), + cryptoCreate(CIVILIAN) + .keyShape(ED25519) + .exposingCreatedIdTo(id -> civilianId.set(id.getAccountNum())), + tokenCreate(nft) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .supplyKey(nftKey) + .initialSupply(0) + .treasury(CIVILIAN) + .exposingCreatedIdTo( + idLit -> nftMirrorAddr.set(asHexedSolidityAddress(asToken(idLit)))), + mintToken( + nft, + IntStream.range(0, n) + .mapToObj(i -> ByteString.copyFromUtf8(ONE_TIME + i)) + .toList())) + .when(sourcing(() -> contractCall( + AUTO_CREATION_MODES, + "createSeveralDirectly", + headlongFromHexed(nftMirrorAddr.get()), + nCopiesOfSender(n, mirrorAddrWith(civilianId.get())), + nNonMirrorAddressFrom(n, civilianId.get() + 1_234_567_890L), + LongStream.iterate(1L, l -> l + 1).limit(n).toArray()) + .via(creationAttempt) + .gas(GAS_TO_OFFER) + .alsoSigningWithFullPrefix(CIVILIAN) + .hasKnownStatus(SUCCESS))) + .then(getTxnRecord(creationAttempt).andAllChildRecords().logged()); + } + + private HapiSpec cryptoTransferV1LazyCreate() { + final var NESTED_LAZY_PRECOMPILE_CONTRACT = "LazyPrecompileTransfers"; + final var FUNGIBLE_TOKEN_2 = "ftnt"; + return propertyPreservingHapiSpec("cryptoTransferV1LazyCreate") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(SENDER) + .balance(10 * ONE_HUNDRED_HBARS) + .key(MULTI_KEY) + 
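+                                // Open auto-association slots so the fungible test tokens can associate with the sender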
.maxAutomaticTokenAssociations(5), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(TOTAL_SUPPLY) + .treasury(TOKEN_TREASURY), + tokenCreate(FUNGIBLE_TOKEN_2) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(TOTAL_SUPPLY) + .treasury(TOKEN_TREASURY), + tokenCreate(NFT_TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasury(SENDER) + .initialSupply(0L) + .supplyKey(MULTI_KEY), + mintToken(NFT_TOKEN, List.of(META1, META2)), + newKeyNamed(ECDSA_KEY).shape(SECP_256K1_SHAPE), + cryptoTransfer(moving(500, FUNGIBLE_TOKEN).between(TOKEN_TREASURY, SENDER)), + cryptoTransfer(moving(500, FUNGIBLE_TOKEN_2).between(TOKEN_TREASURY, SENDER)), + uploadInitCode(NESTED_LAZY_PRECOMPILE_CONTRACT), + contractCreate(NESTED_LAZY_PRECOMPILE_CONTRACT).maxAutomaticTokenAssociations(1), + getContractInfo(NESTED_LAZY_PRECOMPILE_CONTRACT) + .has(ContractInfoAsserts.contractWith().maxAutoAssociations(1)) + .logged()) + .when(withOpContext((spec, opLog) -> { + final var token = spec.registry().getTokenID(FUNGIBLE_TOKEN); + final var token2 = spec.registry().getTokenID(FUNGIBLE_TOKEN_2); + final var nftToken = spec.registry().getTokenID(NFT_TOKEN); + final var sender = spec.registry().getAccountID(SENDER); + final var ecdsaKey = spec.registry().getKey(ECDSA_KEY); + final var evmAddressBytes = ByteString.copyFrom(recoverAddressFromPubKey( + ecdsaKey.getECDSASecp256K1().toByteArray())); + final var amountToBeSent = 50L; + final var transferFn = "cryptoTransferV1LazyCreate"; + allRunFor( + spec, + contractCall( + NESTED_LAZY_PRECOMPILE_CONTRACT, + transferFn, + tokenTransferLists() + .withTokenTransferList( + tokenTransferList() + .forToken(token) + .withAccountAmounts( + accountAmount(sender, -amountToBeSent), + accountAmountAlias( + recoverAddressFromPubKey( + ecdsaKey.getECDSASecp256K1() + .toByteArray()), + amountToBeSent)) + .build(), + tokenTransferList() + .forToken(nftToken) + .withNftTransfers(nftTransferToAlias( + sender, + recoverAddressFromPubKey( + ecdsaKey.getECDSASecp256K1() + .toByteArray()), + 1L)) + .build()) + .build(), + tokenTransferLists() + .withTokenTransferList(tokenTransferList() + .forToken(token) + .withAccountAmounts( + accountAmount(sender, -amountToBeSent), + accountAmountAlias( + recoverAddressFromPubKey( + ecdsaKey.getECDSASecp256K1() + .toByteArray()), + amountToBeSent)) + .build()) + .build()) + .payingWith(GENESIS) + .via(TRANSFER_TXN) + .signedBy(GENESIS, MULTI_KEY) + .alsoSigningWithFullPrefix(MULTI_KEY) + .gas(GAS_TO_OFFER), + contractCall( + NESTED_LAZY_PRECOMPILE_CONTRACT, + transferFn, + tokenTransferLists() + .withTokenTransferList(tokenTransferList() + .forToken(token) + .withAccountAmounts( + accountAmount(sender, -1L), + accountAmountAlias( + recoverAddressFromPubKey( + ecdsaKey.getECDSASecp256K1() + .toByteArray()), + 1L)) + .build()) + .build(), + tokenTransferLists() + .withTokenTransferList(tokenTransferList() + .forToken(token) + .withAccountAmounts( + accountAmount(sender, -1L), + accountAmountAlias( + recoverAddressFromPubKey( + ecdsaKey.getECDSASecp256K1() + .toByteArray()), + 1L)) + .build()) + .build()) + .payingWith(GENESIS) + .signedBy(GENESIS, MULTI_KEY) + .alsoSigningWithFullPrefix(MULTI_KEY) + .via(TRANSFER_TXN2) + .gas(GAS_TO_OFFER), + contractCall( + NESTED_LAZY_PRECOMPILE_CONTRACT, + transferFn, + tokenTransferLists() + .withTokenTransferList(tokenTransferList() + .forToken(token2) + .withAccountAmounts( + accountAmount(sender, -1L), + accountAmountAlias( + 
recoverAddressFromPubKey( + ecdsaKey.getECDSASecp256K1() + .toByteArray()), + 1L)) + .build()) + .build(), + tokenTransferLists() + .withTokenTransferList(tokenTransferList() + .forToken(token2) + .withAccountAmounts( + accountAmount(sender, -1L), + accountAmountAlias( + recoverAddressFromPubKey( + ecdsaKey.getECDSASecp256K1() + .toByteArray()), + 1L)) + .build()) + .build()) + .payingWith(GENESIS) + .signedBy(GENESIS, MULTI_KEY) + .alsoSigningWithFullPrefix(MULTI_KEY) + .via(TRANSFER_TXN3) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED) + .gas(GAS_TO_OFFER), + childRecordsCheck( + TRANSFER_TXN, + SUCCESS, + recordWith().status(SUCCESS), + recordWith().status(SUCCESS), + recordWith().status(SUCCESS)), + childRecordsCheck( + TRANSFER_TXN2, + SUCCESS, + recordWith().status(SUCCESS), + recordWith().status(SUCCESS)), + childRecordsCheck( + TRANSFER_TXN3, + CONTRACT_REVERT_EXECUTED, + recordWith().status(NO_REMAINING_AUTOMATIC_ASSOCIATIONS)), + getAliasedAccountInfo(ECDSA_KEY) + .has(AccountInfoAsserts.accountWith() + .key(EMPTY_KEY) + .autoRenew(THREE_MONTHS_IN_SECONDS) + .receiverSigReq(false) + .memo(LAZY_MEMO)), + getAliasedAccountBalance(evmAddressBytes) + .hasTokenBalance(FUNGIBLE_TOKEN, amountToBeSent * 2 + 2) + .hasTokenBalance(NFT_TOKEN, 1) + .logged()); + })) + .then(); + } + + private HapiSpec cryptoTransferV2LazyCreate() { + final var NESTED_LAZY_PRECOMPILE_CONTRACT = "LazyPrecompileTransfersAtomic"; + final var FUNGIBLE_TOKEN_2 = "ftnt"; + final var INIT_BALANCE = 10 * ONE_HUNDRED_HBARS; + return propertyPreservingHapiSpec("cryptoTransferV2LazyCreate") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(SENDER) + .balance(INIT_BALANCE) + .key(MULTI_KEY) + .maxAutomaticTokenAssociations(5), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(TOTAL_SUPPLY) + .treasury(TOKEN_TREASURY), + tokenCreate(FUNGIBLE_TOKEN_2) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(TOTAL_SUPPLY) + .treasury(TOKEN_TREASURY), + tokenCreate(NFT_TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasury(SENDER) + .initialSupply(0L) + .supplyKey(MULTI_KEY), + mintToken(NFT_TOKEN, List.of(META1, META2)), + newKeyNamed(ECDSA_KEY).shape(SECP_256K1_SHAPE), + cryptoTransfer(moving(500, FUNGIBLE_TOKEN).between(TOKEN_TREASURY, SENDER)), + cryptoTransfer(moving(500, FUNGIBLE_TOKEN_2).between(TOKEN_TREASURY, SENDER)), + uploadInitCode(NESTED_LAZY_PRECOMPILE_CONTRACT), + contractCreate(NESTED_LAZY_PRECOMPILE_CONTRACT).maxAutomaticTokenAssociations(1), + getContractInfo(NESTED_LAZY_PRECOMPILE_CONTRACT) + .has(ContractInfoAsserts.contractWith().maxAutoAssociations(1)) + .logged()) + .when(withOpContext((spec, opLog) -> { + final var ecdsaKey = spec.registry().getKey(ECDSA_KEY); + final var tmp = ecdsaKey.getECDSASecp256K1().toByteArray(); + final var addressBytes = recoverAddressFromPubKey(tmp); + final var evmAddressBytes = ByteString.copyFrom(addressBytes); + final var token = spec.registry().getTokenID(FUNGIBLE_TOKEN); + final var token2 = spec.registry().getTokenID(FUNGIBLE_TOKEN_2); + final var nftToken = spec.registry().getTokenID(NFT_TOKEN); + final var sender = spec.registry().getAccountID(SENDER); + final var amountToBeSent = 50L; + + final var cryptoTransferV2LazyCreateFn = "cryptoTransferV2LazyCreate"; + allRunFor( + spec, + contractCall( + NESTED_LAZY_PRECOMPILE_CONTRACT, + 
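+                                    // First V2 call: hbar, fungible, and NFT transfers to the ECDSA alias, lazy-creating the hollow account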
cryptoTransferV2LazyCreateFn, + transferList() + .withAccountAmounts( + accountAmount(sender, -amountToBeSent, false), + UtilVerbs.accountAmountAlias( + addressBytes, amountToBeSent, false)) + .build(), + tokenTransferLists() + .withTokenTransferList( + tokenTransferList() + .forToken(token) + .withAccountAmounts( + accountAmount( + sender, -amountToBeSent, false), + UtilVerbs.accountAmountAlias( + addressBytes, + amountToBeSent, + false)) + .build(), + tokenTransferList() + .forToken(nftToken) + .withNftTransfers(UtilVerbs.nftTransferToAlias( + sender, addressBytes, 1L, false)) + .build()) + .build(), + transferList() + .withAccountAmounts( + accountAmount(sender, -amountToBeSent, false), + UtilVerbs.accountAmountAlias( + addressBytes, amountToBeSent, false)) + .build(), + tokenTransferLists() + .withTokenTransferList(tokenTransferList() + .forToken(token) + .withAccountAmounts( + accountAmount(sender, -amountToBeSent, false), + UtilVerbs.accountAmountAlias( + addressBytes, amountToBeSent, false)) + .build()) + .build()) + .payingWith(GENESIS) + .via(TRANSFER_TXN) + .signedBy(GENESIS, MULTI_KEY) + .alsoSigningWithFullPrefix(MULTI_KEY) + .gas(GAS_TO_OFFER), + contractCall( + NESTED_LAZY_PRECOMPILE_CONTRACT, + cryptoTransferV2LazyCreateFn, + transferList() + .withAccountAmounts( + accountAmount(sender, -amountToBeSent, false), + UtilVerbs.accountAmountAlias( + addressBytes, amountToBeSent, false)) + .build(), + EMPTY_TUPLE_ARRAY, + transferList() + .withAccountAmounts( + accountAmount(sender, -amountToBeSent, false), + UtilVerbs.accountAmountAlias( + addressBytes, amountToBeSent, false)) + .build(), + EMPTY_TUPLE_ARRAY) + .payingWith(GENESIS) + .signedBy(GENESIS, MULTI_KEY) + .alsoSigningWithFullPrefix(MULTI_KEY) + .via(TRANSFER_TXN2) + .gas(GAS_TO_OFFER), + contractCall( + NESTED_LAZY_PRECOMPILE_CONTRACT, + cryptoTransferV2LazyCreateFn, + transferList() + .withAccountAmounts(EMPTY_TUPLE_ARRAY) + .build(), + tokenTransferLists() + .withTokenTransferList(tokenTransferList() + .forToken(token2) + .withAccountAmounts( + accountAmount(sender, -amountToBeSent, false), + UtilVerbs.accountAmountAlias( + addressBytes, amountToBeSent, false)) + .build()) + .build(), + transferList() + .withAccountAmounts(EMPTY_TUPLE_ARRAY) + .build(), + EMPTY_TUPLE_ARRAY) + .payingWith(GENESIS) + .signedBy(GENESIS, MULTI_KEY) + .alsoSigningWithFullPrefix(MULTI_KEY) + .via(TRANSFER_TXN3) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED) + .gas(GAS_TO_OFFER), + childRecordsCheck( + TRANSFER_TXN, + SUCCESS, + recordWith().status(SUCCESS), + recordWith().status(SUCCESS), + recordWith().status(SUCCESS)), + childRecordsCheck( + TRANSFER_TXN2, + SUCCESS, + recordWith().status(SUCCESS), + recordWith().status(SUCCESS)), + childRecordsCheck( + TRANSFER_TXN3, + CONTRACT_REVERT_EXECUTED, + recordWith().status(NO_REMAINING_AUTOMATIC_ASSOCIATIONS)), + getAliasedAccountInfo(ECDSA_KEY) + .has(AccountInfoAsserts.accountWith() + .key(EMPTY_KEY) + .autoRenew(THREE_MONTHS_IN_SECONDS) + .receiverSigReq(false) + .memo(LAZY_MEMO)), + getAliasedAccountBalance(evmAddressBytes) + .hasTinyBars(4 * amountToBeSent) + .hasTokenBalance(FUNGIBLE_TOKEN, amountToBeSent * 2) + .hasTokenBalance(NFT_TOKEN, 1) + .logged()); + })) + .then(); + } + + private HapiSpec transferTokenLazyCreate() { + final AtomicReference tokenAddr = new AtomicReference<>(); + + return propertyPreservingHapiSpec("transferTokenLazyCreate") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, 
CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ECDSA_KEY).shape(SECP_256K1_SHAPE), + newKeyNamed(MULTI_KEY), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(OWNER).balance(100 * ONE_HUNDRED_HBARS), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(5) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .exposingCreatedIdTo(id -> tokenAddr.set( + HapiPropertySource.asHexedSolidityAddress(HapiPropertySource.asToken(id)))), + uploadInitCode(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), + contractCreate(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), + tokenAssociate(OWNER, List.of(FUNGIBLE_TOKEN)), + cryptoTransfer(moving(5, FUNGIBLE_TOKEN).between(TOKEN_TREASURY, OWNER))) + .when(withOpContext((spec, opLog) -> { + final var ecdsaKey = spec.registry().getKey(ECDSA_KEY); + final var tmp = ecdsaKey.getECDSASecp256K1().toByteArray(); + final var addressBytes = recoverAddressFromPubKey(tmp); + final var alias = ByteStringUtils.wrapUnsafely(addressBytes); + allRunFor( + spec, + contractCall( + TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT, + "transferTokenCallNestedThenAgain", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(OWNER))), + HapiParserUtil.asHeadlongAddress(addressBytes), + 2L, + 2L) + .via(TRANSFER_TOKEN_TXN) + .alsoSigningWithFullPrefix(OWNER) + .gas(GAS_TO_OFFER) + .hasKnownStatus(SUCCESS), + getAliasedAccountInfo(ECDSA_KEY) + .has(AccountInfoAsserts.accountWith() + .key(EMPTY_KEY) + .autoRenew(THREE_MONTHS_IN_SECONDS) + .receiverSigReq(false) + .memo(LAZY_MEMO)), + getAliasedAccountBalance(alias) + .hasTokenBalance(FUNGIBLE_TOKEN, 4) + .logged(), + childRecordsCheck( + TRANSFER_TOKEN_TXN, + SUCCESS, + recordWith().status(SUCCESS), + recordWith().status(SUCCESS), + recordWith().status(SUCCESS))); + })) + .then(); + } + + private HapiSpec transferTokensToEVMAddressAliasRevertAndTransferAgainSuccessfully() { + final AtomicReference tokenAddr = new AtomicReference<>(); + + return propertyPreservingHapiSpec("transferTokensToEVMAddressAliasRevertAndTransferAgainSuccessfully") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ECDSA_KEY).shape(SECP_256K1_SHAPE), + newKeyNamed(MULTI_KEY), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(OWNER).balance(100 * ONE_HUNDRED_HBARS), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(5) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .exposingCreatedIdTo(id -> tokenAddr.set( + HapiPropertySource.asHexedSolidityAddress(HapiPropertySource.asToken(id)))), + uploadInitCode(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), + contractCreate(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), + tokenAssociate(OWNER, List.of(FUNGIBLE_TOKEN)), + cryptoTransfer(moving(5, FUNGIBLE_TOKEN).between(TOKEN_TREASURY, OWNER))) + .when(withOpContext((spec, opLog) -> { + final var ecdsaKey = spec.registry().getKey(ECDSA_KEY); + final var tmp = ecdsaKey.getECDSASecp256K1().toByteArray(); + final var addressBytes = recoverAddressFromPubKey(tmp); + final var alias = ByteStringUtils.wrapUnsafely(addressBytes); + assert addressBytes != null; + allRunFor( + spec, + contractCall( + TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT, + "transferTokensCallNestedThenAgain", + HapiParserUtil.asHeadlongAddress( + 
asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN))), + new Address[] { + HapiParserUtil.asHeadlongAddress(asAddress( + spec.registry().getAccountID(OWNER))), + HapiParserUtil.asHeadlongAddress(addressBytes) + }, + new long[] {-2L, 2L}, + new long[] {-2L, 2L}) + .via(TRANSFER_TOKENS_TXN) + .gas(GAS_TO_OFFER) + .alsoSigningWithFullPrefix(OWNER) + .hasKnownStatus(SUCCESS), + getAliasedAccountInfo(ECDSA_KEY) + .has(AccountInfoAsserts.accountWith() + .key(EMPTY_KEY) + .autoRenew(THREE_MONTHS_IN_SECONDS) + .receiverSigReq(false) + .memo(LAZY_MEMO)), + getAliasedAccountBalance(alias) + .hasTokenBalance(FUNGIBLE_TOKEN, 4) + .logged(), + childRecordsCheck( + TRANSFER_TOKENS_TXN, + SUCCESS, + recordWith().status(SUCCESS), + recordWith().status(SUCCESS), + recordWith().status(SUCCESS))); + })) + .then(); + } + + private HapiSpec transferNftLazyCreate() { + return propertyPreservingHapiSpec("transferNftLazyCreate") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ECDSA_KEY).shape(SECP_256K1_SHAPE), + newKeyNamed(MULTI_KEY), + cryptoCreate(OWNER).balance(100 * ONE_HUNDRED_HBARS), + cryptoCreate(SPENDER), + cryptoCreate(TOKEN_TREASURY), + tokenCreate(NON_FUNGIBLE_TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .initialSupply(0) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY), + uploadInitCode(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), + contractCreate(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), + tokenAssociate(OWNER, NON_FUNGIBLE_TOKEN), + tokenAssociate(SPENDER, NON_FUNGIBLE_TOKEN), + tokenAssociate(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT, NON_FUNGIBLE_TOKEN), + mintToken(NON_FUNGIBLE_TOKEN, List.of(FIRST_META, SECOND_META)), + cryptoTransfer(movingUnique(NON_FUNGIBLE_TOKEN, 1L, 2L).between(TOKEN_TREASURY, OWNER))) + .when(withOpContext((spec, opLog) -> { + final var ecdsaKey = spec.registry().getKey(ECDSA_KEY); + final var tmp = ecdsaKey.getECDSASecp256K1().toByteArray(); + final var addressBytes = recoverAddressFromPubKey(tmp); + final var alias = ByteStringUtils.wrapUnsafely(addressBytes); + allRunFor( + spec, + contractCall( + TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT, + "transferNFTCallNestedThenAgain", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(NON_FUNGIBLE_TOKEN))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(OWNER))), + HapiParserUtil.asHeadlongAddress(addressBytes), + 1L, + 2L) + .via(TRANSFER_NFT_TXN) + .alsoSigningWithFullPrefix(OWNER) + .gas(GAS_TO_OFFER) + .hasKnownStatus(SUCCESS), + getAliasedAccountInfo(ECDSA_KEY) + .has(AccountInfoAsserts.accountWith() + .key(EMPTY_KEY) + .autoRenew(THREE_MONTHS_IN_SECONDS) + .receiverSigReq(false) + .memo(LAZY_MEMO)), + getAliasedAccountBalance(alias) + .hasTokenBalance(NON_FUNGIBLE_TOKEN, 2) + .logged(), + childRecordsCheck( + TRANSFER_NFT_TXN, + SUCCESS, + recordWith().status(SUCCESS), + recordWith().status(SUCCESS), + recordWith().status(SUCCESS))); + })) + .then(); + } + + private HapiSpec transferNftsLazyCreate() { + return propertyPreservingHapiSpec("transferNftsLazyCreate") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ECDSA_KEY).shape(SECP_256K1_SHAPE), + newKeyNamed(MULTI_KEY), + cryptoCreate(OWNER).balance(100 * ONE_HUNDRED_HBARS), + cryptoCreate(SPENDER), + cryptoCreate(TOKEN_TREASURY), + 
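+                        // NFT with two minted serials; both end up held by the lazy-created alias account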
tokenCreate(NON_FUNGIBLE_TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .initialSupply(0) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY), + uploadInitCode(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), + contractCreate(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT), + tokenAssociate(OWNER, NON_FUNGIBLE_TOKEN), + tokenAssociate(SPENDER, NON_FUNGIBLE_TOKEN), + tokenAssociate(TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT, NON_FUNGIBLE_TOKEN), + mintToken(NON_FUNGIBLE_TOKEN, List.of(FIRST_META, SECOND_META)), + cryptoTransfer(movingUnique(NON_FUNGIBLE_TOKEN, 1L, 2L).between(TOKEN_TREASURY, OWNER))) + .when(withOpContext((spec, opLog) -> { + final var ecdsaKey = spec.registry().getKey(ECDSA_KEY); + final var tmp = ecdsaKey.getECDSASecp256K1().toByteArray(); + final var addressBytes = recoverAddressFromPubKey(tmp); + final var alias = ByteStringUtils.wrapUnsafely(addressBytes); + assert addressBytes != null; + allRunFor( + spec, + contractCall( + TRANSFER_TO_ALIAS_PRECOMPILE_CONTRACT, + "transferNFTsCallNestedThenAgain", + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(NON_FUNGIBLE_TOKEN))), + new Address[] { + HapiParserUtil.asHeadlongAddress(asAddress( + spec.registry().getAccountID(OWNER))) + }, + new Address[] {HapiParserUtil.asHeadlongAddress(addressBytes)}, + new long[] {1L}, + new long[] {2L}) + .via(TRANSFER_NFTS_TXN) + .alsoSigningWithFullPrefix(OWNER) + .gas(GAS_TO_OFFER), + getAliasedAccountInfo(ECDSA_KEY) + .has(AccountInfoAsserts.accountWith() + .key(EMPTY_KEY) + .autoRenew(THREE_MONTHS_IN_SECONDS) + .receiverSigReq(false) + .memo(LAZY_MEMO)), + getAliasedAccountBalance(alias) + .hasTokenBalance(NON_FUNGIBLE_TOKEN, 2) + .logged(), + childRecordsCheck( + TRANSFER_NFTS_TXN, + SUCCESS, + recordWith().status(SUCCESS), + recordWith().status(SUCCESS), + recordWith().status(SUCCESS))); + })) + .then(); + } + + private HapiSpec htsTransferFromFungibleTokenLazyCreate() { + final var allowance = 10L; + final var successfulTransferFromTxn = "txn"; + return propertyPreservingHapiSpec("htsTransferFromFungibleTokenLazyCreate") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ECDSA_KEY).shape(SECP_256K1_SHAPE), + newKeyNamed(MULTI_KEY), + cryptoCreate(OWNER).balance(100 * ONE_HUNDRED_HBARS).maxAutomaticTokenAssociations(5), + tokenCreate(FUNGIBLE_TOKEN) + .tokenType(TokenType.FUNGIBLE_COMMON) + .supplyType(TokenSupplyType.FINITE) + .initialSupply(10L) + .maxSupply(1000L) + .supplyKey(MULTI_KEY) + .treasury(OWNER), + uploadInitCode(HTS_TRANSFER_FROM_CONTRACT), + contractCreate(HTS_TRANSFER_FROM_CONTRACT), + cryptoApproveAllowance() + .payingWith(DEFAULT_PAYER) + .addTokenAllowance(OWNER, FUNGIBLE_TOKEN, HTS_TRANSFER_FROM_CONTRACT, allowance) + .via(BASE_APPROVE_TXN) + .signedBy(DEFAULT_PAYER, OWNER) + .fee(ONE_HBAR), + getAccountDetails(OWNER) + .payingWith(GENESIS) + .has(accountDetailsWith() + .tokenAllowancesContaining( + FUNGIBLE_TOKEN, HTS_TRANSFER_FROM_CONTRACT, allowance))) + .when(withOpContext((spec, opLog) -> { + final var ecdsaKey = spec.registry().getKey(ECDSA_KEY); + final var tmp = ecdsaKey.getECDSASecp256K1().toByteArray(); + final var addressBytes = recoverAddressFromPubKey(tmp); + final ByteString alias = ByteStringUtils.wrapUnsafely(addressBytes); + allRunFor( + spec, + // transfer allowance/2 amount + contractCall( + HTS_TRANSFER_FROM_CONTRACT, + HTS_TRANSFER_FROM, + HapiParserUtil.asHeadlongAddress( + 
asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(OWNER))), + HapiParserUtil.asHeadlongAddress(addressBytes), + BigInteger.valueOf(allowance / 2)) + .gas(GAS_TO_OFFER) + .via(successfulTransferFromTxn) + .hasKnownStatus(SUCCESS), + childRecordsCheck( + successfulTransferFromTxn, + SUCCESS, + recordWith().status(SUCCESS).memo(LAZY_MEMO), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_TRANSFER_FROM) + .withStatus(SUCCESS)))), + getAliasedAccountInfo(ECDSA_KEY) + .logged() + .has(AccountInfoAsserts.accountWith() + .key(EMPTY_KEY) + .autoRenew(THREE_MONTHS_IN_SECONDS) + .receiverSigReq(false) + .memo(LAZY_MEMO)), + getAliasedAccountBalance(alias) + .hasTokenBalance(FUNGIBLE_TOKEN, allowance / 2) + .logged()); + })) + .then(); + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/MixedHTSPrecompileTestsSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/MixedHTSPrecompileTestsV1SecurityModelSuite.java similarity index 91% rename from hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/MixedHTSPrecompileTestsSuite.java rename to hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/MixedHTSPrecompileTestsV1SecurityModelSuite.java index 7fcc977dd682..28aeacd47139 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/MixedHTSPrecompileTestsSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/MixedHTSPrecompileTestsV1SecurityModelSuite.java @@ -17,7 +17,7 @@ package com.hedera.services.bdd.suites.contract.precompile; import static com.hedera.services.bdd.spec.HapiPropertySource.asTokenString; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; import static com.hedera.services.bdd.spec.keys.KeyShape.ED25519; @@ -36,8 +36,11 @@ import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overriding; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT; @@ -56,8 +59,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -public class MixedHTSPrecompileTestsSuite extends 
HapiSuite { - private static final Logger log = LogManager.getLogger(MixedHTSPrecompileTestsSuite.class); +public class MixedHTSPrecompileTestsV1SecurityModelSuite extends HapiSuite { + private static final Logger log = LogManager.getLogger(MixedHTSPrecompileTestsV1SecurityModelSuite.class); private static final long GAS_TO_OFFER = 4_000_000L; private static final long TOTAL_SUPPLY = 1_000; @@ -70,12 +73,12 @@ public class MixedHTSPrecompileTestsSuite extends HapiSuite { private static final String EXPLICIT_CREATE_RESULT = "Explicit create result is {}"; public static void main(String... args) { - new MixedHTSPrecompileTestsSuite().runSuiteAsync(); + new MixedHTSPrecompileTestsV1SecurityModelSuite().runSuiteSync(); } @Override public boolean canRunConcurrent() { - return true; + return false; } @Override @@ -91,8 +94,10 @@ private HapiSpec hscsPrec021TryCatchConstructOnlyRollsBackTheFailedPrecompile() final var outerContract = "AssociateTryCatch"; final var nestedContract = "CalledContract"; - return defaultHapiSpec("hscsPrec021TryCatchConstructOnlyRollsBackTheFailedPrecompile") + return propertyPreservingHapiSpec("hscsPrec021TryCatchConstructOnlyRollsBackTheFailedPrecompile") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), cryptoCreate(theAccount).balance(10 * ONE_HUNDRED_HBARS), cryptoCreate(TOKEN_TREASURY), tokenCreate(token) @@ -144,8 +149,10 @@ private HapiSpec createTokenWithFixedFeeThenTransferAndAssessFee() { final var TREASURY_KEY = "treasuryKey"; final var RECIPIENT_KEY = "recipientKey"; final var SECOND_RECIPIENT_KEY = "secondRecipientKey"; - return defaultHapiSpec("createTokenWithFixedFeeThenTransferAndAssessFee") + return propertyPreservingHapiSpec("createTokenWithFixedFeeThenTransferAndAssessFee") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), newKeyNamed(ED25519KEY).shape(ED25519), newKeyNamed(FEE_COLLECTOR_KEY), newKeyNamed(TREASURY_KEY), diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/PauseUnpauseTokenAccountPrecompileSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/PauseUnpauseTokenAccountPrecompileSuite.java index 153b135c3b6b..d189bfdd00b0 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/PauseUnpauseTokenAccountPrecompileSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/PauseUnpauseTokenAccountPrecompileSuite.java @@ -19,15 +19,11 @@ import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoUpdate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; -import 
static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenDelete; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenUnpause; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; @@ -40,14 +36,9 @@ import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_TOKEN_ID; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_PAUSE_KEY; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_WAS_DELETED; -import static com.hederahashgraph.api.proto.java.TokenPauseStatus.Paused; -import static com.hederahashgraph.api.proto.java.TokenPauseStatus.Unpaused; import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; -import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; import com.hedera.services.bdd.spec.HapiSpec; import com.hedera.services.bdd.suites.HapiSuite; @@ -62,20 +53,12 @@ public class PauseUnpauseTokenAccountPrecompileSuite extends HapiSuite { private static final Logger log = LogManager.getLogger(PauseUnpauseTokenAccountPrecompileSuite.class); public static final String PAUSE_UNPAUSE_CONTRACT = "PauseUnpauseTokenAccount"; - private static final String UNPAUSE_KEY = "UNPAUSE_KEY"; - - private static final String PAUSE_KEY = "PAUSE_KEY"; - private static final String ACCOUNT = "account"; public static final long INITIAL_BALANCE = 1_000_000_000L; private static final long GAS_TO_OFFER = 4_000_000L; public static final String PAUSE_TOKEN_ACCOUNT_FUNCTION_NAME = "pauseTokenAccount"; public static final String UNPAUSE_TOKEN_ACCOUNT_FUNCTION_NAME = "unpauseTokenAccount"; - private static final String PAUSE_FUNGIBLE_TXN = "pauseFungibleTxn"; - private static final String UNPAUSE_FUNGIBLE_TXN = "unpauseFungibleTxn"; - private static final String PAUSE_NONFUNGIBLE_TXN = "pauseNonFungibleTxn"; - private static final String UNPAUSE_NONFUNGIBLE_TXN = "unpauseNonFungibleTxn"; private static final String INVALID_ADDRESS = "0x0000000000000000000000000000000000123456"; public static final String UNPAUSE_TX = "UnpauseTx"; public static final String PAUSE_TX = "PauseTx"; @@ -96,13 +79,7 @@ protected Logger getResultsLogger() { @Override public List getSpecsInSuite() { - return List.of( - pauseFungibleTokenHappyPath(), - unpauseFungibleTokenHappyPath(), - pauseNonFungibleTokenHappyPath(), - unpauseNonFungibleTokenHappyPath(), - noTokenIdReverts(), - noAccountKeyReverts()); + return List.of(noTokenIdReverts(), noAccountKeyReverts()); } private HapiSpec noTokenIdReverts() { @@ -201,236 +178,4 @@ private HapiSpec noAccountKeyReverts() { .contractCallResult( htsPrecompileResult().withStatus(TOKEN_HAS_NO_PAUSE_KEY))))); } - - HapiSpec pauseFungibleTokenHappyPath() { - final AtomicReference vanillaTokenID = new AtomicReference<>(); - return defaultHapiSpec("PauseFungibleTokenHappyPath") - .given( - newKeyNamed(MULTI_KEY), - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(ACCOUNT).balance(INITIAL_BALANCE), - 
tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .pauseKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .initialSupply(1_000) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - uploadInitCode(PAUSE_UNPAUSE_CONTRACT), - contractCreate(PAUSE_UNPAUSE_CONTRACT)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - PAUSE_UNPAUSE_CONTRACT, - PAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, - asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via("pauseFungibleAccountDoesNotOwnPauseKeyFailingTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - cryptoUpdate(ACCOUNT).key(MULTI_KEY), - contractCall( - PAUSE_UNPAUSE_CONTRACT, - PAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, - asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via(PAUSE_FUNGIBLE_TXN) - .gas(GAS_TO_OFFER), - getTokenInfo(VANILLA_TOKEN).hasPauseStatus(Paused), - tokenUnpause(VANILLA_TOKEN), - tokenDelete(VANILLA_TOKEN), - contractCall( - PAUSE_UNPAUSE_CONTRACT, - PAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, - asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via("pauseFungibleAccountIsDeletedFailingTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED)))) - .then( - childRecordsCheck( - "pauseFungibleAccountDoesNotOwnPauseKeyFailingTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(INVALID_SIGNATURE) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INVALID_SIGNATURE)))), - childRecordsCheck( - "pauseFungibleAccountIsDeletedFailingTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(TOKEN_WAS_DELETED) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(TOKEN_WAS_DELETED))))); - } - - HapiSpec unpauseFungibleTokenHappyPath() { - final AtomicReference vanillaTokenID = new AtomicReference<>(); - return defaultHapiSpec("UnpauseFungibleTokenHappyPath") - .given( - newKeyNamed(UNPAUSE_KEY), - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(ACCOUNT).balance(INITIAL_BALANCE), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .pauseKey(UNPAUSE_KEY) - .initialSupply(1_000) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - uploadInitCode(PAUSE_UNPAUSE_CONTRACT), - contractCreate(PAUSE_UNPAUSE_CONTRACT)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - PAUSE_UNPAUSE_CONTRACT, - UNPAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, - asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via("unpauseFungibleAccountDoesNotOwnPauseKeyFailingTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - cryptoUpdate(ACCOUNT).key(UNPAUSE_KEY), - contractCall( - PAUSE_UNPAUSE_CONTRACT, - UNPAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, - asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via(UNPAUSE_FUNGIBLE_TXN) - .gas(GAS_TO_OFFER)))) - .then( - childRecordsCheck( - "unpauseFungibleAccountDoesNotOwnPauseKeyFailingTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(INVALID_SIGNATURE) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INVALID_SIGNATURE)))), - 
getTokenInfo(VANILLA_TOKEN).hasPauseStatus(Unpaused)); - } - - HapiSpec pauseNonFungibleTokenHappyPath() { - final AtomicReference vanillaTokenID = new AtomicReference<>(); - return defaultHapiSpec("PauseNonFungibleTokenHappyPath") - .given( - newKeyNamed(MULTI_KEY), - newKeyNamed(PAUSE_KEY), - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(ACCOUNT).balance(INITIAL_BALANCE), - tokenCreate(VANILLA_TOKEN) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .pauseKey(PAUSE_KEY) - .supplyKey(MULTI_KEY) - .initialSupply(0) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - uploadInitCode(PAUSE_UNPAUSE_CONTRACT), - contractCreate(PAUSE_UNPAUSE_CONTRACT)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - PAUSE_UNPAUSE_CONTRACT, - PAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, - asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via("pauseNonFungibleAccountDoesNotOwnPauseKeyFailingTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - cryptoUpdate(ACCOUNT).key(MULTI_KEY).key(PAUSE_KEY), - contractCall( - PAUSE_UNPAUSE_CONTRACT, - PAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, - asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via(PAUSE_NONFUNGIBLE_TXN) - .gas(GAS_TO_OFFER), - getTokenInfo(VANILLA_TOKEN).hasPauseStatus(Paused), - tokenUnpause(VANILLA_TOKEN), - tokenDelete(VANILLA_TOKEN), - contractCall( - PAUSE_UNPAUSE_CONTRACT, - PAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, - asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via("pauseNonFungibleAccountIsDeletedFailingTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED)))) - .then( - childRecordsCheck( - "pauseNonFungibleAccountDoesNotOwnPauseKeyFailingTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(INVALID_SIGNATURE) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INVALID_SIGNATURE)))), - childRecordsCheck( - "pauseNonFungibleAccountIsDeletedFailingTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(TOKEN_WAS_DELETED) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(TOKEN_WAS_DELETED))))); - } - - HapiSpec unpauseNonFungibleTokenHappyPath() { - final AtomicReference vanillaTokenID = new AtomicReference<>(); - return defaultHapiSpec("UnpauseNonFungibleTokenHappyPath") - .given( - newKeyNamed(MULTI_KEY), - newKeyNamed(UNPAUSE_KEY), - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(ACCOUNT).balance(INITIAL_BALANCE), - tokenCreate(VANILLA_TOKEN) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(TOKEN_TREASURY) - .pauseKey(UNPAUSE_KEY) - .supplyKey(MULTI_KEY) - .initialSupply(0) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - uploadInitCode(PAUSE_UNPAUSE_CONTRACT), - contractCreate(PAUSE_UNPAUSE_CONTRACT)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - PAUSE_UNPAUSE_CONTRACT, - UNPAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, - asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via("unpauseNonFungibleAccountDoesNotOwnPauseKeyFailingTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - cryptoUpdate(ACCOUNT).key(UNPAUSE_KEY), - contractCall( - PAUSE_UNPAUSE_CONTRACT, - UNPAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, - 
asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .via(UNPAUSE_NONFUNGIBLE_TXN) - .gas(GAS_TO_OFFER)))) - .then( - childRecordsCheck( - "unpauseNonFungibleAccountDoesNotOwnPauseKeyFailingTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(INVALID_SIGNATURE) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INVALID_SIGNATURE)))), - getTokenInfo(VANILLA_TOKEN).hasPauseStatus(Unpaused)); - } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/PauseUnpauseTokenAccountPrecompileV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/PauseUnpauseTokenAccountPrecompileV1SecurityModelSuite.java new file mode 100644 index 000000000000..0411d15f7387 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/PauseUnpauseTokenAccountPrecompileV1SecurityModelSuite.java @@ -0,0 +1,362 @@ +/* + * Copyright (C) 2021-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoUpdate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenDelete; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenUnpause; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overridingTwo; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asHexedAddress; +import static com.hedera.services.bdd.suites.contract.Utils.asToken; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; +import static 
com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.MULTI_KEY; +import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_WAS_DELETED; +import static com.hederahashgraph.api.proto.java.TokenPauseStatus.Paused; +import static com.hederahashgraph.api.proto.java.TokenPauseStatus.Unpaused; +import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; +import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; + +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hederahashgraph.api.proto.java.TokenID; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +@SuppressWarnings("java:S1192") // "string literal should not be duplicated" - this rule makes test suites worse +public class PauseUnpauseTokenAccountPrecompileV1SecurityModelSuite extends HapiSuite { + private static final Logger log = + LogManager.getLogger(PauseUnpauseTokenAccountPrecompileV1SecurityModelSuite.class); + public static final String PAUSE_UNPAUSE_CONTRACT = "PauseUnpauseTokenAccount"; + + private static final String UNPAUSE_KEY = "UNPAUSE_KEY"; + + private static final String PAUSE_KEY = "PAUSE_KEY"; + + private static final String ACCOUNT = "account"; + + public static final long INITIAL_BALANCE = 1_000_000_000L; + private static final long GAS_TO_OFFER = 4_000_000L; + public static final String PAUSE_TOKEN_ACCOUNT_FUNCTION_NAME = "pauseTokenAccount"; + public static final String UNPAUSE_TOKEN_ACCOUNT_FUNCTION_NAME = "unpauseTokenAccount"; + private static final String PAUSE_FUNGIBLE_TXN = "pauseFungibleTxn"; + private static final String UNPAUSE_FUNGIBLE_TXN = "unpauseFungibleTxn"; + private static final String PAUSE_NONFUNGIBLE_TXN = "pauseNonFungibleTxn"; + private static final String UNPAUSE_NONFUNGIBLE_TXN = "unpauseNonFungibleTxn"; + public static final String UNPAUSE_TX = "UnpauseTx"; + public static final String PAUSE_TX = "PauseTx"; + + public static void main(String... 
args) { + new PauseUnpauseTokenAccountPrecompileV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + protected Logger getResultsLogger() { + return log; + } + + @Override + public List getSpecsInSuite() { + return List.of( + pauseFungibleTokenHappyPath(), + unpauseFungibleTokenHappyPath(), + pauseNonFungibleTokenHappyPath(), + unpauseNonFungibleTokenHappyPath()); + } + + HapiSpec pauseFungibleTokenHappyPath() { + final AtomicReference vanillaTokenID = new AtomicReference<>(); + return propertyPreservingHapiSpec("PauseFungibleTokenHappyPath") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,TokenCreate,TokenDelete,TokenPause,TokenUnpause", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(ACCOUNT).balance(INITIAL_BALANCE), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .pauseKey(MULTI_KEY) + .adminKey(MULTI_KEY) + .initialSupply(1_000) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + uploadInitCode(PAUSE_UNPAUSE_CONTRACT), + contractCreate(PAUSE_UNPAUSE_CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + PAUSE_UNPAUSE_CONTRACT, + PAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, + asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via("pauseFungibleAccountDoesNotOwnPauseKeyFailingTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + cryptoUpdate(ACCOUNT).key(MULTI_KEY), + contractCall( + PAUSE_UNPAUSE_CONTRACT, + PAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, + asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via(PAUSE_FUNGIBLE_TXN) + .gas(GAS_TO_OFFER), + getTokenInfo(VANILLA_TOKEN).hasPauseStatus(Paused), + tokenUnpause(VANILLA_TOKEN), + tokenDelete(VANILLA_TOKEN), + contractCall( + PAUSE_UNPAUSE_CONTRACT, + PAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, + asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via("pauseFungibleAccountIsDeletedFailingTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)))) + .then( + childRecordsCheck( + "pauseFungibleAccountDoesNotOwnPauseKeyFailingTxn", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_SIGNATURE) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(INVALID_SIGNATURE)))), + childRecordsCheck( + "pauseFungibleAccountIsDeletedFailingTxn", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(TOKEN_WAS_DELETED) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(TOKEN_WAS_DELETED))))); + } + + HapiSpec unpauseFungibleTokenHappyPath() { + final AtomicReference vanillaTokenID = new AtomicReference<>(); + return propertyPreservingHapiSpec("UnpauseFungibleTokenHappyPath") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,TokenCreate,TokenDelete,TokenPause,TokenUnpause", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(UNPAUSE_KEY), + 
cryptoCreate(TOKEN_TREASURY), + cryptoCreate(ACCOUNT).balance(INITIAL_BALANCE), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .pauseKey(UNPAUSE_KEY) + .initialSupply(1_000) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + uploadInitCode(PAUSE_UNPAUSE_CONTRACT), + contractCreate(PAUSE_UNPAUSE_CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + PAUSE_UNPAUSE_CONTRACT, + UNPAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, + asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via("unpauseFungibleAccountDoesNotOwnPauseKeyFailingTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + cryptoUpdate(ACCOUNT).key(UNPAUSE_KEY), + contractCall( + PAUSE_UNPAUSE_CONTRACT, + UNPAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, + asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via(UNPAUSE_FUNGIBLE_TXN) + .gas(GAS_TO_OFFER)))) + .then( + childRecordsCheck( + "unpauseFungibleAccountDoesNotOwnPauseKeyFailingTxn", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_SIGNATURE) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(INVALID_SIGNATURE)))), + getTokenInfo(VANILLA_TOKEN).hasPauseStatus(Unpaused)); + } + + HapiSpec pauseNonFungibleTokenHappyPath() { + final AtomicReference vanillaTokenID = new AtomicReference<>(); + return propertyPreservingHapiSpec("PauseNonFungibleTokenHappyPath") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,TokenCreate,TokenDelete,TokenPause,TokenUnpause", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + newKeyNamed(PAUSE_KEY), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(ACCOUNT).balance(INITIAL_BALANCE), + tokenCreate(VANILLA_TOKEN) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .pauseKey(PAUSE_KEY) + .supplyKey(MULTI_KEY) + .initialSupply(0) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + uploadInitCode(PAUSE_UNPAUSE_CONTRACT), + contractCreate(PAUSE_UNPAUSE_CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + PAUSE_UNPAUSE_CONTRACT, + PAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, + asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via("pauseNonFungibleAccountDoesNotOwnPauseKeyFailingTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + cryptoUpdate(ACCOUNT).key(MULTI_KEY).key(PAUSE_KEY), + contractCall( + PAUSE_UNPAUSE_CONTRACT, + PAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, + asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via(PAUSE_NONFUNGIBLE_TXN) + .gas(GAS_TO_OFFER), + getTokenInfo(VANILLA_TOKEN).hasPauseStatus(Paused), + tokenUnpause(VANILLA_TOKEN), + tokenDelete(VANILLA_TOKEN), + contractCall( + PAUSE_UNPAUSE_CONTRACT, + PAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, + asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via("pauseNonFungibleAccountIsDeletedFailingTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)))) + .then( + childRecordsCheck( + 
"pauseNonFungibleAccountDoesNotOwnPauseKeyFailingTxn", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_SIGNATURE) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(INVALID_SIGNATURE)))), + childRecordsCheck( + "pauseNonFungibleAccountIsDeletedFailingTxn", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(TOKEN_WAS_DELETED) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(TOKEN_WAS_DELETED))))); + } + + HapiSpec unpauseNonFungibleTokenHappyPath() { + final AtomicReference vanillaTokenID = new AtomicReference<>(); + return propertyPreservingHapiSpec("UnpauseNonFungibleTokenHappyPath") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,TokenCreate,TokenDelete,TokenPause,TokenUnpause", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + newKeyNamed(UNPAUSE_KEY), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(ACCOUNT).balance(INITIAL_BALANCE), + tokenCreate(VANILLA_TOKEN) + .tokenType(NON_FUNGIBLE_UNIQUE) + .treasury(TOKEN_TREASURY) + .pauseKey(UNPAUSE_KEY) + .supplyKey(MULTI_KEY) + .initialSupply(0) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + uploadInitCode(PAUSE_UNPAUSE_CONTRACT), + contractCreate(PAUSE_UNPAUSE_CONTRACT)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + PAUSE_UNPAUSE_CONTRACT, + UNPAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, + asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via("unpauseNonFungibleAccountDoesNotOwnPauseKeyFailingTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + cryptoUpdate(ACCOUNT).key(UNPAUSE_KEY), + contractCall( + PAUSE_UNPAUSE_CONTRACT, + UNPAUSE_TOKEN_ACCOUNT_FUNCTION_NAME, + asHeadlongAddress(asHexedAddress(vanillaTokenID.get()))) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .via(UNPAUSE_NONFUNGIBLE_TXN) + .gas(GAS_TO_OFFER)))) + .then( + childRecordsCheck( + "unpauseNonFungibleAccountDoesNotOwnPauseKeyFailingTxn", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_SIGNATURE) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(INVALID_SIGNATURE)))), + getTokenInfo(VANILLA_TOKEN).hasPauseStatus(Unpaused)); + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/SigningReqsSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/SigningReqsSuite.java index 97d5fbd72892..694794824dec 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/SigningReqsSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/SigningReqsSuite.java @@ -16,27 +16,21 @@ package com.hedera.services.bdd.suites.contract.precompile; -import static com.hedera.services.bdd.spec.HapiPropertySource.asToken; import static com.hedera.services.bdd.spec.HapiPropertySource.asTokenString; import static com.hedera.services.bdd.spec.HapiPropertySource.idAsHeadlongAddress; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; import static com.hedera.services.bdd.spec.keys.KeyShape.*; import static 
com.hedera.services.bdd.spec.queries.QueryVerbs.*; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.*; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.*; -import static com.hedera.services.bdd.suites.contract.precompile.WipeTokenAccountPrecompileSuite.*; import static com.hedera.services.bdd.suites.file.FileUpdateSuite.*; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; import com.esaulpaugh.headlong.abi.Address; import com.hedera.services.bdd.spec.*; import com.hedera.services.bdd.spec.assertions.*; import com.hedera.services.bdd.spec.keys.KeyShape; -import com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoCreate; import com.hedera.services.bdd.spec.utilops.CustomSpecAssert; import com.hedera.services.bdd.suites.*; import com.hederahashgraph.api.proto.java.TokenID; @@ -58,7 +52,7 @@ public class SigningReqsSuite extends HapiSuite { private static final String LEGACY_ACTIVATIONS_PROP = "contracts.keys.legacyActivations"; public static final String AUTO_RENEW = "autoRenew"; - public static final String AR_KEY = "arKey"; + public static final int GAS_TO_OFFER = 1_000_000; public static void main(String... args) { new SigningReqsSuite().runSuiteAsync(); @@ -71,144 +65,7 @@ public boolean canRunConcurrent() { @Override public List getSpecsInSuite() { - return List.of( - newAutoRenewAccountMustSignUpdate(), - newTreasuryAccountMustSignUpdate(), - autoRenewAccountMustSignCreation(), - fractionalFeeCollectorMustSign(), - selfDenominatedFixedCollectorMustSign(), - autoRenewAccountCanUseLegacySigActivationIfConfigured()); - } - - @SuppressWarnings("java:S5960") - private HapiSpec selfDenominatedFixedCollectorMustSign() { - final var fcKey = "fcKey"; - final var arKey = AR_KEY; - final var feeCollector = "feeCollector"; - final var autoRenew = AUTO_RENEW; - final AtomicLong contractId = new AtomicLong(); - final AtomicReference
<Address> autoRenewAlias = new AtomicReference<>(); - final AtomicReference<Address>
feeCollectorAlias = new AtomicReference<>(); - final AtomicReference createdToken = new AtomicReference<>(); - - return defaultHapiSpec("SelfDenominatedFixedCollectorMustSign") - .given( - newKeyNamed(arKey).shape(SECP256K1), - newKeyNamed(fcKey).shape(SECP256K1), - cryptoCreate(CIVILIAN).balance(10L * ONE_HUNDRED_HBARS), - cryptoCreateWithExposingId(autoRenew, arKey, autoRenewAlias), - cryptoCreateWithExposingId(feeCollector, fcKey, feeCollectorAlias), - uploadInitCode(MINIMAL_CREATIONS_CONTRACT), - contractCreate(MINIMAL_CREATIONS_CONTRACT) - .gas(GAS_TO_OFFER) - .exposingNumTo(contractId::set)) - .when( - sourcing(() -> contractCall( - MINIMAL_CREATIONS_CONTRACT, - "makeRenewableTokenWithSelfDenominatedFixedFee", - autoRenewAlias.get(), - THREE_MONTHS_IN_SECONDS, - feeCollectorAlias.get()) - .via(FIRST_CREATE_TXN) - .gas(10L * GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .payingWith(CIVILIAN) - .alsoSigningWithFullPrefix(autoRenew) - .refusingEthConversion() - .hasKnownStatus(CONTRACT_REVERT_EXECUTED)), - sourcing(() -> contractCall( - MINIMAL_CREATIONS_CONTRACT, - "makeRenewableTokenWithSelfDenominatedFixedFee", - autoRenewAlias.get(), - THREE_MONTHS_IN_SECONDS, - feeCollectorAlias.get()) - .via(FIRST_CREATE_TXN) - .gas(10L * GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .payingWith(CIVILIAN) - .alsoSigningWithFullPrefix(autoRenew, feeCollector) - .refusingEthConversion())) - .then( - getTxnRecord(FIRST_CREATE_TXN) - .andAllChildRecords() - .exposingTokenCreationsTo(creations -> createdToken.set(creations.get(0))), - sourcing(() -> getTokenInfo(asTokenString(createdToken.get())) - .hasAutoRenewAccount(autoRenew) - .logged() - .hasCustom((spec, fees) -> { - assertEquals(1, fees.size()); - final var fee = fees.get(0); - assertTrue(fee.hasFixedFee()); - assertEquals( - createdToken.get(), - fee.getFixedFee().getDenominatingTokenId()); - assertEquals( - spec.registry().getAccountID(feeCollector), fee.getFeeCollectorAccountId()); - }))); - } - - @SuppressWarnings("java:S5960") - private HapiSpec fractionalFeeCollectorMustSign() { - final var fcKey = "fcKey"; - final var arKey = AR_KEY; - final var feeCollector = "feeCollector"; - final var autoRenew = AUTO_RENEW; - final AtomicLong contractId = new AtomicLong(); - final AtomicReference
<Address> autoRenewAlias = new AtomicReference<>(); - final AtomicReference<Address>
feeCollectorAlias = new AtomicReference<>(); - final AtomicReference createdToken = new AtomicReference<>(); - - return defaultHapiSpec("FractionalFeeCollectorMustSign") - .given( - newKeyNamed(arKey).shape(SECP256K1), - newKeyNamed(fcKey).shape(SECP256K1), - cryptoCreate(CIVILIAN).balance(10L * ONE_HUNDRED_HBARS), - cryptoCreateWithExposingId(autoRenew, arKey, autoRenewAlias), - cryptoCreateWithExposingId(feeCollector, fcKey, feeCollectorAlias), - uploadInitCode(MINIMAL_CREATIONS_CONTRACT), - contractCreate(MINIMAL_CREATIONS_CONTRACT) - .gas(GAS_TO_OFFER) - .exposingNumTo(contractId::set)) - .when( - sourcing(() -> contractCall( - MINIMAL_CREATIONS_CONTRACT, - "makeRenewableTokenWithFractionalFee", - autoRenewAlias.get(), - THREE_MONTHS_IN_SECONDS, - feeCollectorAlias.get()) - .via(FIRST_CREATE_TXN) - .gas(10L * GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .payingWith(CIVILIAN) - .alsoSigningWithFullPrefix(autoRenew) - .refusingEthConversion() - .hasKnownStatus(CONTRACT_REVERT_EXECUTED)), - sourcing(() -> contractCall( - MINIMAL_CREATIONS_CONTRACT, - "makeRenewableTokenWithFractionalFee", - autoRenewAlias.get(), - THREE_MONTHS_IN_SECONDS, - feeCollectorAlias.get()) - .via(FIRST_CREATE_TXN) - .gas(10L * GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .payingWith(CIVILIAN) - .alsoSigningWithFullPrefix(autoRenew, feeCollector) - .refusingEthConversion())) - .then( - getTxnRecord(FIRST_CREATE_TXN) - .andAllChildRecords() - .exposingTokenCreationsTo(creations -> createdToken.set(creations.get(0))), - sourcing(() -> getTokenInfo(asTokenString(createdToken.get())) - .hasAutoRenewAccount(autoRenew) - .logged() - .hasCustom((spec, fees) -> { - assertEquals(1, fees.size()); - final var fee = fees.get(0); - assertTrue(fee.hasFractionalFee()); - assertEquals( - spec.registry().getAccountID(feeCollector), fee.getFeeCollectorAccountId()); - }))); + return List.of(autoRenewAccountCanUseLegacySigActivationIfConfigured()); } private HapiSpec autoRenewAccountCanUseLegacySigActivationIfConfigured() { @@ -277,164 +134,6 @@ private HapiSpec autoRenewAccountCanUseLegacySigActivationIfConfigured() { getTokenInfo(asTokenString(createdToken.get())).hasAutoRenewAccount(autoRenew))); } - private HapiSpec autoRenewAccountMustSignCreation() { - final var arKey = AR_KEY; - final var autoRenew = AUTO_RENEW; - final AtomicReference
autoRenewAlias = new AtomicReference<>(); - final AtomicLong contractId = new AtomicLong(); - final AtomicReference createdToken = new AtomicReference<>(); - - return defaultHapiSpec("AutoRenewAccountMustSignCreation") - .given( - newKeyNamed(arKey).shape(SECP256K1), - cryptoCreate(CIVILIAN).balance(10L * ONE_HUNDRED_HBARS), - cryptoCreateWithExposingId(autoRenew, arKey, autoRenewAlias), - uploadInitCode(MINIMAL_CREATIONS_CONTRACT), - contractCreate(MINIMAL_CREATIONS_CONTRACT) - .exposingNumTo(contractId::set) - .gas(GAS_TO_OFFER)) - .when( - // Fails without the auto-renew account's full-prefix signature - sourcing(() -> contractCall( - MINIMAL_CREATIONS_CONTRACT, - "makeRenewableToken", - autoRenewAlias.get(), - THREE_MONTHS_IN_SECONDS) - .via(FIRST_CREATE_TXN) - .gas(10L * GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .payingWith(CIVILIAN) - .refusingEthConversion() - .hasKnownStatus(CONTRACT_REVERT_EXECUTED)), - // Succeeds with the full-prefix signature - sourcing(() -> contractCall( - MINIMAL_CREATIONS_CONTRACT, - "makeRenewableToken", - autoRenewAlias.get(), - THREE_MONTHS_IN_SECONDS) - .via(SECOND_CREATE_TXN) - .gas(10L * GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .payingWith(CIVILIAN) - .alsoSigningWithFullPrefix(arKey) - .refusingEthConversion())) - .then( - getTxnRecord(SECOND_CREATE_TXN) - .andAllChildRecords() - .exposingTokenCreationsTo(creations -> createdToken.set(creations.get(0))), - childRecordsCheck( - FIRST_CREATE_TXN, - CONTRACT_REVERT_EXECUTED, - TransactionRecordAsserts.recordWith() - .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)), - sourcing(() -> - getTokenInfo(asTokenString(createdToken.get())).hasAutoRenewAccount(autoRenew))); - } - - private HapiSpec newTreasuryAccountMustSignUpdate() { - final var ft = "fungibleToken"; - final var ntKey = "ntKey"; - final var updateTxn = "updateTxn"; - final var newTreasury = "newTreasury"; - final AtomicReference
<Address> tokenMirrorAddr = new AtomicReference<>(); - final AtomicReference<Address>
newTreasuryAliasAddr = new AtomicReference<>(); - - return defaultHapiSpec("NewTreasuryAccountMustSignUpdate") - .given( - newKeyNamed(ntKey).shape(SECP256K1), - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(newTreasury) - // The new treasury must either already be associated or - // have open auto-association slots; it's therefore a bit - // odd that we require it to also sign, but this is the - // HAPI behavior, so we should be consistent for now - .maxAutomaticTokenAssociations(1) - .key(ntKey) - .exposingCreatedIdTo(id -> newTreasuryAliasAddr.set(idAsHeadlongAddress(id))), - cryptoCreate(CIVILIAN).balance(10L * ONE_HUNDRED_HBARS), - uploadInitCode(MINIMAL_CREATIONS_CONTRACT), - contractCreate(MINIMAL_CREATIONS_CONTRACT).gas(GAS_TO_OFFER), - tokenCreate(ft) - .adminKey(CIVILIAN) - .treasury(TOKEN_TREASURY) - .exposingCreatedIdTo(idLit -> tokenMirrorAddr.set(idAsHeadlongAddress(asToken(idLit))))) - .when(sourcing(() -> contractCall( - MINIMAL_CREATIONS_CONTRACT, - "updateTokenWithNewTreasury", - tokenMirrorAddr.get(), - newTreasuryAliasAddr.get()) - .via(updateTxn) - .gas(10L * GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .payingWith(CIVILIAN) - .refusingEthConversion() - .hasKnownStatus(CONTRACT_REVERT_EXECUTED))) - .then( - childRecordsCheck( - updateTxn, - CONTRACT_REVERT_EXECUTED, - TransactionRecordAsserts.recordWith() - .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)), - // Treasury account is unchanged - getTokenInfo(ft).hasTreasury(TOKEN_TREASURY)); - } - - private HapiSpec newAutoRenewAccountMustSignUpdate() { - final var ft = "fungibleToken"; - final var narKey = "narKey"; - final var adminKey = "adminKey"; - final var updateTxn = "updateTxn"; - final var newAutoRenewAccount = "newAutoRenewAccount"; - final AtomicReference
<Address> tokenMirrorAddr = new AtomicReference<>(); - final AtomicReference<Address>
newAutoRenewAliasAddr = new AtomicReference<>(); - - return defaultHapiSpec("newAutoRenewAccountMustSignUpdate") - .given( - newKeyNamed(adminKey), - newKeyNamed(narKey).shape(SECP256K1), - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(newAutoRenewAccount) - .maxAutomaticTokenAssociations(2) - .key(narKey) - .exposingCreatedIdTo(id -> newAutoRenewAliasAddr.set(idAsHeadlongAddress(id))), - cryptoCreate(CIVILIAN).balance(10L * ONE_HUNDRED_HBARS), - uploadInitCode(MINIMAL_CREATIONS_CONTRACT), - contractCreate(MINIMAL_CREATIONS_CONTRACT).gas(GAS_TO_OFFER), - tokenCreate(ft) - .autoRenewAccount(TOKEN_TREASURY) - .autoRenewPeriod(THREE_MONTHS_IN_SECONDS - 3600L) - .adminKey(CIVILIAN) - .treasury(TOKEN_TREASURY) - .exposingCreatedIdTo(idLit -> tokenMirrorAddr.set(idAsHeadlongAddress(asToken(idLit))))) - .when(sourcing(() -> contractCall( - MINIMAL_CREATIONS_CONTRACT, - "updateTokenWithNewAutoRenewInfo", - tokenMirrorAddr.get(), - newAutoRenewAliasAddr.get(), - THREE_MONTHS_IN_SECONDS + 3600) - .via(updateTxn) - .gas(10L * GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .payingWith(CIVILIAN) - .refusingEthConversion() - .hasKnownStatus(CONTRACT_REVERT_EXECUTED))) - .then( - childRecordsCheck( - updateTxn, - CONTRACT_REVERT_EXECUTED, - TransactionRecordAsserts.recordWith() - .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)), - // Auto-renew account is unchanged - getTokenInfo(ft).hasAutoRenewAccount(TOKEN_TREASURY)); - } - - private static HapiCryptoCreate cryptoCreateWithExposingId( - String accountName, String keyName, AtomicReference
addressReference) { - return cryptoCreate(accountName) - .key(keyName) - .exposingCreatedIdTo(id -> addressReference.set(idAsHeadlongAddress(id))); - } - @Override protected Logger getResultsLogger() { return log; diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/SigningReqsV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/SigningReqsV1SecurityModelSuite.java new file mode 100644 index 000000000000..3624995f718d --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/SigningReqsV1SecurityModelSuite.java @@ -0,0 +1,405 @@ +/* + * Copyright (C) 2022-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.hedera.services.bdd.spec.HapiPropertySource.asToken; +import static com.hedera.services.bdd.spec.HapiPropertySource.asTokenString; +import static com.hedera.services.bdd.spec.HapiPropertySource.idAsHeadlongAddress; +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.keys.KeyShape.*; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.*; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.*; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.*; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.file.FileUpdateSuite.*; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.esaulpaugh.headlong.abi.Address; +import com.hedera.services.bdd.spec.*; +import com.hedera.services.bdd.spec.assertions.*; +import com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoCreate; +import com.hedera.services.bdd.suites.*; +import com.hederahashgraph.api.proto.java.TokenID; +import java.util.*; +import java.util.concurrent.atomic.*; +import org.apache.logging.log4j.*; + +// Some of the test cases cannot be converted to use eth calls, +// since they use admin keys, which are held by the txn payer. +// In the case of an eth txn, we revoke the payers keys and the txn would fail. +// The only way an eth account to create a token is the admin key to be of a contractId type. 
+@SuppressWarnings("java:S1192") // "string literal should not be duplicated" - this rule makes test suites worse +public class SigningReqsV1SecurityModelSuite extends HapiSuite { + private static final Logger log = LogManager.getLogger(SigningReqsV1SecurityModelSuite.class); + + private static final String FIRST_CREATE_TXN = "firstCreateTxn"; + private static final String SECOND_CREATE_TXN = "secondCreateTxn"; + private static final long DEFAULT_AMOUNT_TO_SEND = 20 * ONE_HBAR; + private static final String MINIMAL_CREATIONS_CONTRACT = "MinimalTokenCreations"; + + public static final String AUTO_RENEW = "autoRenew"; + public static final String AR_KEY = "arKey"; + public static final int GAS_TO_OFFER = 1_000_000; + + public static void main(String... args) { + new SigningReqsV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + public List getSpecsInSuite() { + return List.of( + newAutoRenewAccountMustSignUpdate(), + newTreasuryAccountMustSignUpdate(), + autoRenewAccountMustSignCreation(), + fractionalFeeCollectorMustSign(), + selfDenominatedFixedCollectorMustSign()); + } + + @SuppressWarnings("java:S5960") // "assertions should not be used in production code" - not production + private HapiSpec selfDenominatedFixedCollectorMustSign() { + final var fcKey = "fcKey"; + final var arKey = AR_KEY; + final var feeCollector = "feeCollector"; + final var autoRenew = AUTO_RENEW; + final AtomicLong contractId = new AtomicLong(); + final AtomicReference
<Address> autoRenewAlias = new AtomicReference<>(); + final AtomicReference<Address>
feeCollectorAlias = new AtomicReference<>(); + final AtomicReference createdToken = new AtomicReference<>(); + + return propertyPreservingHapiSpec("SelfDenominatedFixedCollectorMustSign") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenCreate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(arKey).shape(SECP256K1), + newKeyNamed(fcKey).shape(SECP256K1), + cryptoCreate(CIVILIAN).balance(10L * ONE_HUNDRED_HBARS), + cryptoCreateWithExposingId(autoRenew, arKey, autoRenewAlias), + cryptoCreateWithExposingId(feeCollector, fcKey, feeCollectorAlias), + uploadInitCode(MINIMAL_CREATIONS_CONTRACT), + contractCreate(MINIMAL_CREATIONS_CONTRACT) + .gas(GAS_TO_OFFER) + .exposingNumTo(contractId::set)) + .when( + sourcing(() -> contractCall( + MINIMAL_CREATIONS_CONTRACT, + "makeRenewableTokenWithSelfDenominatedFixedFee", + autoRenewAlias.get(), + THREE_MONTHS_IN_SECONDS, + feeCollectorAlias.get()) + .via(FIRST_CREATE_TXN) + .gas(10L * GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .payingWith(CIVILIAN) + .alsoSigningWithFullPrefix(autoRenew) + .refusingEthConversion() + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)), + sourcing(() -> contractCall( + MINIMAL_CREATIONS_CONTRACT, + "makeRenewableTokenWithSelfDenominatedFixedFee", + autoRenewAlias.get(), + THREE_MONTHS_IN_SECONDS, + feeCollectorAlias.get()) + .via(FIRST_CREATE_TXN) + .gas(10L * GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .payingWith(CIVILIAN) + .alsoSigningWithFullPrefix(autoRenew, feeCollector) + .refusingEthConversion())) + .then( + getTxnRecord(FIRST_CREATE_TXN) + .andAllChildRecords() + .exposingTokenCreationsTo(creations -> createdToken.set(creations.get(0))), + sourcing(() -> getTokenInfo(asTokenString(createdToken.get())) + .hasAutoRenewAccount(autoRenew) + .logged() + .hasCustom((spec, fees) -> { + assertEquals(1, fees.size()); + final var fee = fees.get(0); + assertTrue(fee.hasFixedFee()); + assertEquals( + createdToken.get(), + fee.getFixedFee().getDenominatingTokenId()); + assertEquals( + spec.registry().getAccountID(feeCollector), fee.getFeeCollectorAccountId()); + }))); + } + + @SuppressWarnings("java:S5960") // "assertions should not be used in production code" - not production + private HapiSpec fractionalFeeCollectorMustSign() { + final var fcKey = "fcKey"; + final var arKey = AR_KEY; + final var feeCollector = "feeCollector"; + final var autoRenew = AUTO_RENEW; + final AtomicLong contractId = new AtomicLong(); + final AtomicReference
<Address> autoRenewAlias = new AtomicReference<>(); + final AtomicReference<Address>
feeCollectorAlias = new AtomicReference<>(); + final AtomicReference createdToken = new AtomicReference<>(); + + return propertyPreservingHapiSpec("FractionalFeeCollectorMustSign") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenCreate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(arKey).shape(SECP256K1), + newKeyNamed(fcKey).shape(SECP256K1), + cryptoCreate(CIVILIAN).balance(10L * ONE_HUNDRED_HBARS), + cryptoCreateWithExposingId(autoRenew, arKey, autoRenewAlias), + cryptoCreateWithExposingId(feeCollector, fcKey, feeCollectorAlias), + uploadInitCode(MINIMAL_CREATIONS_CONTRACT), + contractCreate(MINIMAL_CREATIONS_CONTRACT) + .gas(GAS_TO_OFFER) + .exposingNumTo(contractId::set)) + .when( + sourcing(() -> contractCall( + MINIMAL_CREATIONS_CONTRACT, + "makeRenewableTokenWithFractionalFee", + autoRenewAlias.get(), + THREE_MONTHS_IN_SECONDS, + feeCollectorAlias.get()) + .via(FIRST_CREATE_TXN) + .gas(10L * GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .payingWith(CIVILIAN) + .alsoSigningWithFullPrefix(autoRenew) + .refusingEthConversion() + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)), + sourcing(() -> contractCall( + MINIMAL_CREATIONS_CONTRACT, + "makeRenewableTokenWithFractionalFee", + autoRenewAlias.get(), + THREE_MONTHS_IN_SECONDS, + feeCollectorAlias.get()) + .via(FIRST_CREATE_TXN) + .gas(10L * GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .payingWith(CIVILIAN) + .alsoSigningWithFullPrefix(autoRenew, feeCollector) + .refusingEthConversion())) + .then( + getTxnRecord(FIRST_CREATE_TXN) + .andAllChildRecords() + .exposingTokenCreationsTo(creations -> createdToken.set(creations.get(0))), + sourcing(() -> getTokenInfo(asTokenString(createdToken.get())) + .hasAutoRenewAccount(autoRenew) + .logged() + .hasCustom((spec, fees) -> { + assertEquals(1, fees.size()); + final var fee = fees.get(0); + assertTrue(fee.hasFractionalFee()); + assertEquals( + spec.registry().getAccountID(feeCollector), fee.getFeeCollectorAccountId()); + }))); + } + + private HapiSpec autoRenewAccountMustSignCreation() { + final var arKey = AR_KEY; + final var autoRenew = AUTO_RENEW; + final AtomicReference
autoRenewAlias = new AtomicReference<>(); + final AtomicLong contractId = new AtomicLong(); + final AtomicReference createdToken = new AtomicReference<>(); + + return propertyPreservingHapiSpec("AutoRenewAccountMustSignCreation") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenCreate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(arKey).shape(SECP256K1), + cryptoCreate(CIVILIAN).balance(10L * ONE_HUNDRED_HBARS), + cryptoCreateWithExposingId(autoRenew, arKey, autoRenewAlias), + uploadInitCode(MINIMAL_CREATIONS_CONTRACT), + contractCreate(MINIMAL_CREATIONS_CONTRACT) + .exposingNumTo(contractId::set) + .gas(GAS_TO_OFFER)) + .when( + // Fails without the auto-renew account's full-prefix signature + sourcing(() -> contractCall( + MINIMAL_CREATIONS_CONTRACT, + "makeRenewableToken", + autoRenewAlias.get(), + THREE_MONTHS_IN_SECONDS) + .via(FIRST_CREATE_TXN) + .gas(10L * GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .payingWith(CIVILIAN) + .refusingEthConversion() + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)), + // Succeeds with the full-prefix signature + sourcing(() -> contractCall( + MINIMAL_CREATIONS_CONTRACT, + "makeRenewableToken", + autoRenewAlias.get(), + THREE_MONTHS_IN_SECONDS) + .via(SECOND_CREATE_TXN) + .gas(10L * GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .payingWith(CIVILIAN) + .alsoSigningWithFullPrefix(arKey) + .refusingEthConversion())) + .then( + getTxnRecord(SECOND_CREATE_TXN) + .andAllChildRecords() + .exposingTokenCreationsTo(creations -> createdToken.set(creations.get(0))), + childRecordsCheck( + FIRST_CREATE_TXN, + CONTRACT_REVERT_EXECUTED, + TransactionRecordAsserts.recordWith() + .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)), + sourcing(() -> + getTokenInfo(asTokenString(createdToken.get())).hasAutoRenewAccount(autoRenew))); + } + + private HapiSpec newTreasuryAccountMustSignUpdate() { + final var ft = "fungibleToken"; + final var ntKey = "ntKey"; + final var updateTxn = "updateTxn"; + final var newTreasury = "newTreasury"; + final AtomicReference
<Address> tokenMirrorAddr = new AtomicReference<>(); + final AtomicReference<Address>
newTreasuryAliasAddr = new AtomicReference<>(); + + return propertyPreservingHapiSpec("NewTreasuryAccountMustSignUpdate") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenCreate,TokenUpdate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ntKey).shape(SECP256K1), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(newTreasury) + // The new treasury must either already be associated or + // have open auto-association slots; it's therefore a bit + // odd that we require it to also sign, but this is the + // HAPI behavior, so we should be consistent for now + .maxAutomaticTokenAssociations(1) + .key(ntKey) + .exposingCreatedIdTo(id -> newTreasuryAliasAddr.set(idAsHeadlongAddress(id))), + cryptoCreate(CIVILIAN).balance(10L * ONE_HUNDRED_HBARS), + uploadInitCode(MINIMAL_CREATIONS_CONTRACT), + contractCreate(MINIMAL_CREATIONS_CONTRACT).gas(GAS_TO_OFFER), + tokenCreate(ft) + .adminKey(CIVILIAN) + .treasury(TOKEN_TREASURY) + .exposingCreatedIdTo(idLit -> tokenMirrorAddr.set(idAsHeadlongAddress(asToken(idLit))))) + .when(sourcing(() -> contractCall( + MINIMAL_CREATIONS_CONTRACT, + "updateTokenWithNewTreasury", + tokenMirrorAddr.get(), + newTreasuryAliasAddr.get()) + .via(updateTxn) + .gas(10L * GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .payingWith(CIVILIAN) + .refusingEthConversion() + .hasKnownStatus(CONTRACT_REVERT_EXECUTED))) + .then( + childRecordsCheck( + updateTxn, + CONTRACT_REVERT_EXECUTED, + TransactionRecordAsserts.recordWith() + .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)), + // Treasury account is unchanged + getTokenInfo(ft).hasTreasury(TOKEN_TREASURY)); + } + + private HapiSpec newAutoRenewAccountMustSignUpdate() { + final var ft = "fungibleToken"; + final var narKey = "narKey"; + final var adminKey = "adminKey"; + final var updateTxn = "updateTxn"; + final var newAutoRenewAccount = "newAutoRenewAccount"; + final AtomicReference
<Address> tokenMirrorAddr = new AtomicReference<>(); + final AtomicReference<Address>
newAutoRenewAliasAddr = new AtomicReference<>(); + + return propertyPreservingHapiSpec("newAutoRenewAccountMustSignUpdate") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenCreate,TokenUpdate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(adminKey), + newKeyNamed(narKey).shape(SECP256K1), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(newAutoRenewAccount) + .maxAutomaticTokenAssociations(2) + .key(narKey) + .exposingCreatedIdTo(id -> newAutoRenewAliasAddr.set(idAsHeadlongAddress(id))), + cryptoCreate(CIVILIAN).balance(10L * ONE_HUNDRED_HBARS), + uploadInitCode(MINIMAL_CREATIONS_CONTRACT), + contractCreate(MINIMAL_CREATIONS_CONTRACT).gas(GAS_TO_OFFER), + tokenCreate(ft) + .autoRenewAccount(TOKEN_TREASURY) + .autoRenewPeriod(THREE_MONTHS_IN_SECONDS - 3600L) + .adminKey(CIVILIAN) + .treasury(TOKEN_TREASURY) + .exposingCreatedIdTo(idLit -> tokenMirrorAddr.set(idAsHeadlongAddress(asToken(idLit))))) + .when(sourcing(() -> contractCall( + MINIMAL_CREATIONS_CONTRACT, + "updateTokenWithNewAutoRenewInfo", + tokenMirrorAddr.get(), + newAutoRenewAliasAddr.get(), + THREE_MONTHS_IN_SECONDS + 3600) + .via(updateTxn) + .gas(10L * GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .payingWith(CIVILIAN) + .refusingEthConversion() + .hasKnownStatus(CONTRACT_REVERT_EXECUTED))) + .then( + childRecordsCheck( + updateTxn, + CONTRACT_REVERT_EXECUTED, + TransactionRecordAsserts.recordWith() + .status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)), + // Auto-renew account is unchanged + getTokenInfo(ft).hasAutoRenewAccount(TOKEN_TREASURY)); + } + + private static HapiCryptoCreate cryptoCreateWithExposingId( + String accountName, String keyName, AtomicReference
addressReference) { + return cryptoCreate(accountName) + .key(keyName) + .exposingCreatedIdTo(id -> addressReference.set(idAsHeadlongAddress(id))); + } + + @Override + protected Logger getResultsLogger() { + return log; + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenExpiryInfoSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenExpiryInfoSuite.java index 9dba6cfa9f41..946b25b69acf 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenExpiryInfoSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenExpiryInfoSuite.java @@ -26,33 +26,23 @@ import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; -import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; import static com.hedera.services.bdd.suites.contract.Utils.asAddress; import static com.hedera.services.bdd.suites.contract.Utils.asToken; -import static com.hedera.services.bdd.suites.contract.precompile.WipeTokenAccountPrecompileSuite.GAS_TO_OFFER; import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_AUTORENEW_ACCOUNT; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_EXPIRATION_TIME; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_RENEWAL_PERIOD; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_TOKEN_ID; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; -import static org.junit.jupiter.api.Assertions.assertEquals; import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; import com.hedera.services.bdd.spec.HapiSpec; -import com.hedera.services.bdd.spec.HapiSpecSetup; -import com.hedera.services.bdd.spec.keys.SigControl; import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; import com.hedera.services.bdd.suites.HapiSuite; import com.hedera.services.bdd.suites.utils.contracts.precompile.TokenKeyType; -import com.hederahashgraph.api.proto.java.AccountID; import com.hederahashgraph.api.proto.java.TokenID; import com.hederahashgraph.api.proto.java.TokenSupplyType; import java.util.List; @@ -65,16 +55,9 @@ public class TokenExpiryInfoSuite extends HapiSuite { private static final Logger log = LogManager.getLogger(TokenExpiryInfoSuite.class); private static final String TOKEN_EXPIRY_CONTRACT = "TokenExpiryContract"; private static final String AUTO_RENEW_ACCOUNT = "autoRenewAccount"; - private static final String UPDATED_AUTO_RENEW_ACCOUNT = 
"updatedAutoRenewAccount"; - private static final String INVALID_ADDRESS = "0x0000000000000000000000000000000000123456"; - private static final long DEFAULT_MAX_LIFETIME = - Long.parseLong(HapiSpecSetup.getDefaultNodeProps().get("entities.maxLifetime")); - public static final long MONTH_IN_SECONDS = 7_000_000L; private static final String ADMIN_KEY = TokenKeyType.ADMIN_KEY.name(); - public static final String UPDATE_EXPIRY_INFO_FOR_TOKEN = "updateExpiryInfoForToken"; public static final String GET_EXPIRY_INFO_FOR_TOKEN = "getExpiryInfoForToken"; - public static final String UPDATE_EXPIRY_INFO_FOR_TOKEN_AND_READ_LATEST_INFO = - "updateExpiryInfoForTokenAndReadLatestInfo"; + public static final int GAS_TO_OFFER = 1_000_000; public static void main(String... args) { new TokenExpiryInfoSuite().runSuiteAsync(); @@ -92,8 +75,7 @@ protected Logger getResultsLogger() { @Override public List getSpecsInSuite() { - return List.of( - getExpiryInfoForToken(), updateExpiryInfoForToken(), updateExpiryInfoForTokenAndReadLatestInfo()); + return List.of(getExpiryInfoForToken()); } private HapiSpec getExpiryInfoForToken() { @@ -172,207 +154,4 @@ private HapiSpec getExpiryInfoForToken() { THREE_MONTHS_IN_SECONDS))))); })); } - - @SuppressWarnings("java:S5960") - private HapiSpec updateExpiryInfoForToken() { - - final AtomicReference vanillaTokenID = new AtomicReference<>(); - final AtomicReference updatedAutoRenewAccountID = new AtomicReference<>(); - - return defaultHapiSpec("UpdateExpiryInfoForToken") - .given( - cryptoCreate(TOKEN_TREASURY).balance(0L), - cryptoCreate(AUTO_RENEW_ACCOUNT).balance(0L), - cryptoCreate(UPDATED_AUTO_RENEW_ACCOUNT) - .balance(0L) - .keyShape(SigControl.ED25519_ON) - .exposingCreatedIdTo(updatedAutoRenewAccountID::set), - newKeyNamed(ADMIN_KEY), - uploadInitCode(TOKEN_EXPIRY_CONTRACT), - contractCreate(TOKEN_EXPIRY_CONTRACT).gas(1_000_000L), - tokenCreate(VANILLA_TOKEN) - .supplyType(TokenSupplyType.FINITE) - .treasury(TOKEN_TREASURY) - .expiry(100) - .autoRenewAccount(AUTO_RENEW_ACCOUNT) - .autoRenewPeriod(THREE_MONTHS_IN_SECONDS) - .maxSupply(1000) - .initialSupply(500L) - .adminKey(ADMIN_KEY) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id)))) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_EXPIRY_CONTRACT, - UPDATE_EXPIRY_INFO_FOR_TOKEN, - asHeadlongAddress(INVALID_ADDRESS), - DEFAULT_MAX_LIFETIME - 12_345L, - HapiParserUtil.asHeadlongAddress(asAddress(updatedAutoRenewAccountID.get())), - MONTH_IN_SECONDS) - .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_AUTO_RENEW_ACCOUNT) - .via("invalidTokenTxn") - .gas(GAS_TO_OFFER) - .payingWith(GENESIS) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - TOKEN_EXPIRY_CONTRACT, - UPDATE_EXPIRY_INFO_FOR_TOKEN, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - DEFAULT_MAX_LIFETIME - 12_345L, - HapiParserUtil.asHeadlongAddress(asAddress(updatedAutoRenewAccountID.get())), - MONTH_IN_SECONDS) - .via("invalidSignatureTxn") - .gas(GAS_TO_OFFER) - .payingWith(GENESIS) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - TOKEN_EXPIRY_CONTRACT, - UPDATE_EXPIRY_INFO_FOR_TOKEN, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - 100L, - HapiParserUtil.asHeadlongAddress(asAddress(updatedAutoRenewAccountID.get())), - MONTH_IN_SECONDS) - .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_AUTO_RENEW_ACCOUNT) - .via("invalidExpiryTxn") - .gas(GAS_TO_OFFER) - .payingWith(GENESIS) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - 
TOKEN_EXPIRY_CONTRACT, - UPDATE_EXPIRY_INFO_FOR_TOKEN, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - DEFAULT_MAX_LIFETIME - 12_345L, - asHeadlongAddress(INVALID_ADDRESS), - MONTH_IN_SECONDS) - .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_AUTO_RENEW_ACCOUNT) - .via("invalidAutoRenewAccountTxn") - .gas(GAS_TO_OFFER) - .payingWith(GENESIS) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - TOKEN_EXPIRY_CONTRACT, - UPDATE_EXPIRY_INFO_FOR_TOKEN, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - DEFAULT_MAX_LIFETIME - 12_345L, - HapiParserUtil.asHeadlongAddress(asAddress(updatedAutoRenewAccountID.get())), - 1L) - .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_AUTO_RENEW_ACCOUNT) - .via("invalidAutoRenewPeriodTxn") - .gas(GAS_TO_OFFER) - .payingWith(GENESIS) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - TOKEN_EXPIRY_CONTRACT, - UPDATE_EXPIRY_INFO_FOR_TOKEN, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - DEFAULT_MAX_LIFETIME - 12_345L, - HapiParserUtil.asHeadlongAddress(asAddress(updatedAutoRenewAccountID.get())), - MONTH_IN_SECONDS) - .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_AUTO_RENEW_ACCOUNT) - .via("updateExpiryTxn") - .gas(GAS_TO_OFFER) - .payingWith(GENESIS)))) - .then( - childRecordsCheck( - "invalidTokenTxn", - CONTRACT_REVERT_EXECUTED, - recordWith().status(INVALID_TOKEN_ID)), - childRecordsCheck( - "invalidSignatureTxn", - CONTRACT_REVERT_EXECUTED, - recordWith().status(INVALID_SIGNATURE)), - childRecordsCheck( - "invalidExpiryTxn", - CONTRACT_REVERT_EXECUTED, - recordWith().status(INVALID_EXPIRATION_TIME)), - childRecordsCheck( - "invalidAutoRenewAccountTxn", - CONTRACT_REVERT_EXECUTED, - recordWith().status(INVALID_AUTORENEW_ACCOUNT)), - childRecordsCheck( - "invalidAutoRenewPeriodTxn", - CONTRACT_REVERT_EXECUTED, - recordWith().status(INVALID_RENEWAL_PERIOD)), - withOpContext((spec, opLog) -> { - final var getTokenInfoQuery = getTokenInfo(VANILLA_TOKEN); - allRunFor(spec, getTokenInfoQuery); - final var expirySecond = getTokenInfoQuery - .getResponse() - .getTokenGetInfo() - .getTokenInfo() - .getExpiry() - .getSeconds(); - final var autoRenewAccount = getTokenInfoQuery - .getResponse() - .getTokenGetInfo() - .getTokenInfo() - .getAutoRenewAccount(); - final var autoRenewPeriod = getTokenInfoQuery - .getResponse() - .getTokenGetInfo() - .getTokenInfo() - .getAutoRenewPeriod() - .getSeconds(); - assertEquals(expirySecond, DEFAULT_MAX_LIFETIME - 12_345L); - assertEquals(autoRenewAccount, spec.registry().getAccountID(UPDATED_AUTO_RENEW_ACCOUNT)); - assertEquals(autoRenewPeriod, MONTH_IN_SECONDS); - })); - } - - private HapiSpec updateExpiryInfoForTokenAndReadLatestInfo() { - - final AtomicReference vanillaTokenID = new AtomicReference<>(); - final AtomicReference updatedAutoRenewAccountID = new AtomicReference<>(); - - return defaultHapiSpec("UpdateExpiryInfoForTokenAndReadLatestInfo") - .given( - cryptoCreate(TOKEN_TREASURY).balance(0L), - cryptoCreate(AUTO_RENEW_ACCOUNT).balance(0L), - cryptoCreate(UPDATED_AUTO_RENEW_ACCOUNT) - .keyShape(SigControl.ED25519_ON) - .balance(0L) - .exposingCreatedIdTo(updatedAutoRenewAccountID::set), - newKeyNamed(ADMIN_KEY), - uploadInitCode(TOKEN_EXPIRY_CONTRACT), - contractCreate(TOKEN_EXPIRY_CONTRACT).gas(1_000_000L), - tokenCreate(VANILLA_TOKEN) - .supplyType(TokenSupplyType.FINITE) - .treasury(TOKEN_TREASURY) - .expiry(100) - .autoRenewAccount(AUTO_RENEW_ACCOUNT) - .autoRenewPeriod(THREE_MONTHS_IN_SECONDS) - .maxSupply(1000) - .initialSupply(500L) 
- .adminKey(ADMIN_KEY) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id)))) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_EXPIRY_CONTRACT, - UPDATE_EXPIRY_INFO_FOR_TOKEN_AND_READ_LATEST_INFO, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - DEFAULT_MAX_LIFETIME - 12_345L, - HapiParserUtil.asHeadlongAddress(asAddress(updatedAutoRenewAccountID.get())), - MONTH_IN_SECONDS) - .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_AUTO_RENEW_ACCOUNT) - .via("updateExpiryAndReadLatestInfoTxn") - .gas(GAS_TO_OFFER) - .payingWith(GENESIS)))) - .then(withOpContext((spec, opLog) -> allRunFor( - spec, - childRecordsCheck( - "updateExpiryAndReadLatestInfoTxn", - SUCCESS, - recordWith().status(SUCCESS), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_GET_TOKEN_EXPIRY_INFO) - .withStatus(SUCCESS) - .withExpiry( - DEFAULT_MAX_LIFETIME - 12_345L, - updatedAutoRenewAccountID.get(), - MONTH_IN_SECONDS))))))); - } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenExpiryInfoV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenExpiryInfoV1SecurityModelSuite.java new file mode 100644 index 000000000000..8e8b9a6a8134 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenExpiryInfoV1SecurityModelSuite.java @@ -0,0 +1,316 @@ +/* + * Copyright (C) 2021-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overridingTwo; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.asToken; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_AUTORENEW_ACCOUNT; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_EXPIRATION_TIME; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_RENEWAL_PERIOD; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_TOKEN_ID; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.HapiSpecSetup; +import com.hedera.services.bdd.spec.keys.SigControl; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hedera.services.bdd.suites.utils.contracts.precompile.TokenKeyType; +import com.hederahashgraph.api.proto.java.AccountID; +import com.hederahashgraph.api.proto.java.TokenID; +import com.hederahashgraph.api.proto.java.TokenSupplyType; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +public class TokenExpiryInfoV1SecurityModelSuite extends 
HapiSuite {
+
+    private static final Logger log = LogManager.getLogger(TokenExpiryInfoV1SecurityModelSuite.class);
+    private static final String TOKEN_EXPIRY_CONTRACT = "TokenExpiryContract";
+    private static final String AUTO_RENEW_ACCOUNT = "autoRenewAccount";
+    private static final String UPDATED_AUTO_RENEW_ACCOUNT = "updatedAutoRenewAccount";
+    private static final String INVALID_ADDRESS = "0x0000000000000000000000000000000000123456";
+    private static final long DEFAULT_MAX_LIFETIME =
+            Long.parseLong(HapiSpecSetup.getDefaultNodeProps().get("entities.maxLifetime"));
+    public static final long MONTH_IN_SECONDS = 7_000_000L;
+    private static final String ADMIN_KEY = TokenKeyType.ADMIN_KEY.name();
+    public static final String UPDATE_EXPIRY_INFO_FOR_TOKEN = "updateExpiryInfoForToken";
+    public static final String UPDATE_EXPIRY_INFO_FOR_TOKEN_AND_READ_LATEST_INFO =
+            "updateExpiryInfoForTokenAndReadLatestInfo";
+    public static final int GAS_TO_OFFER = 1_000_000;
+
+    public static void main(String... args) {
+        new TokenExpiryInfoV1SecurityModelSuite().runSuiteSync();
+    }
+
+    @Override
+    public boolean canRunConcurrent() {
+        return true;
+    }
+
+    @Override
+    protected Logger getResultsLogger() {
+        return log;
+    }
+
+    @Override
+    public List<HapiSpec> getSpecsInSuite() {
+        return List.of(updateExpiryInfoForToken(), updateExpiryInfoForTokenAndReadLatestInfo());
+    }
+
+    @SuppressWarnings({"java:S5960", "java:S1192"
+    }) // using `assertThat` in production code - except this isn't production code
+    private HapiSpec updateExpiryInfoForToken() {
+
+        final AtomicReference<TokenID> vanillaTokenID = new AtomicReference<>();
+        final AtomicReference<AccountID> updatedAutoRenewAccountID = new AtomicReference<>();
+
+        return propertyPreservingHapiSpec("updateExpiryInfoForToken")
+                .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS)
+                .given(
+                        overridingTwo(
+                                CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS,
+                                "ContractCall,TokenCreate,TokenUpdate",
+                                CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS,
+                                CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF),
+                        cryptoCreate(TOKEN_TREASURY).balance(0L),
+                        cryptoCreate(AUTO_RENEW_ACCOUNT).balance(0L),
+                        cryptoCreate(UPDATED_AUTO_RENEW_ACCOUNT)
+                                .balance(0L)
+                                .keyShape(SigControl.ED25519_ON)
+                                .exposingCreatedIdTo(updatedAutoRenewAccountID::set),
+                        newKeyNamed(ADMIN_KEY),
+                        uploadInitCode(TOKEN_EXPIRY_CONTRACT),
+                        contractCreate(TOKEN_EXPIRY_CONTRACT).gas(1_000_000L),
+                        tokenCreate(VANILLA_TOKEN)
+                                .supplyType(TokenSupplyType.FINITE)
+                                .treasury(TOKEN_TREASURY)
+                                .expiry(100)
+                                .autoRenewAccount(AUTO_RENEW_ACCOUNT)
+                                .autoRenewPeriod(THREE_MONTHS_IN_SECONDS)
+                                .maxSupply(1000)
+                                .initialSupply(500L)
+                                .adminKey(ADMIN_KEY)
+                                .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))))
+                .when(withOpContext((spec, opLog) -> allRunFor(
+                        spec,
+                        contractCall(
+                                        TOKEN_EXPIRY_CONTRACT,
+                                        UPDATE_EXPIRY_INFO_FOR_TOKEN,
+                                        asHeadlongAddress(INVALID_ADDRESS),
+                                        DEFAULT_MAX_LIFETIME - 12_345L,
+                                        HapiParserUtil.asHeadlongAddress(asAddress(updatedAutoRenewAccountID.get())),
+                                        MONTH_IN_SECONDS)
+                                .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_AUTO_RENEW_ACCOUNT)
+                                .via("invalidTokenTxn")
+                                .gas(GAS_TO_OFFER)
+                                .payingWith(GENESIS)
+                                .hasKnownStatus(CONTRACT_REVERT_EXECUTED),
+                        contractCall(
+                                        TOKEN_EXPIRY_CONTRACT,
+                                        UPDATE_EXPIRY_INFO_FOR_TOKEN,
+                                        HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())),
+                                        DEFAULT_MAX_LIFETIME - 12_345L,
+                                        HapiParserUtil.asHeadlongAddress(asAddress(updatedAutoRenewAccountID.get())),
+                                        MONTH_IN_SECONDS)
+                                .via("invalidSignatureTxn")
+
.gas(GAS_TO_OFFER) + .payingWith(GENESIS) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + TOKEN_EXPIRY_CONTRACT, + UPDATE_EXPIRY_INFO_FOR_TOKEN, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + 100L, + HapiParserUtil.asHeadlongAddress(asAddress(updatedAutoRenewAccountID.get())), + MONTH_IN_SECONDS) + .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_AUTO_RENEW_ACCOUNT) + .via("invalidExpiryTxn") + .gas(GAS_TO_OFFER) + .payingWith(GENESIS) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + TOKEN_EXPIRY_CONTRACT, + UPDATE_EXPIRY_INFO_FOR_TOKEN, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + DEFAULT_MAX_LIFETIME - 12_345L, + asHeadlongAddress(INVALID_ADDRESS), + MONTH_IN_SECONDS) + .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_AUTO_RENEW_ACCOUNT) + .via("invalidAutoRenewAccountTxn") + .gas(GAS_TO_OFFER) + .payingWith(GENESIS) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + TOKEN_EXPIRY_CONTRACT, + UPDATE_EXPIRY_INFO_FOR_TOKEN, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + DEFAULT_MAX_LIFETIME - 12_345L, + HapiParserUtil.asHeadlongAddress(asAddress(updatedAutoRenewAccountID.get())), + 1L) + .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_AUTO_RENEW_ACCOUNT) + .via("invalidAutoRenewPeriodTxn") + .gas(GAS_TO_OFFER) + .payingWith(GENESIS) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + TOKEN_EXPIRY_CONTRACT, + UPDATE_EXPIRY_INFO_FOR_TOKEN, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + DEFAULT_MAX_LIFETIME - 12_345L, + HapiParserUtil.asHeadlongAddress(asAddress(updatedAutoRenewAccountID.get())), + MONTH_IN_SECONDS) + .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_AUTO_RENEW_ACCOUNT) + .via("updateExpiryTxn") + .gas(GAS_TO_OFFER) + .payingWith(GENESIS)))) + .then( + childRecordsCheck( + "invalidTokenTxn", + CONTRACT_REVERT_EXECUTED, + recordWith().status(INVALID_TOKEN_ID)), + childRecordsCheck( + "invalidSignatureTxn", + CONTRACT_REVERT_EXECUTED, + recordWith().status(INVALID_SIGNATURE)), + childRecordsCheck( + "invalidExpiryTxn", + CONTRACT_REVERT_EXECUTED, + recordWith().status(INVALID_EXPIRATION_TIME)), + childRecordsCheck( + "invalidAutoRenewAccountTxn", + CONTRACT_REVERT_EXECUTED, + recordWith().status(INVALID_AUTORENEW_ACCOUNT)), + childRecordsCheck( + "invalidAutoRenewPeriodTxn", + CONTRACT_REVERT_EXECUTED, + recordWith().status(INVALID_RENEWAL_PERIOD)), + withOpContext((spec, opLog) -> { + final var getTokenInfoQuery = getTokenInfo(VANILLA_TOKEN); + allRunFor(spec, getTokenInfoQuery); + final var expirySecond = getTokenInfoQuery + .getResponse() + .getTokenGetInfo() + .getTokenInfo() + .getExpiry() + .getSeconds(); + final var autoRenewAccount = getTokenInfoQuery + .getResponse() + .getTokenGetInfo() + .getTokenInfo() + .getAutoRenewAccount(); + final var autoRenewPeriod = getTokenInfoQuery + .getResponse() + .getTokenGetInfo() + .getTokenInfo() + .getAutoRenewPeriod() + .getSeconds(); + assertEquals(expirySecond, DEFAULT_MAX_LIFETIME - 12_345L); + assertEquals(autoRenewAccount, spec.registry().getAccountID(UPDATED_AUTO_RENEW_ACCOUNT)); + assertEquals(autoRenewPeriod, MONTH_IN_SECONDS); + })); + } + + @SuppressWarnings("java:S1192") // "use already defined const instead of copying its value here" - not this time + private HapiSpec updateExpiryInfoForTokenAndReadLatestInfo() { + + final AtomicReference vanillaTokenID = new AtomicReference<>(); + final AtomicReference updatedAutoRenewAccountID = new AtomicReference<>(); + + return 
propertyPreservingHapiSpec("updateExpiryInfoForTokenAndReadLatestInfo") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,TokenCreate,TokenUpdate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(TOKEN_TREASURY).balance(0L), + cryptoCreate(AUTO_RENEW_ACCOUNT).balance(0L), + cryptoCreate(UPDATED_AUTO_RENEW_ACCOUNT) + .keyShape(SigControl.ED25519_ON) + .balance(0L) + .exposingCreatedIdTo(updatedAutoRenewAccountID::set), + newKeyNamed(ADMIN_KEY), + uploadInitCode(TOKEN_EXPIRY_CONTRACT), + contractCreate(TOKEN_EXPIRY_CONTRACT).gas(1_000_000L), + tokenCreate(VANILLA_TOKEN) + .supplyType(TokenSupplyType.FINITE) + .treasury(TOKEN_TREASURY) + .expiry(100) + .autoRenewAccount(AUTO_RENEW_ACCOUNT) + .autoRenewPeriod(THREE_MONTHS_IN_SECONDS) + .maxSupply(1000) + .initialSupply(500L) + .adminKey(ADMIN_KEY) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id)))) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_EXPIRY_CONTRACT, + UPDATE_EXPIRY_INFO_FOR_TOKEN_AND_READ_LATEST_INFO, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + DEFAULT_MAX_LIFETIME - 12_345L, + HapiParserUtil.asHeadlongAddress(asAddress(updatedAutoRenewAccountID.get())), + MONTH_IN_SECONDS) + .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_AUTO_RENEW_ACCOUNT) + .via("updateExpiryAndReadLatestInfoTxn") + .gas(GAS_TO_OFFER) + .payingWith(GENESIS)))) + .then(withOpContext((spec, opLog) -> allRunFor( + spec, + childRecordsCheck( + "updateExpiryAndReadLatestInfoTxn", + SUCCESS, + recordWith().status(SUCCESS), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_GET_TOKEN_EXPIRY_INFO) + .withStatus(SUCCESS) + .withExpiry( + DEFAULT_MAX_LIFETIME - 12_345L, + updatedAutoRenewAccountID.get(), + MONTH_IN_SECONDS))))))); + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenInfoHTSSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenInfoHTSSuite.java index a0d64d82aebe..002e7a89dcb9 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenInfoHTSSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenInfoHTSSuite.java @@ -19,8 +19,6 @@ import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; -import static com.hedera.services.bdd.spec.keys.KeyShape.CONTRACT; -import static com.hedera.services.bdd.spec.keys.SigControl.ED25519_ON; import static com.hedera.services.bdd.spec.queries.QueryVerbs.contractCallLocal; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenNftInfo; @@ -47,7 +45,6 @@ import static com.hedera.services.bdd.suites.contract.Utils.asAddress; import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; -import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; 
import com.google.protobuf.ByteString; import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; @@ -96,41 +93,26 @@ public class TokenInfoHTSSuite extends HapiSuite { private static final String AUTO_RENEW_ACCOUNT = "autoRenewAccount"; private static final String FEE_DENOM = "denom"; public static final String HTS_COLLECTOR = "denomFee"; - private static final String ACCOUNT = "Account"; private static final String CREATE_TXN = "CreateTxn"; private static final String TOKEN_INFO_TXN = "TokenInfoTxn"; private static final String FUNGIBLE_TOKEN_INFO_TXN = "FungibleTokenInfoTxn"; - private static final String UPDATE_ANG_GET_TOKEN_INFO_TXN = "UpdateAndGetTokenInfoTxn"; - private static final String UPDATE_ANG_GET_FUNGIBLE_TOKEN_INFO_TXN = "UpdateAndGetFungibleTokenInfoTxn"; - private static final String UPDATE_ANG_GET_NON_FUNGIBLE_TOKEN_INFO_TXN = "UpdateAndGetNonFungibleTokenInfoTxn"; private static final String NON_FUNGIBLE_TOKEN_INFO_TXN = "NonFungibleTokenInfoTxn"; private static final String GET_TOKEN_INFO_TXN = "GetTokenInfo"; private static final String APPROVE_TXN = "approveTxn"; - private static final String UPDATE_AND_GET_TOKEN_KEYS_INFO_TXN = "updateTokenKeysAndReadLatestInformation"; private static final String SYMBOL = "T"; private static final String FUNGIBLE_SYMBOL = "FT"; private static final String FUNGIBLE_TOKEN_NAME = "FungibleToken"; private static final String NON_FUNGIBLE_SYMBOL = "NFT"; private static final String META = "First"; private static final String MEMO = "JUMP"; - private static final String UPDATE_NAME = "NewName"; - private static final String UPDATE_SYMBOL = "NewSymbol"; - private static final String UPDATE_MEMO = "NewMemo"; private static final String PRIMARY_TOKEN_NAME = "primary"; private static final String NFT_OWNER = "NFT Owner"; private static final String NFT_SPENDER = "NFT Spender"; private static final String NON_FUNGIBLE_TOKEN_NAME = "NonFungibleToken"; - private static final String MULTI_KEY = "multiKey"; private static final String GET_INFORMATION_FOR_TOKEN = "getInformationForToken"; private static final String GET_INFORMATION_FOR_FUNGIBLE_TOKEN = "getInformationForFungibleToken"; private static final String GET_INFORMATION_FOR_NON_FUNGIBLE_TOKEN = "getInformationForNonFungibleToken"; - private static final String UPDATE_INFORMATION_FOR_TOKEN_AND_GET_LATEST_INFORMATION = - "updateInformationForTokenAndGetLatestInformation"; - private static final String UPDATE_INFORMATION_FOR_FUNGIBLE_TOKEN_AND_GET_LATEST_INFORMATION = - "updateInformationForFungibleTokenAndGetLatestInformation"; - private static final String UPDATE_INFORMATION_FOR_NON_FUNGIBLE_TOKEN_AND_GET_LATEST_INFORMATION = - "updateInformationForNonFungibleTokenAndGetLatestInformation"; private static final int NUMERATOR = 1; private static final int DENOMINATOR = 2; private static final int MINIMUM_TO_COLLECT = 5; @@ -163,14 +145,10 @@ List negativeSpecs() { List positiveSpecs() { return List.of( happyPathGetTokenInfo(), - happyPathUpdateTokenInfoAndGetLatestInfo(), happyPathGetFungibleTokenInfo(), - happyPathUpdateFungibleTokenInfoAndGetLatestInfo(), happyPathGetNonFungibleTokenInfo(), - happyPathUpdateNonFungibleTokenInfoAndGetLatestInfo(), happyPathGetTokenCustomFees(), - happyPathGetNonFungibleTokenCustomFees(), - happyPathUpdateTokenKeysAndReadLatestInformation()); + happyPathGetNonFungibleTokenCustomFees()); } private HapiSpec happyPathGetTokenInfo() { @@ -264,109 +242,6 @@ private HapiSpec happyPathGetTokenInfo() { })); } - private HapiSpec 
happyPathUpdateTokenInfoAndGetLatestInfo() { - final int decimals = 1; - return defaultHapiSpec("HappyPathUpdateTokenInfoAndGetLatestInfo") - .given( - cryptoCreate(TOKEN_TREASURY).balance(0L), - cryptoCreate(UPDATED_TREASURY) - .keyShape(ED25519_ON) - .balance(0L) - .maxAutomaticTokenAssociations(3), - cryptoCreate(AUTO_RENEW_ACCOUNT).balance(0L), - cryptoCreate(HTS_COLLECTOR), - cryptoCreate(ACCOUNT), - newKeyNamed(ADMIN_KEY), - newKeyNamed(FREEZE_KEY), - newKeyNamed(KYC_KEY), - newKeyNamed(SUPPLY_KEY), - newKeyNamed(WIPE_KEY), - newKeyNamed(FEE_SCHEDULE_KEY), - newKeyNamed(PAUSE_KEY), - uploadInitCode(TOKEN_INFO_CONTRACT), - contractCreate(TOKEN_INFO_CONTRACT).gas(1_000_000L), - tokenCreate(FUNGIBLE_TOKEN_NAME) - .supplyType(TokenSupplyType.FINITE) - .entityMemo(MEMO) - .name(FUNGIBLE_TOKEN_NAME) - .symbol(FUNGIBLE_SYMBOL) - .treasury(TOKEN_TREASURY) - .autoRenewAccount(AUTO_RENEW_ACCOUNT) - .autoRenewPeriod(THREE_MONTHS_IN_SECONDS) - .maxSupply(MAX_SUPPLY) - .initialSupply(500) - .decimals(decimals) - .adminKey(ADMIN_KEY) - .freezeKey(FREEZE_KEY) - .kycKey(KYC_KEY) - .supplyKey(SUPPLY_KEY) - .wipeKey(WIPE_KEY) - .feeScheduleKey(FEE_SCHEDULE_KEY) - .pauseKey(PAUSE_KEY) - .withCustom(fixedHbarFee(500L, HTS_COLLECTOR)) - // Include a fractional fee with no minimum to collect - .withCustom(fractionalFee( - NUMERATOR, DENOMINATOR * 2L, 0, OptionalLong.empty(), TOKEN_TREASURY)) - .withCustom(fractionalFee( - NUMERATOR, - DENOMINATOR, - MINIMUM_TO_COLLECT, - OptionalLong.of(MAXIMUM_TO_COLLECT), - TOKEN_TREASURY)) - .via(CREATE_TXN), - tokenAssociate(ACCOUNT, FUNGIBLE_TOKEN_NAME)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_INFO_CONTRACT, - UPDATE_INFORMATION_FOR_TOKEN_AND_GET_LATEST_INFORMATION, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN_NAME))), - UPDATE_NAME, - UPDATE_SYMBOL, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(UPDATED_TREASURY))), - UPDATE_MEMO) - .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_TREASURY) - .payingWith(ACCOUNT) - .via(UPDATE_ANG_GET_TOKEN_INFO_TXN) - .gas(1_000_000L)))) - .then(withOpContext((spec, opLog) -> { - final var getTokenInfoQuery = getTokenInfo(FUNGIBLE_TOKEN_NAME); - allRunFor(spec, getTokenInfoQuery); - final var expirySecond = getTokenInfoQuery - .getResponse() - .getTokenGetInfo() - .getTokenInfo() - .getExpiry() - .getSeconds(); - allRunFor( - spec, - getTxnRecord(UPDATE_ANG_GET_TOKEN_INFO_TXN) - .andAllChildRecords() - .logged(), - childRecordsCheck( - UPDATE_ANG_GET_TOKEN_INFO_TXN, - SUCCESS, - recordWith().status(SUCCESS), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_GET_TOKEN_INFO) - .withStatus(SUCCESS) - .withDecimals(decimals) - .withTokenInfo(getTokenInfoStructForFungibleToken( - spec, - UPDATE_NAME, - UPDATE_SYMBOL, - UPDATE_MEMO, - spec.registry() - .getAccountID(UPDATED_TREASURY), - expirySecond)))))); - })); - } - private HapiSpec happyPathGetFungibleTokenInfo() { final int decimals = 1; return defaultHapiSpec("HappyPathGetFungibleTokenInfo") @@ -459,106 +334,6 @@ private HapiSpec happyPathGetFungibleTokenInfo() { })); } - private HapiSpec happyPathUpdateFungibleTokenInfoAndGetLatestInfo() { - final int decimals = 1; - return defaultHapiSpec("HappyPathUpdateFungibleTokenInfoAndGetLatestInfo") - .given( - cryptoCreate(TOKEN_TREASURY).balance(0L), - cryptoCreate(UPDATED_TREASURY).balance(0L).maxAutomaticTokenAssociations(3), - 
cryptoCreate(AUTO_RENEW_ACCOUNT).balance(0L), - cryptoCreate(HTS_COLLECTOR), - cryptoCreate(ACCOUNT), - newKeyNamed(ADMIN_KEY), - newKeyNamed(FREEZE_KEY), - newKeyNamed(KYC_KEY), - newKeyNamed(SUPPLY_KEY), - newKeyNamed(WIPE_KEY), - newKeyNamed(FEE_SCHEDULE_KEY), - newKeyNamed(PAUSE_KEY), - uploadInitCode(TOKEN_INFO_CONTRACT), - contractCreate(TOKEN_INFO_CONTRACT).gas(1_000_000L), - tokenCreate(FUNGIBLE_TOKEN_NAME) - .supplyType(TokenSupplyType.FINITE) - .entityMemo(MEMO) - .name(FUNGIBLE_TOKEN_NAME) - .symbol(FUNGIBLE_SYMBOL) - .treasury(TOKEN_TREASURY) - .autoRenewAccount(AUTO_RENEW_ACCOUNT) - .autoRenewPeriod(THREE_MONTHS_IN_SECONDS) - .maxSupply(MAX_SUPPLY) - .initialSupply(500) - .decimals(decimals) - .adminKey(ADMIN_KEY) - .freezeKey(FREEZE_KEY) - .kycKey(KYC_KEY) - .supplyKey(SUPPLY_KEY) - .wipeKey(WIPE_KEY) - .feeScheduleKey(FEE_SCHEDULE_KEY) - .pauseKey(PAUSE_KEY) - .withCustom(fixedHbarFee(500L, HTS_COLLECTOR)) - // Include a fractional fee with no minimum to collect - .withCustom(fractionalFee( - NUMERATOR, DENOMINATOR * 2L, 0, OptionalLong.empty(), TOKEN_TREASURY)) - .withCustom(fractionalFee( - NUMERATOR, - DENOMINATOR, - MINIMUM_TO_COLLECT, - OptionalLong.of(MAXIMUM_TO_COLLECT), - TOKEN_TREASURY)) - .via(CREATE_TXN), - tokenAssociate(ACCOUNT, FUNGIBLE_TOKEN_NAME)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_INFO_CONTRACT, - UPDATE_INFORMATION_FOR_FUNGIBLE_TOKEN_AND_GET_LATEST_INFORMATION, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN_NAME))), - UPDATE_NAME, - UPDATE_SYMBOL, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(UPDATED_TREASURY))), - UPDATE_MEMO) - .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_TREASURY) - .payingWith(ACCOUNT) - .via(UPDATE_ANG_GET_FUNGIBLE_TOKEN_INFO_TXN) - .gas(1_000_000L)))) - .then(withOpContext((spec, opLog) -> { - final var getTokenInfoQuery = getTokenInfo(FUNGIBLE_TOKEN_NAME); - allRunFor(spec, getTokenInfoQuery); - final var expirySecond = getTokenInfoQuery - .getResponse() - .getTokenGetInfo() - .getTokenInfo() - .getExpiry() - .getSeconds(); - allRunFor( - spec, - getTxnRecord(UPDATE_ANG_GET_FUNGIBLE_TOKEN_INFO_TXN) - .andAllChildRecords() - .logged(), - childRecordsCheck( - UPDATE_ANG_GET_FUNGIBLE_TOKEN_INFO_TXN, - SUCCESS, - recordWith().status(SUCCESS), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_GET_FUNGIBLE_TOKEN_INFO) - .withStatus(SUCCESS) - .withDecimals(decimals) - .withTokenInfo(getTokenInfoStructForFungibleToken( - spec, - UPDATE_NAME, - UPDATE_SYMBOL, - UPDATE_MEMO, - spec.registry() - .getAccountID(UPDATED_TREASURY), - expirySecond)))))); - })); - } - private HapiSpec happyPathGetNonFungibleTokenInfo() { final int maxSupply = 10; final ByteString meta = ByteString.copyFrom(META.getBytes(StandardCharsets.UTF_8)); @@ -667,119 +442,6 @@ private HapiSpec happyPathGetNonFungibleTokenInfo() { })); } - private HapiSpec happyPathUpdateNonFungibleTokenInfoAndGetLatestInfo() { - final int maxSupply = 10; - final ByteString meta = ByteString.copyFrom(META.getBytes(StandardCharsets.UTF_8)); - return defaultHapiSpec("HappyPathUpdateNonFungibleTokenInfoAndGetLatestInfo") - .given( - cryptoCreate(TOKEN_TREASURY).balance(0L), - cryptoCreate(UPDATED_TREASURY) - .balance(0L) - .keyShape(ED25519_ON) - .maxAutomaticTokenAssociations(2), - cryptoCreate(AUTO_RENEW_ACCOUNT).balance(0L), - cryptoCreate(NFT_OWNER), - cryptoCreate(NFT_SPENDER), 
- cryptoCreate(HTS_COLLECTOR), - newKeyNamed(ADMIN_KEY), - newKeyNamed(FREEZE_KEY), - newKeyNamed(KYC_KEY), - newKeyNamed(SUPPLY_KEY), - newKeyNamed(WIPE_KEY), - newKeyNamed(FEE_SCHEDULE_KEY), - newKeyNamed(PAUSE_KEY), - uploadInitCode(TOKEN_INFO_CONTRACT), - contractCreate(TOKEN_INFO_CONTRACT).gas(1_000_000L), - tokenCreate(FEE_DENOM).treasury(HTS_COLLECTOR), - tokenCreate(NON_FUNGIBLE_TOKEN_NAME) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .supplyType(TokenSupplyType.FINITE) - .entityMemo(MEMO) - .name(NON_FUNGIBLE_TOKEN_NAME) - .symbol(NON_FUNGIBLE_SYMBOL) - .treasury(TOKEN_TREASURY) - .autoRenewAccount(AUTO_RENEW_ACCOUNT) - .autoRenewPeriod(THREE_MONTHS_IN_SECONDS) - .maxSupply(maxSupply) - .initialSupply(0) - .adminKey(ADMIN_KEY) - .freezeKey(FREEZE_KEY) - .kycKey(KYC_KEY) - .supplyKey(SUPPLY_KEY) - .wipeKey(WIPE_KEY) - .feeScheduleKey(FEE_SCHEDULE_KEY) - .pauseKey(PAUSE_KEY) - .withCustom(royaltyFeeWithFallback( - 1, 2, fixedHtsFeeInheritingRoyaltyCollector(100, FEE_DENOM), HTS_COLLECTOR)) - .via(CREATE_TXN), - mintToken(NON_FUNGIBLE_TOKEN_NAME, List.of(meta)), - tokenAssociate(NFT_OWNER, List.of(NON_FUNGIBLE_TOKEN_NAME)), - tokenAssociate(NFT_SPENDER, List.of(NON_FUNGIBLE_TOKEN_NAME)), - grantTokenKyc(NON_FUNGIBLE_TOKEN_NAME, NFT_OWNER), - cryptoTransfer(TokenMovement.movingUnique(NON_FUNGIBLE_TOKEN_NAME, 1L) - .between(TOKEN_TREASURY, NFT_OWNER)), - cryptoApproveAllowance() - .payingWith(DEFAULT_PAYER) - .addNftAllowance(NFT_OWNER, NON_FUNGIBLE_TOKEN_NAME, NFT_SPENDER, false, List.of(1L)) - .via(APPROVE_TXN) - .logged() - .signedBy(DEFAULT_PAYER, NFT_OWNER) - .fee(ONE_HBAR)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_INFO_CONTRACT, - UPDATE_INFORMATION_FOR_NON_FUNGIBLE_TOKEN_AND_GET_LATEST_INFORMATION, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(NON_FUNGIBLE_TOKEN_NAME))), - 1L, - UPDATE_NAME, - UPDATE_SYMBOL, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(UPDATED_TREASURY))), - UPDATE_MEMO) - .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_TREASURY) - .via(UPDATE_ANG_GET_NON_FUNGIBLE_TOKEN_INFO_TXN) - .gas(1_000_000L)))) - .then(withOpContext((spec, opLog) -> { - final var getTokenInfoQuery = getTokenInfo(NON_FUNGIBLE_TOKEN_NAME); - allRunFor(spec, getTokenInfoQuery); - final var expirySecond = getTokenInfoQuery - .getResponse() - .getTokenGetInfo() - .getTokenInfo() - .getExpiry() - .getSeconds(); - - final var nftTokenInfo = getTokenNftInfoForCheck(spec, getTokenInfoQuery, meta); - - allRunFor( - spec, - getTxnRecord(UPDATE_ANG_GET_NON_FUNGIBLE_TOKEN_INFO_TXN) - .andAllChildRecords() - .logged(), - childRecordsCheck( - UPDATE_ANG_GET_NON_FUNGIBLE_TOKEN_INFO_TXN, - SUCCESS, - recordWith().status(SUCCESS), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_GET_NON_FUNGIBLE_TOKEN_INFO) - .withStatus(SUCCESS) - .withTokenInfo(getTokenInfoStructForNonFungibleToken( - spec, - UPDATE_NAME, - UPDATE_SYMBOL, - UPDATE_MEMO, - spec.registry() - .getAccountID(UPDATED_TREASURY), - expirySecond)) - .withNftTokenInfo(nftTokenInfo))))); - })); - } - private HapiSpec getInfoOnDeletedFungibleTokenWorks() { return defaultHapiSpec("getInfoOnDeletedFungibleTokenWorks") .given( @@ -1086,117 +748,6 @@ private HapiSpec happyPathGetNonFungibleTokenCustomFees() { .withCustomFees(getCustomFeeForNFT(spec)))))))); } - private HapiSpec happyPathUpdateTokenKeysAndReadLatestInformation() { - final String 
TOKEN_INFO_AS_KEY = "TOKEN_INFO_CONTRACT_KEY"; - return defaultHapiSpec("happyPathUpdateTokenKeysAndReadLatestInformation") - .given( - cryptoCreate(TOKEN_TREASURY).balance(0L), - cryptoCreate(AUTO_RENEW_ACCOUNT).balance(0L), - cryptoCreate(HTS_COLLECTOR), - cryptoCreate(ACCOUNT), - uploadInitCode(TOKEN_INFO_CONTRACT), - contractCreate(TOKEN_INFO_CONTRACT).gas(1_000_000L), - newKeyNamed(MULTI_KEY), - newKeyNamed(TOKEN_INFO_AS_KEY).shape(CONTRACT.signedWith(TOKEN_INFO_CONTRACT)), - tokenCreate(FUNGIBLE_TOKEN_NAME) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .feeScheduleKey(MULTI_KEY) - .pauseKey(MULTI_KEY) - .wipeKey(MULTI_KEY) - .freezeKey(MULTI_KEY) - .kycKey(MULTI_KEY) - .initialSupply(1_000), - tokenAssociate(ACCOUNT, FUNGIBLE_TOKEN_NAME)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_INFO_CONTRACT, - UPDATE_AND_GET_TOKEN_KEYS_INFO_TXN, - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN_NAME))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_INFO_CONTRACT)))) - .via(UPDATE_AND_GET_TOKEN_KEYS_INFO_TXN) - .alsoSigningWithFullPrefix(MULTI_KEY)))) - .then(withOpContext((spec, opLog) -> allRunFor( - spec, - getTxnRecord(UPDATE_AND_GET_TOKEN_KEYS_INFO_TXN) - .andAllChildRecords() - .logged(), - childRecordsCheck( - UPDATE_AND_GET_TOKEN_KEYS_INFO_TXN, - SUCCESS, - recordWith().status(SUCCESS), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_GET_TOKEN_KEY) - .withStatus(SUCCESS) - .withTokenKeyValue( - // - // spec.registry().getKey(TOKEN_INFO_AS_KEY) - Key.newBuilder() - .setContractID( - spec.registry() - .getContractId( - TOKEN_INFO_CONTRACT)) - .build()))), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_GET_TOKEN_KEY) - .withStatus(SUCCESS) - .withTokenKeyValue(Key.newBuilder() - .setContractID(spec.registry() - .getContractId(TOKEN_INFO_CONTRACT)) - .build()))), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_GET_TOKEN_KEY) - .withStatus(SUCCESS) - .withTokenKeyValue( - spec.registry().getKey(TOKEN_INFO_AS_KEY)))), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_GET_TOKEN_KEY) - .withStatus(SUCCESS) - .withTokenKeyValue( - spec.registry().getKey(TOKEN_INFO_AS_KEY)))), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_GET_TOKEN_KEY) - .withStatus(SUCCESS) - .withTokenKeyValue( - spec.registry().getKey(TOKEN_INFO_AS_KEY)))), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_GET_TOKEN_KEY) - .withStatus(SUCCESS) - .withTokenKeyValue( - spec.registry().getKey(TOKEN_INFO_AS_KEY)))), - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.HAPI_GET_TOKEN_KEY) - .withStatus(SUCCESS) - .withTokenKeyValue( - spec.registry().getKey(TOKEN_INFO_AS_KEY)))))))); - } - private TokenNftInfo getTokenNftInfoForCheck( final HapiSpec spec, final HapiGetTokenInfo getTokenInfoQuery, 
final ByteString meta) { final var tokenId = diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenInfoHTSV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenInfoHTSV1SecurityModelSuite.java new file mode 100644 index 000000000000..9dc8c00fd150 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenInfoHTSV1SecurityModelSuite.java @@ -0,0 +1,796 @@ +/* + * Copyright (C) 2022-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.keys.KeyShape.CONTRACT; +import static com.hedera.services.bdd.spec.keys.SigControl.ED25519_ON; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenNftInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoApproveAllowance; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.grantTokenKyc; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.fixedHbarFee; +import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.fixedHtsFeeInheritingRoyaltyCollector; +import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.fractionalFee; +import static com.hedera.services.bdd.spec.transactions.token.CustomFeeSpecs.royaltyFeeWithFallback; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.*; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.*; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static 
com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; +import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; + +import com.google.protobuf.ByteString; +import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.queries.token.HapiGetTokenInfo; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.spec.transactions.token.TokenMovement; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hedera.services.bdd.suites.utils.contracts.precompile.TokenKeyType; +import com.hederahashgraph.api.proto.java.AccountID; +import com.hederahashgraph.api.proto.java.CustomFee; +import com.hederahashgraph.api.proto.java.Duration; +import com.hederahashgraph.api.proto.java.FixedFee; +import com.hederahashgraph.api.proto.java.Fraction; +import com.hederahashgraph.api.proto.java.FractionalFee; +import com.hederahashgraph.api.proto.java.Key; +import com.hederahashgraph.api.proto.java.NftID; +import com.hederahashgraph.api.proto.java.RoyaltyFee; +import com.hederahashgraph.api.proto.java.Timestamp; +import com.hederahashgraph.api.proto.java.TokenInfo; +import com.hederahashgraph.api.proto.java.TokenNftInfo; +import com.hederahashgraph.api.proto.java.TokenSupplyType; +import com.hederahashgraph.api.proto.java.TokenType; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.OptionalLong; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.tuweni.bytes.Bytes; +import org.jetbrains.annotations.NotNull; + +@SuppressWarnings("java:S1192") // "string literal should not be duplicated" - this rule makes test suites worse +public class TokenInfoHTSV1SecurityModelSuite extends HapiSuite { + + private static final Logger LOG = LogManager.getLogger(TokenInfoHTSV1SecurityModelSuite.class); + + private static final String TOKEN_INFO_CONTRACT = "TokenInfoContract"; + private static final String ADMIN_KEY = TokenKeyType.ADMIN_KEY.name(); + private static final String KYC_KEY = TokenKeyType.KYC_KEY.name(); + private static final String SUPPLY_KEY = TokenKeyType.SUPPLY_KEY.name(); + private static final String FREEZE_KEY = TokenKeyType.FREEZE_KEY.name(); + private static final String WIPE_KEY = TokenKeyType.WIPE_KEY.name(); + private static final String FEE_SCHEDULE_KEY = TokenKeyType.FEE_SCHEDULE_KEY.name(); + private static final String PAUSE_KEY = TokenKeyType.PAUSE_KEY.name(); + private static final String AUTO_RENEW_ACCOUNT = "autoRenewAccount"; + private static final String FEE_DENOM = "denom"; + public static final String HTS_COLLECTOR = "denomFee"; + private static final String ACCOUNT = "Account"; + private static final String CREATE_TXN = "CreateTxn"; + private static final String UPDATE_ANG_GET_TOKEN_INFO_TXN = "UpdateAndGetTokenInfoTxn"; + private static final String UPDATE_ANG_GET_FUNGIBLE_TOKEN_INFO_TXN = "UpdateAndGetFungibleTokenInfoTxn"; + private static final String UPDATE_ANG_GET_NON_FUNGIBLE_TOKEN_INFO_TXN = "UpdateAndGetNonFungibleTokenInfoTxn"; + private static final String APPROVE_TXN = "approveTxn"; + private static final String UPDATE_AND_GET_TOKEN_KEYS_INFO_TXN = "updateTokenKeysAndReadLatestInformation"; + private static final String FUNGIBLE_SYMBOL = "FT"; + private static final String FUNGIBLE_TOKEN_NAME = "FungibleToken"; + private static final String NON_FUNGIBLE_SYMBOL = "NFT"; + private static 
final String META = "First"; + private static final String MEMO = "JUMP"; + private static final String UPDATE_NAME = "NewName"; + private static final String UPDATE_SYMBOL = "NewSymbol"; + private static final String UPDATE_MEMO = "NewMemo"; + private static final String NFT_OWNER = "NFT Owner"; + private static final String NFT_SPENDER = "NFT Spender"; + private static final String NON_FUNGIBLE_TOKEN_NAME = "NonFungibleToken"; + private static final String MULTI_KEY = "multiKey"; + + private static final String UPDATE_INFORMATION_FOR_TOKEN_AND_GET_LATEST_INFORMATION = + "updateInformationForTokenAndGetLatestInformation"; + private static final String UPDATE_INFORMATION_FOR_FUNGIBLE_TOKEN_AND_GET_LATEST_INFORMATION = + "updateInformationForFungibleTokenAndGetLatestInformation"; + private static final String UPDATE_INFORMATION_FOR_NON_FUNGIBLE_TOKEN_AND_GET_LATEST_INFORMATION = + "updateInformationForNonFungibleTokenAndGetLatestInformation"; + private static final int NUMERATOR = 1; + private static final int DENOMINATOR = 2; + private static final int MINIMUM_TO_COLLECT = 5; + private static final int MAXIMUM_TO_COLLECT = 400; + private static final int MAX_SUPPLY = 1000; + + public static void main(final String... args) { + new TokenInfoHTSV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + public List getSpecsInSuite() { + return allOf(positiveSpecs(), negativeSpecs()); + } + + List negativeSpecs() { + return List.of(); + } + + List positiveSpecs() { + return List.of( + happyPathUpdateTokenInfoAndGetLatestInfo(), + happyPathUpdateFungibleTokenInfoAndGetLatestInfo(), + happyPathUpdateNonFungibleTokenInfoAndGetLatestInfo(), + happyPathUpdateTokenKeysAndReadLatestInformation()); + } + + private HapiSpec happyPathUpdateTokenInfoAndGetLatestInfo() { + final int decimals = 1; + return propertyPreservingHapiSpec("happyPathUpdateTokenInfoAndGetLatestInfo") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,TokenAssociateToAccount,TokenCreate,TokenUpdate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(TOKEN_TREASURY).balance(0L), + cryptoCreate(UPDATED_TREASURY) + .keyShape(ED25519_ON) + .balance(0L) + .maxAutomaticTokenAssociations(3), + cryptoCreate(AUTO_RENEW_ACCOUNT).balance(0L), + cryptoCreate(HTS_COLLECTOR), + cryptoCreate(ACCOUNT), + newKeyNamed(ADMIN_KEY), + newKeyNamed(FREEZE_KEY), + newKeyNamed(KYC_KEY), + newKeyNamed(SUPPLY_KEY), + newKeyNamed(WIPE_KEY), + newKeyNamed(FEE_SCHEDULE_KEY), + newKeyNamed(PAUSE_KEY), + uploadInitCode(TOKEN_INFO_CONTRACT), + contractCreate(TOKEN_INFO_CONTRACT).gas(1_000_000L), + tokenCreate(FUNGIBLE_TOKEN_NAME) + .supplyType(TokenSupplyType.FINITE) + .entityMemo(MEMO) + .name(FUNGIBLE_TOKEN_NAME) + .symbol(FUNGIBLE_SYMBOL) + .treasury(TOKEN_TREASURY) + .autoRenewAccount(AUTO_RENEW_ACCOUNT) + .autoRenewPeriod(THREE_MONTHS_IN_SECONDS) + .maxSupply(MAX_SUPPLY) + .initialSupply(500) + .decimals(decimals) + .adminKey(ADMIN_KEY) + .freezeKey(FREEZE_KEY) + .kycKey(KYC_KEY) + .supplyKey(SUPPLY_KEY) + .wipeKey(WIPE_KEY) + .feeScheduleKey(FEE_SCHEDULE_KEY) + .pauseKey(PAUSE_KEY) + .withCustom(fixedHbarFee(500L, HTS_COLLECTOR)) + // Include a fractional fee with no minimum to collect + .withCustom(fractionalFee( + NUMERATOR, DENOMINATOR * 2L, 0, OptionalLong.empty(), TOKEN_TREASURY)) + 
.withCustom(fractionalFee( + NUMERATOR, + DENOMINATOR, + MINIMUM_TO_COLLECT, + OptionalLong.of(MAXIMUM_TO_COLLECT), + TOKEN_TREASURY)) + .via(CREATE_TXN), + tokenAssociate(ACCOUNT, FUNGIBLE_TOKEN_NAME)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_INFO_CONTRACT, + UPDATE_INFORMATION_FOR_TOKEN_AND_GET_LATEST_INFORMATION, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN_NAME))), + UPDATE_NAME, + UPDATE_SYMBOL, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(UPDATED_TREASURY))), + UPDATE_MEMO) + .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_TREASURY) + .payingWith(ACCOUNT) + .via(UPDATE_ANG_GET_TOKEN_INFO_TXN) + .gas(1_000_000L)))) + .then(withOpContext((spec, opLog) -> { + final var getTokenInfoQuery = getTokenInfo(FUNGIBLE_TOKEN_NAME); + allRunFor(spec, getTokenInfoQuery); + final var expirySecond = getTokenInfoQuery + .getResponse() + .getTokenGetInfo() + .getTokenInfo() + .getExpiry() + .getSeconds(); + allRunFor( + spec, + getTxnRecord(UPDATE_ANG_GET_TOKEN_INFO_TXN) + .andAllChildRecords() + .logged(), + childRecordsCheck( + UPDATE_ANG_GET_TOKEN_INFO_TXN, + SUCCESS, + recordWith().status(SUCCESS), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_GET_TOKEN_INFO) + .withStatus(SUCCESS) + .withDecimals(decimals) + .withTokenInfo(getTokenInfoStructForFungibleToken( + spec, + UPDATE_NAME, + UPDATE_SYMBOL, + UPDATE_MEMO, + spec.registry() + .getAccountID(UPDATED_TREASURY), + expirySecond)))))); + })); + } + + private HapiSpec happyPathUpdateFungibleTokenInfoAndGetLatestInfo() { + final int decimals = 1; + return propertyPreservingHapiSpec("happyPathUpdateFungibleTokenInfoAndGetLatestInfo") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,TokenAssociateToAccount,TokenCreate,TokenUpdate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(TOKEN_TREASURY).balance(0L), + cryptoCreate(UPDATED_TREASURY).balance(0L).maxAutomaticTokenAssociations(3), + cryptoCreate(AUTO_RENEW_ACCOUNT).balance(0L), + cryptoCreate(HTS_COLLECTOR), + cryptoCreate(ACCOUNT), + newKeyNamed(ADMIN_KEY), + newKeyNamed(FREEZE_KEY), + newKeyNamed(KYC_KEY), + newKeyNamed(SUPPLY_KEY), + newKeyNamed(WIPE_KEY), + newKeyNamed(FEE_SCHEDULE_KEY), + newKeyNamed(PAUSE_KEY), + uploadInitCode(TOKEN_INFO_CONTRACT), + contractCreate(TOKEN_INFO_CONTRACT).gas(1_000_000L), + tokenCreate(FUNGIBLE_TOKEN_NAME) + .supplyType(TokenSupplyType.FINITE) + .entityMemo(MEMO) + .name(FUNGIBLE_TOKEN_NAME) + .symbol(FUNGIBLE_SYMBOL) + .treasury(TOKEN_TREASURY) + .autoRenewAccount(AUTO_RENEW_ACCOUNT) + .autoRenewPeriod(THREE_MONTHS_IN_SECONDS) + .maxSupply(MAX_SUPPLY) + .initialSupply(500) + .decimals(decimals) + .adminKey(ADMIN_KEY) + .freezeKey(FREEZE_KEY) + .kycKey(KYC_KEY) + .supplyKey(SUPPLY_KEY) + .wipeKey(WIPE_KEY) + .feeScheduleKey(FEE_SCHEDULE_KEY) + .pauseKey(PAUSE_KEY) + .withCustom(fixedHbarFee(500L, HTS_COLLECTOR)) + // Include a fractional fee with no minimum to collect + .withCustom(fractionalFee( + NUMERATOR, DENOMINATOR * 2L, 0, OptionalLong.empty(), TOKEN_TREASURY)) + .withCustom(fractionalFee( + NUMERATOR, + DENOMINATOR, + MINIMUM_TO_COLLECT, + OptionalLong.of(MAXIMUM_TO_COLLECT), + TOKEN_TREASURY)) + .via(CREATE_TXN), + tokenAssociate(ACCOUNT, 
FUNGIBLE_TOKEN_NAME)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_INFO_CONTRACT, + UPDATE_INFORMATION_FOR_FUNGIBLE_TOKEN_AND_GET_LATEST_INFORMATION, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN_NAME))), + UPDATE_NAME, + UPDATE_SYMBOL, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(UPDATED_TREASURY))), + UPDATE_MEMO) + .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_TREASURY) + .payingWith(ACCOUNT) + .via(UPDATE_ANG_GET_FUNGIBLE_TOKEN_INFO_TXN) + .gas(1_000_000L)))) + .then(withOpContext((spec, opLog) -> { + final var getTokenInfoQuery = getTokenInfo(FUNGIBLE_TOKEN_NAME); + allRunFor(spec, getTokenInfoQuery); + final var expirySecond = getTokenInfoQuery + .getResponse() + .getTokenGetInfo() + .getTokenInfo() + .getExpiry() + .getSeconds(); + allRunFor( + spec, + getTxnRecord(UPDATE_ANG_GET_FUNGIBLE_TOKEN_INFO_TXN) + .andAllChildRecords() + .logged(), + childRecordsCheck( + UPDATE_ANG_GET_FUNGIBLE_TOKEN_INFO_TXN, + SUCCESS, + recordWith().status(SUCCESS), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_GET_FUNGIBLE_TOKEN_INFO) + .withStatus(SUCCESS) + .withDecimals(decimals) + .withTokenInfo(getTokenInfoStructForFungibleToken( + spec, + UPDATE_NAME, + UPDATE_SYMBOL, + UPDATE_MEMO, + spec.registry() + .getAccountID(UPDATED_TREASURY), + expirySecond)))))); + })); + } + + private HapiSpec happyPathUpdateNonFungibleTokenInfoAndGetLatestInfo() { + final int maxSupply = 10; + final ByteString meta = ByteString.copyFrom(META.getBytes(StandardCharsets.UTF_8)); + return propertyPreservingHapiSpec("happyPathUpdateNonFungibleTokenInfoAndGetLatestInfo") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,TokenAssociateToAccount,TokenCreate,TokenUpdate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(TOKEN_TREASURY).balance(0L), + cryptoCreate(UPDATED_TREASURY) + .balance(0L) + .keyShape(ED25519_ON) + .maxAutomaticTokenAssociations(2), + cryptoCreate(AUTO_RENEW_ACCOUNT).balance(0L), + cryptoCreate(NFT_OWNER), + cryptoCreate(NFT_SPENDER), + cryptoCreate(HTS_COLLECTOR), + newKeyNamed(ADMIN_KEY), + newKeyNamed(FREEZE_KEY), + newKeyNamed(KYC_KEY), + newKeyNamed(SUPPLY_KEY), + newKeyNamed(WIPE_KEY), + newKeyNamed(FEE_SCHEDULE_KEY), + newKeyNamed(PAUSE_KEY), + uploadInitCode(TOKEN_INFO_CONTRACT), + contractCreate(TOKEN_INFO_CONTRACT).gas(1_000_000L), + tokenCreate(FEE_DENOM).treasury(HTS_COLLECTOR), + tokenCreate(NON_FUNGIBLE_TOKEN_NAME) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .supplyType(TokenSupplyType.FINITE) + .entityMemo(MEMO) + .name(NON_FUNGIBLE_TOKEN_NAME) + .symbol(NON_FUNGIBLE_SYMBOL) + .treasury(TOKEN_TREASURY) + .autoRenewAccount(AUTO_RENEW_ACCOUNT) + .autoRenewPeriod(THREE_MONTHS_IN_SECONDS) + .maxSupply(maxSupply) + .initialSupply(0) + .adminKey(ADMIN_KEY) + .freezeKey(FREEZE_KEY) + .kycKey(KYC_KEY) + .supplyKey(SUPPLY_KEY) + .wipeKey(WIPE_KEY) + .feeScheduleKey(FEE_SCHEDULE_KEY) + .pauseKey(PAUSE_KEY) + .withCustom(royaltyFeeWithFallback( + 1, 2, fixedHtsFeeInheritingRoyaltyCollector(100, FEE_DENOM), HTS_COLLECTOR)) + .via(CREATE_TXN), + mintToken(NON_FUNGIBLE_TOKEN_NAME, List.of(meta)), + tokenAssociate(NFT_OWNER, List.of(NON_FUNGIBLE_TOKEN_NAME)), + tokenAssociate(NFT_SPENDER, 
List.of(NON_FUNGIBLE_TOKEN_NAME)), + grantTokenKyc(NON_FUNGIBLE_TOKEN_NAME, NFT_OWNER), + cryptoTransfer(TokenMovement.movingUnique(NON_FUNGIBLE_TOKEN_NAME, 1L) + .between(TOKEN_TREASURY, NFT_OWNER)), + cryptoApproveAllowance() + .payingWith(DEFAULT_PAYER) + .addNftAllowance(NFT_OWNER, NON_FUNGIBLE_TOKEN_NAME, NFT_SPENDER, false, List.of(1L)) + .via(APPROVE_TXN) + .logged() + .signedBy(DEFAULT_PAYER, NFT_OWNER) + .fee(ONE_HBAR)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_INFO_CONTRACT, + UPDATE_INFORMATION_FOR_NON_FUNGIBLE_TOKEN_AND_GET_LATEST_INFORMATION, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(NON_FUNGIBLE_TOKEN_NAME))), + 1L, + UPDATE_NAME, + UPDATE_SYMBOL, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(UPDATED_TREASURY))), + UPDATE_MEMO) + .alsoSigningWithFullPrefix(ADMIN_KEY, UPDATED_TREASURY) + .via(UPDATE_ANG_GET_NON_FUNGIBLE_TOKEN_INFO_TXN) + .gas(1_000_000L)))) + .then(withOpContext((spec, opLog) -> { + final var getTokenInfoQuery = getTokenInfo(NON_FUNGIBLE_TOKEN_NAME); + allRunFor(spec, getTokenInfoQuery); + final var expirySecond = getTokenInfoQuery + .getResponse() + .getTokenGetInfo() + .getTokenInfo() + .getExpiry() + .getSeconds(); + + final var nftTokenInfo = getTokenNftInfoForCheck(spec, getTokenInfoQuery, meta); + + allRunFor( + spec, + getTxnRecord(UPDATE_ANG_GET_NON_FUNGIBLE_TOKEN_INFO_TXN) + .andAllChildRecords() + .logged(), + childRecordsCheck( + UPDATE_ANG_GET_NON_FUNGIBLE_TOKEN_INFO_TXN, + SUCCESS, + recordWith().status(SUCCESS), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_GET_NON_FUNGIBLE_TOKEN_INFO) + .withStatus(SUCCESS) + .withTokenInfo(getTokenInfoStructForNonFungibleToken( + spec, + UPDATE_NAME, + UPDATE_SYMBOL, + UPDATE_MEMO, + spec.registry() + .getAccountID(UPDATED_TREASURY), + expirySecond)) + .withNftTokenInfo(nftTokenInfo))))); + })); + } + + private HapiSpec happyPathUpdateTokenKeysAndReadLatestInformation() { + final String TOKEN_INFO_AS_KEY = "TOKEN_INFO_CONTRACT_KEY"; + return propertyPreservingHapiSpec("happyPathUpdateTokenKeysAndReadLatestInformation") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,TokenAssociateToAccount,TokenCreate,TokenUpdate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(TOKEN_TREASURY).balance(0L), + cryptoCreate(AUTO_RENEW_ACCOUNT).balance(0L), + cryptoCreate(HTS_COLLECTOR), + cryptoCreate(ACCOUNT), + uploadInitCode(TOKEN_INFO_CONTRACT), + contractCreate(TOKEN_INFO_CONTRACT).gas(1_000_000L), + newKeyNamed(MULTI_KEY), + newKeyNamed(TOKEN_INFO_AS_KEY).shape(CONTRACT.signedWith(TOKEN_INFO_CONTRACT)), + tokenCreate(FUNGIBLE_TOKEN_NAME) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .feeScheduleKey(MULTI_KEY) + .pauseKey(MULTI_KEY) + .wipeKey(MULTI_KEY) + .freezeKey(MULTI_KEY) + .kycKey(MULTI_KEY) + .initialSupply(1_000), + tokenAssociate(ACCOUNT, FUNGIBLE_TOKEN_NAME)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_INFO_CONTRACT, + UPDATE_AND_GET_TOKEN_KEYS_INFO_TXN, + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getTokenID(FUNGIBLE_TOKEN_NAME))), + HapiParserUtil.asHeadlongAddress( + 
asAddress(spec.registry().getContractId(TOKEN_INFO_CONTRACT)))) + .via(UPDATE_AND_GET_TOKEN_KEYS_INFO_TXN) + .alsoSigningWithFullPrefix(MULTI_KEY)))) + .then(withOpContext((spec, opLog) -> allRunFor( + spec, + getTxnRecord(UPDATE_AND_GET_TOKEN_KEYS_INFO_TXN) + .andAllChildRecords() + .logged(), + childRecordsCheck( + UPDATE_AND_GET_TOKEN_KEYS_INFO_TXN, + SUCCESS, + recordWith().status(SUCCESS), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_GET_TOKEN_KEY) + .withStatus(SUCCESS) + .withTokenKeyValue( + // + // spec.registry().getKey(TOKEN_INFO_AS_KEY) + Key.newBuilder() + .setContractID( + spec.registry() + .getContractId( + TOKEN_INFO_CONTRACT)) + .build()))), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_GET_TOKEN_KEY) + .withStatus(SUCCESS) + .withTokenKeyValue(Key.newBuilder() + .setContractID(spec.registry() + .getContractId(TOKEN_INFO_CONTRACT)) + .build()))), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_GET_TOKEN_KEY) + .withStatus(SUCCESS) + .withTokenKeyValue( + spec.registry().getKey(TOKEN_INFO_AS_KEY)))), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_GET_TOKEN_KEY) + .withStatus(SUCCESS) + .withTokenKeyValue( + spec.registry().getKey(TOKEN_INFO_AS_KEY)))), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_GET_TOKEN_KEY) + .withStatus(SUCCESS) + .withTokenKeyValue( + spec.registry().getKey(TOKEN_INFO_AS_KEY)))), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_GET_TOKEN_KEY) + .withStatus(SUCCESS) + .withTokenKeyValue( + spec.registry().getKey(TOKEN_INFO_AS_KEY)))), + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.HAPI_GET_TOKEN_KEY) + .withStatus(SUCCESS) + .withTokenKeyValue( + spec.registry().getKey(TOKEN_INFO_AS_KEY)))))))); + } + + private TokenNftInfo getTokenNftInfoForCheck( + final HapiSpec spec, final HapiGetTokenInfo getTokenInfoQuery, final ByteString meta) { + final var tokenId = + getTokenInfoQuery.getResponse().getTokenGetInfo().getTokenInfo().getTokenId(); + + final var getNftTokenInfoQuery = getTokenNftInfo(NON_FUNGIBLE_TOKEN_NAME, 1L); + allRunFor(spec, getNftTokenInfoQuery); + final var creationTime = + getNftTokenInfoQuery.getResponse().getTokenGetNftInfo().getNft().getCreationTime(); + + final var ownerId = spec.registry().getAccountID(NFT_OWNER); + final var spenderId = spec.registry().getAccountID(NFT_SPENDER); + + return TokenNftInfo.newBuilder() + .setLedgerId(fromString("0x03")) + .setNftID(NftID.newBuilder() + .setTokenID(tokenId) + .setSerialNumber(1L) + .build()) + .setAccountID(ownerId) + .setCreationTime(creationTime) + .setMetadata(meta) + .setSpenderId(spenderId) + .build(); + } + + private TokenInfo getTokenInfoStructForFungibleToken( + final HapiSpec spec, + final String tokenName, + final String symbol, + final String memo, + final AccountID treasury, + final long expirySecond) { + final var autoRenewAccount = spec.registry().getAccountID(AUTO_RENEW_ACCOUNT); + + final 
ArrayList customFees = getExpectedCustomFees(spec); + + return TokenInfo.newBuilder() + .setLedgerId(fromString("0x03")) + .setSupplyTypeValue(TokenSupplyType.FINITE_VALUE) + .setExpiry(Timestamp.newBuilder().setSeconds(expirySecond)) + .setAutoRenewAccount(autoRenewAccount) + .setAutoRenewPeriod(Duration.newBuilder() + .setSeconds(THREE_MONTHS_IN_SECONDS) + .build()) + .setSymbol(symbol) + .setName(tokenName) + .setMemo(memo) + .setTreasury(treasury) + .setTotalSupply(500L) + .setMaxSupply(MAX_SUPPLY) + .addAllCustomFees(customFees) + .setAdminKey(getTokenKeyFromSpec(spec, TokenKeyType.ADMIN_KEY)) + .setKycKey(getTokenKeyFromSpec(spec, TokenKeyType.KYC_KEY)) + .setFreezeKey(getTokenKeyFromSpec(spec, TokenKeyType.FREEZE_KEY)) + .setWipeKey(getTokenKeyFromSpec(spec, TokenKeyType.WIPE_KEY)) + .setSupplyKey(getTokenKeyFromSpec(spec, TokenKeyType.SUPPLY_KEY)) + .setFeeScheduleKey(getTokenKeyFromSpec(spec, TokenKeyType.FEE_SCHEDULE_KEY)) + .setPauseKey(getTokenKeyFromSpec(spec, TokenKeyType.PAUSE_KEY)) + .build(); + } + + @NotNull + private ArrayList getExpectedCustomFees(final HapiSpec spec) { + final var fixedFee = FixedFee.newBuilder().setAmount(500L).build(); + final var customFixedFee = CustomFee.newBuilder() + .setFixedFee(fixedFee) + .setFeeCollectorAccountId(spec.registry().getAccountID(HTS_COLLECTOR)) + .build(); + + final var firstFraction = Fraction.newBuilder() + .setNumerator(NUMERATOR) + .setDenominator(DENOMINATOR * 2L) + .build(); + final var firstFractionalFee = + FractionalFee.newBuilder().setFractionalAmount(firstFraction).build(); + final var firstCustomFractionalFee = CustomFee.newBuilder() + .setFractionalFee(firstFractionalFee) + .setFeeCollectorAccountId(spec.registry().getAccountID(TOKEN_TREASURY)) + .build(); + + final var fraction = Fraction.newBuilder() + .setNumerator(NUMERATOR) + .setDenominator(DENOMINATOR) + .build(); + final var fractionalFee = FractionalFee.newBuilder() + .setFractionalAmount(fraction) + .setMinimumAmount(MINIMUM_TO_COLLECT) + .setMaximumAmount(MAXIMUM_TO_COLLECT) + .build(); + final var customFractionalFee = CustomFee.newBuilder() + .setFractionalFee(fractionalFee) + .setFeeCollectorAccountId(spec.registry().getAccountID(TOKEN_TREASURY)) + .build(); + + final var customFees = new ArrayList(); + customFees.add(customFixedFee); + customFees.add(firstCustomFractionalFee); + customFees.add(customFractionalFee); + return customFees; + } + + private TokenInfo getTokenInfoStructForNonFungibleToken( + final HapiSpec spec, + final String tokenName, + final String symbol, + final String memo, + final AccountID treasury, + final long expirySecond) { + final var autoRenewAccount = spec.registry().getAccountID(AUTO_RENEW_ACCOUNT); + + return TokenInfo.newBuilder() + .setLedgerId(fromString("0x03")) + .setSupplyTypeValue(TokenSupplyType.FINITE_VALUE) + .setExpiry(Timestamp.newBuilder().setSeconds(expirySecond)) + .setAutoRenewAccount(autoRenewAccount) + .setAutoRenewPeriod(Duration.newBuilder() + .setSeconds(THREE_MONTHS_IN_SECONDS) + .build()) + .setSymbol(symbol) + .setName(tokenName) + .setMemo(memo) + .setTreasury(treasury) + .setTotalSupply(1L) + .setMaxSupply(10L) + .addAllCustomFees(getCustomFeeForNFT(spec)) + .setAdminKey(getTokenKeyFromSpec(spec, TokenKeyType.ADMIN_KEY)) + .setKycKey(getTokenKeyFromSpec(spec, TokenKeyType.KYC_KEY)) + .setFreezeKey(getTokenKeyFromSpec(spec, TokenKeyType.FREEZE_KEY)) + .setWipeKey(getTokenKeyFromSpec(spec, TokenKeyType.WIPE_KEY)) + .setSupplyKey(getTokenKeyFromSpec(spec, TokenKeyType.SUPPLY_KEY)) + 
.setFeeScheduleKey(getTokenKeyFromSpec(spec, TokenKeyType.FEE_SCHEDULE_KEY)) + .setPauseKey(getTokenKeyFromSpec(spec, TokenKeyType.PAUSE_KEY)) + .build(); + } + + @NotNull + private ArrayList getCustomFeeForNFT(final HapiSpec spec) { + final var fraction = Fraction.newBuilder() + .setNumerator(NUMERATOR) + .setDenominator(DENOMINATOR) + .build(); + final var fallbackFee = FixedFee.newBuilder() + .setAmount(100L) + .setDenominatingTokenId(spec.registry().getTokenID(FEE_DENOM)) + .build(); + final var royaltyFee = RoyaltyFee.newBuilder() + .setExchangeValueFraction(fraction) + .setFallbackFee(fallbackFee) + .build(); + + final var customRoyaltyFee = CustomFee.newBuilder() + .setRoyaltyFee(royaltyFee) + .setFeeCollectorAccountId(spec.registry().getAccountID(HTS_COLLECTOR)) + .build(); + + final var customFees = new ArrayList(); + customFees.add(customRoyaltyFee); + + return customFees; + } + + private Key getTokenKeyFromSpec(final HapiSpec spec, final TokenKeyType type) { + final var key = spec.registry().getKey(type.name()); + + final var keyBuilder = Key.newBuilder(); + + if (key.getContractID().getContractNum() > 0) { + keyBuilder.setContractID(key.getContractID()); + } + if (key.getEd25519().toByteArray().length > 0) { + keyBuilder.setEd25519(key.getEd25519()); + } + if (key.getECDSASecp256K1().toByteArray().length > 0) { + keyBuilder.setECDSASecp256K1(key.getECDSASecp256K1()); + } + if (key.getDelegatableContractId().getContractNum() > 0) { + keyBuilder.setDelegatableContractId(key.getDelegatableContractId()); + } + + return keyBuilder.build(); + } + + private ByteString fromString(final String value) { + return ByteString.copyFrom(Bytes.fromHexString(value).toArray()); + } + + @Override + protected Logger getResultsLogger() { + return LOG; + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenUpdatePrecompileSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenUpdatePrecompileSuite.java index 73555e24be90..1346fdf9b1f9 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenUpdatePrecompileSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenUpdatePrecompileSuite.java @@ -17,17 +17,12 @@ package com.hedera.services.bdd.suites.contract.precompile; import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; -import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; -import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; import static com.hedera.services.bdd.spec.keys.KeyShape.CONTRACT; import static com.hedera.services.bdd.spec.keys.KeyShape.DELEGATE_CONTRACT; import static com.hedera.services.bdd.spec.keys.KeyShape.ED25519; import static com.hedera.services.bdd.spec.keys.KeyShape.SECP256K1; import static com.hedera.services.bdd.spec.keys.SigControl.ED25519_ON; import static com.hedera.services.bdd.spec.queries.QueryVerbs.contractCallLocal; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenNftInfo; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; import static 
com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; @@ -46,35 +41,20 @@ import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; import static com.hedera.services.bdd.suites.contract.Utils.asAddress; import static com.hedera.services.bdd.suites.contract.Utils.asToken; -import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_TOKEN_ID; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.KEY_NOT_PROVIDED; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_FEE_SCHEDULE_KEY; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_FREEZE_KEY; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_KYC_KEY; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_PAUSE_KEY; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_SUPPLY_KEY; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_WIPE_KEY; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_IS_IMMUTABLE; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_NAME_TOO_LONG; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_SYMBOL_TOO_LONG; import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; import com.google.protobuf.ByteString; -import com.hedera.node.app.hapi.utils.contracts.ParsingConstants; import com.hedera.services.bdd.spec.HapiSpec; import com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts; -import com.hedera.services.bdd.spec.transactions.TxnUtils; import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; import com.hedera.services.bdd.suites.HapiSuite; import com.hederahashgraph.api.proto.java.TokenID; -import com.hederahashgraph.api.proto.java.TokenPauseStatus; -import com.hederahashgraph.api.proto.java.TokenSupplyType; import com.hederahashgraph.api.proto.java.TokenType; import java.math.BigInteger; -import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicReference; import org.apache.logging.log4j.LogManager; @@ -94,13 +74,6 @@ public class TokenUpdatePrecompileSuite extends HapiSuite { private static final String GET_KEY_FUNC = "getKeyFromToken"; public static final String TOKEN_UPDATE_CONTRACT = "UpdateTokenInfoContract"; private static final String UPDATE_TXN = "updateTxn"; - private static final String GET_KYC_KEY_TXN = "getKycTokenKeyTxn"; - private static final String GET_ADMIN_KEY_TXN = "getAdminTokenKeyTxn"; - private static final String GET_PAUSE_KEY_TXN = "getPauseTokenKeyTxn"; - private static final String GET_FREEZE_KEY_TXN = "getFreezeTokenKeyTxn"; - private static final String GET_WIPE_KEY_TXN = "getWipeTokenKeyTxn"; - private static final String GET_FEE_KEY_TXN = "getFeeTokenKeyTxn"; - private static final String GET_SUPPLY_KEY_TXN = "getSupplyTokenKeyTxn"; private static final String NO_ADMIN_KEY = "noAdminKeyTxn"; private static final long DEFAULT_AMOUNT_TO_SEND = 20 * ONE_HBAR; private static final String ED25519KEY = "ed25519key"; @@ -113,12 +86,7 @@ public class TokenUpdatePrecompileSuite extends HapiSuite { public static final String CUSTOM_SYMBOL = "Ω"; public static 
final String CUSTOM_MEMO = "Omega"; private static final long ADMIN_KEY_TYPE = 1L; - private static final long KYC_KEY_TYPE = 2L; - private static final long FREEZE_KEY_TYPE = 4L; - private static final long WIPE_KEY_TYPE = 8L; private static final long SUPPLY_KEY_TYPE = 16L; - private static final long FEE_SCHEDULE_KEY_TYPE = 32L; - private static final long PAUSE_KEY_TYPE = 64L; public static void main(String... args) { new TokenUpdatePrecompileSuite().runSuiteAsync(); @@ -136,509 +104,14 @@ protected Logger getResultsLogger() { @Override public List getSpecsInSuite() { - return allOf(positiveCases(), negativeCases()); + return allOf(negativeCases()); } - List positiveCases() { - return List.of( - updateTokenWithKeysHappyPath(), - updateNftTreasuryWithAndWithoutAdminKey(), - updateOnlyTokenKeysAndGetTheUpdatedValues(), - updateOnlyKeysForNonFungibleToken(), - updateTokenWithoutNameSymbolMemo()); - } - - List negativeCases() { - return List.of( - updateWithTooLongNameAndSymbol(), - updateTokenWithKeysNegative(), - updateTokenWithInvalidKeyValues(), - updateNftTokenKeysWithWrongTokenIdAndMissingAdminKey(), - getTokenKeyForNonFungibleNegative()); - } - - private HapiSpec updateTokenWithKeysHappyPath() { - final AtomicReference vanillaTokenID = new AtomicReference<>(); - return defaultHapiSpec("updateTokenWithKeysHappyPath") - .given( - newKeyNamed(ED25519KEY).shape(ED25519), - newKeyNamed(ECDSA_KEY).shape(SECP256K1), - newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), - newKeyNamed(MULTI_KEY).shape(ED25519_ON), - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(MULTI_KEY), - cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), - uploadInitCode(TOKEN_UPDATE_CONTRACT), - contractCreate(TOKEN_UPDATE_CONTRACT).gas(GAS_TO_OFFER), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .feeScheduleKey(MULTI_KEY) - .pauseKey(MULTI_KEY) - .wipeKey(MULTI_KEY) - .freezeKey(MULTI_KEY) - .kycKey(MULTI_KEY) - .initialSupply(1_000) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - tokenAssociate(ACCOUNT, VANILLA_TOKEN), - grantTokenKyc(VANILLA_TOKEN, ACCOUNT), - cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT))) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_UPDATE_CONTRACT, - "updateTokenWithAllFields", - HapiParserUtil.asHeadlongAddress(new byte[20]), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - spec.registry() - .getKey(ECDSA_KEY) - .getECDSASecp256K1() - .toByteArray(), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - AUTO_RENEW_PERIOD, - CUSTOM_NAME, - CUSTOM_SYMBOL, - CUSTOM_MEMO) - .via(UPDATE_TXN) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - TOKEN_UPDATE_CONTRACT, - "updateTokenWithAllFields", - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - spec.registry() - .getKey(ECDSA_KEY) - .getECDSASecp256K1() - .toByteArray(), - 
HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - AUTO_RENEW_PERIOD, - CUSTOM_NAME, - CUSTOM_SYMBOL, - CUSTOM_MEMO) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT), - newKeyNamed(DELEGATE_KEY).shape(DELEGATE_CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT)), - newKeyNamed(TOKEN_UPDATE_AS_KEY).shape(CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT))))) - .then( - childRecordsCheck( - UPDATE_TXN, - CONTRACT_REVERT_EXECUTED, - TransactionRecordAsserts.recordWith().status(INVALID_TOKEN_ID)), - sourcing(() -> getTokenInfo(VANILLA_TOKEN) - .logged() - .hasTokenType(TokenType.FUNGIBLE_COMMON) - .hasSymbol(CUSTOM_SYMBOL) - .hasName(CUSTOM_NAME) - .hasEntityMemo(CUSTOM_MEMO) - .hasTreasury(ACCOUNT) - .hasAutoRenewAccount(ACCOUNT) - .hasAutoRenewPeriod(AUTO_RENEW_PERIOD) - .hasSupplyType(TokenSupplyType.INFINITE) - .searchKeysGlobally() - .hasAdminKey(ED25519KEY) - .hasPauseKey(MULTI_KEY) - .hasKycKey(ED25519KEY) - .hasFreezeKey(ECDSA_KEY) - .hasWipeKey(ECDSA_KEY) - .hasFeeScheduleKey(DELEGATE_KEY) - .hasSupplyKey(TOKEN_UPDATE_AS_KEY) - .hasPauseKey(TOKEN_UPDATE_AS_KEY))); - } - - public HapiSpec updateNftTreasuryWithAndWithoutAdminKey() { - final var newTokenTreasury = "newTokenTreasury"; - final var NO_ADMIN_TOKEN = "noAdminKeyToken"; - final AtomicReference noAdminKeyToken = new AtomicReference<>(); - final AtomicReference nftToken = new AtomicReference<>(); - return defaultHapiSpec("UpdateNftTreasuryWithAndWithoutAdminKey") - .given( - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(newTokenTreasury).keyShape(ED25519_ON).maxAutomaticTokenAssociations(6), - newKeyNamed(MULTI_KEY).shape(ED25519_ON), - cryptoCreate(ACCOUNT).key(MULTI_KEY).balance(ONE_MILLION_HBARS), - uploadInitCode(TOKEN_UPDATE_CONTRACT), - contractCreate(TOKEN_UPDATE_CONTRACT).gas(GAS_TO_OFFER), - tokenCreate(NO_ADMIN_TOKEN) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasury(TOKEN_TREASURY) - .initialSupply(0) - .supplyKey(MULTI_KEY) - .exposingCreatedIdTo(id -> noAdminKeyToken.set(asToken(id))), - tokenCreate(VANILLA_TOKEN) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasury(TOKEN_TREASURY) - .initialSupply(0) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .pauseKey(MULTI_KEY) - .exposingCreatedIdTo(id -> nftToken.set(asToken(id))), - mintToken(VANILLA_TOKEN, List.of(ByteString.copyFromUtf8("nft0"))), - tokenAssociate(newTokenTreasury, VANILLA_TOKEN), - mintToken(NO_ADMIN_TOKEN, List.of(ByteString.copyFromUtf8("nft1")))) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_UPDATE_CONTRACT, - "updateTokenTreasury", - HapiParserUtil.asHeadlongAddress(asAddress(noAdminKeyToken.get())), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(newTokenTreasury)))) - .via("noAdminKey") - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT, newTokenTreasury) - .alsoSigningWithFullPrefix(ACCOUNT, newTokenTreasury) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - TOKEN_UPDATE_CONTRACT, - "updateTokenTreasury", - HapiParserUtil.asHeadlongAddress(asAddress(nftToken.get())), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(newTokenTreasury)))) - .via("tokenUpdateTxn") - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT, newTokenTreasury) - .alsoSigningWithFullPrefix(ACCOUNT, 
newTokenTreasury)))) - .then( - childRecordsCheck( - "noAdminKey", - CONTRACT_REVERT_EXECUTED, - TransactionRecordAsserts.recordWith().status(TOKEN_IS_IMMUTABLE)), - getTokenNftInfo(VANILLA_TOKEN, 1) - .hasAccountID(newTokenTreasury) - .logged(), - getAccountBalance(TOKEN_TREASURY).hasTokenBalance(VANILLA_TOKEN, 0), - getAccountBalance(newTokenTreasury).hasTokenBalance(VANILLA_TOKEN, 1), - getTokenInfo(VANILLA_TOKEN) - .hasTreasury(newTokenTreasury) - .hasPauseStatus(TokenPauseStatus.Unpaused) - .logged(), - getTokenNftInfo(VANILLA_TOKEN, 1) - .hasAccountID(newTokenTreasury) - .logged()); - } - - public HapiSpec updateWithTooLongNameAndSymbol() { - final var tooLongString = "ORIGINAL" + TxnUtils.randomUppercase(101); - final var tooLongSymbolTxn = "tooLongSymbolTxn"; - final AtomicReference vanillaTokenID = new AtomicReference<>(); - return defaultHapiSpec("updateWithTooLongNameAndSymbol") - .given( - cryptoCreate(TOKEN_TREASURY), - newKeyNamed(MULTI_KEY).shape(ED25519_ON), - cryptoCreate(ACCOUNT).key(MULTI_KEY).balance(ONE_MILLION_HBARS), - uploadInitCode(TOKEN_UPDATE_CONTRACT), - contractCreate(TOKEN_UPDATE_CONTRACT).gas(GAS_TO_OFFER), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .initialSupply(1000) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .pauseKey(MULTI_KEY) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - tokenAssociate(ACCOUNT, VANILLA_TOKEN)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_UPDATE_CONTRACT, - "checkNameAndSymbolLength", - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - tooLongString, - CUSTOM_SYMBOL) - .via(UPDATE_TXN) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - TOKEN_UPDATE_CONTRACT, - "checkNameAndSymbolLength", - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - CUSTOM_NAME, - tooLongString) - .via(tooLongSymbolTxn) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED)))) - .then(withOpContext((spec, opLog) -> allRunFor( - spec, - childRecordsCheck( - UPDATE_TXN, - CONTRACT_REVERT_EXECUTED, - TransactionRecordAsserts.recordWith().status(TOKEN_NAME_TOO_LONG)), - childRecordsCheck( - tooLongSymbolTxn, - CONTRACT_REVERT_EXECUTED, - TransactionRecordAsserts.recordWith().status(TOKEN_SYMBOL_TOO_LONG))))); - } - - private HapiSpec updateTokenWithKeysNegative() { - final var updateTokenWithKeysFunc = "updateTokenWithKeys"; - final var NO_FEE_SCHEDULE_KEY_TXN = "NO_FEE_SCHEDULE_KEY_TXN"; - final var NO_PAUSE_KEY_TXN = "NO_PAUSE_KEY_TXN"; - final var NO_KYC_KEY_TXN = "NO_KYC_KEY_TXN"; - final var NO_WIPE_KEY_TXN = "NO_WIPE_KEY_TXN"; - final var NO_FREEZE_KEY_TXN = "NO_FREEZE_KEY_TXN"; - final var NO_SUPPLY_KEY_TXN = "NO_SUPPLY_KEY_TXN"; - final List> tokenList = new ArrayList<>(); - final AtomicReference vanillaTokenID = new AtomicReference<>(); - return defaultHapiSpec("updateTokenWithKeysNegative") - .given( - newKeyNamed(ED25519KEY).shape(ED25519), - newKeyNamed(ECDSA_KEY).shape(SECP256K1), - newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), - newKeyNamed(MULTI_KEY).shape(ED25519_ON), - 
cryptoCreate(TOKEN_TREASURY), - cryptoCreate(ACCOUNT) - .balance(ONE_MILLION_HBARS) - .key(MULTI_KEY) - .maxAutomaticTokenAssociations(100), - cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), - uploadInitCode(TOKEN_UPDATE_CONTRACT), - contractCreate(TOKEN_UPDATE_CONTRACT).gas(GAS_TO_OFFER), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .feeScheduleKey(MULTI_KEY) - .exposingCreatedIdTo(id -> tokenList.add(new AtomicReference<>(asToken(id)))), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .feeScheduleKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .exposingCreatedIdTo(id -> tokenList.add(new AtomicReference<>(asToken(id)))), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .feeScheduleKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .wipeKey(MULTI_KEY) - .exposingCreatedIdTo(id -> tokenList.add(new AtomicReference<>(asToken(id)))), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .pauseKey(MULTI_KEY) - .feeScheduleKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .wipeKey(MULTI_KEY) - .exposingCreatedIdTo(id -> tokenList.add(new AtomicReference<>(asToken(id)))), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .freezeKey(MULTI_KEY) - .adminKey(MULTI_KEY) - .pauseKey(MULTI_KEY) - .feeScheduleKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .wipeKey(MULTI_KEY) - .exposingCreatedIdTo(id -> tokenList.add(new AtomicReference<>(asToken(id))))) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_UPDATE_CONTRACT, - updateTokenWithKeysFunc, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - spec.registry() - .getKey(ECDSA_KEY) - .getECDSASecp256K1() - .toByteArray(), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) - .via(NO_FEE_SCHEDULE_KEY_TXN) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - TOKEN_UPDATE_CONTRACT, - updateTokenWithKeysFunc, - HapiParserUtil.asHeadlongAddress( - asAddress(tokenList.get(0).get())), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - spec.registry() - .getKey(ECDSA_KEY) - .getECDSASecp256K1() - .toByteArray(), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) - .via(NO_SUPPLY_KEY_TXN) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - TOKEN_UPDATE_CONTRACT, - updateTokenWithKeysFunc, - HapiParserUtil.asHeadlongAddress( - asAddress(tokenList.get(1).get())), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - 
spec.registry() - .getKey(ECDSA_KEY) - .getECDSASecp256K1() - .toByteArray(), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) - .via(NO_WIPE_KEY_TXN) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - TOKEN_UPDATE_CONTRACT, - updateTokenWithKeysFunc, - HapiParserUtil.asHeadlongAddress( - asAddress(tokenList.get(2).get())), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - spec.registry() - .getKey(ECDSA_KEY) - .getECDSASecp256K1() - .toByteArray(), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) - .via(NO_PAUSE_KEY_TXN) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - TOKEN_UPDATE_CONTRACT, - updateTokenWithKeysFunc, - HapiParserUtil.asHeadlongAddress( - asAddress(tokenList.get(3).get())), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - spec.registry() - .getKey(ECDSA_KEY) - .getECDSASecp256K1() - .toByteArray(), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) - .via(NO_FREEZE_KEY_TXN) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - TOKEN_UPDATE_CONTRACT, - updateTokenWithKeysFunc, - HapiParserUtil.asHeadlongAddress( - asAddress(tokenList.get(4).get())), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - spec.registry() - .getKey(ECDSA_KEY) - .getECDSASecp256K1() - .toByteArray(), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) - .via(NO_KYC_KEY_TXN) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED)))) - .then(withOpContext((spec, ignore) -> allRunFor( - spec, - childRecordsCheck( - NO_FEE_SCHEDULE_KEY_TXN, - CONTRACT_REVERT_EXECUTED, - TransactionRecordAsserts.recordWith().status(TOKEN_HAS_NO_FEE_SCHEDULE_KEY)), - childRecordsCheck( - NO_SUPPLY_KEY_TXN, - CONTRACT_REVERT_EXECUTED, - TransactionRecordAsserts.recordWith().status(TOKEN_HAS_NO_SUPPLY_KEY)), - childRecordsCheck( - NO_WIPE_KEY_TXN, - CONTRACT_REVERT_EXECUTED, - TransactionRecordAsserts.recordWith().status(TOKEN_HAS_NO_WIPE_KEY)), - childRecordsCheck( - NO_PAUSE_KEY_TXN, - CONTRACT_REVERT_EXECUTED, - TransactionRecordAsserts.recordWith().status(TOKEN_HAS_NO_PAUSE_KEY)), - childRecordsCheck( - NO_FREEZE_KEY_TXN, - CONTRACT_REVERT_EXECUTED, - TransactionRecordAsserts.recordWith().status(TOKEN_HAS_NO_FREEZE_KEY)), - childRecordsCheck( - NO_KYC_KEY_TXN, - CONTRACT_REVERT_EXECUTED, - TransactionRecordAsserts.recordWith().status(TOKEN_HAS_NO_KYC_KEY))))); + List negativeCases() { + return List.of( + updateTokenWithInvalidKeyValues(), + updateNftTokenKeysWithWrongTokenIdAndMissingAdminKey(), + getTokenKeyForNonFungibleNegative()); } private HapiSpec 
updateTokenWithInvalidKeyValues() { @@ -688,265 +161,6 @@ private HapiSpec updateTokenWithInvalidKeyValues() { .then(sourcing(() -> emptyChildRecordsCheck(UPDATE_TXN, CONTRACT_REVERT_EXECUTED))); } - private HapiSpec updateOnlyTokenKeysAndGetTheUpdatedValues() { - - final AtomicReference vanillaTokenID = new AtomicReference<>(); - return defaultHapiSpec("updateOnlyTokenKeysAndGetTheUpdatedValues") - .given( - newKeyNamed(ED25519KEY).shape(ED25519), - newKeyNamed(ECDSA_KEY).shape(SECP256K1), - newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), - newKeyNamed(MULTI_KEY).shape(ED25519_ON), - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(MULTI_KEY), - cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), - uploadInitCode(TOKEN_UPDATE_CONTRACT), - contractCreate(TOKEN_UPDATE_CONTRACT).gas(GAS_TO_OFFER), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .feeScheduleKey(MULTI_KEY) - .pauseKey(MULTI_KEY) - .wipeKey(MULTI_KEY) - .freezeKey(MULTI_KEY) - .kycKey(MULTI_KEY) - .initialSupply(1_000) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - tokenAssociate(ACCOUNT, VANILLA_TOKEN), - grantTokenKyc(VANILLA_TOKEN, ACCOUNT), - cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT))) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_UPDATE_CONTRACT, - UPDATE_KEY_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - spec.registry() - .getKey(ECDSA_KEY) - .getECDSASecp256K1() - .toByteArray(), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT), - newKeyNamed(DELEGATE_KEY).shape(DELEGATE_CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT)), - newKeyNamed(TOKEN_UPDATE_AS_KEY).shape(CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT)), - contractCall( - TOKEN_UPDATE_CONTRACT, - GET_KEY_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - BigInteger.valueOf(ADMIN_KEY_TYPE)) - .via(GET_ADMIN_KEY_TXN), - contractCall( - TOKEN_UPDATE_CONTRACT, - GET_KEY_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - BigInteger.valueOf(KYC_KEY_TYPE)) - .via(GET_KYC_KEY_TXN), - contractCall( - TOKEN_UPDATE_CONTRACT, - GET_KEY_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - BigInteger.valueOf(FREEZE_KEY_TYPE)) - .via(GET_FREEZE_KEY_TXN), - contractCall( - TOKEN_UPDATE_CONTRACT, - GET_KEY_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - BigInteger.valueOf(WIPE_KEY_TYPE)) - .via(GET_WIPE_KEY_TXN), - contractCall( - TOKEN_UPDATE_CONTRACT, - GET_KEY_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - BigInteger.valueOf(FEE_SCHEDULE_KEY_TYPE)) - .via(GET_FEE_KEY_TXN), - contractCall( - TOKEN_UPDATE_CONTRACT, - GET_KEY_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - BigInteger.valueOf(SUPPLY_KEY_TYPE)) - .via(GET_SUPPLY_KEY_TXN), - contractCall( - TOKEN_UPDATE_CONTRACT, - GET_KEY_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - BigInteger.valueOf(PAUSE_KEY_TYPE)) - .via(GET_PAUSE_KEY_TXN), - contractCallLocal( - TOKEN_UPDATE_CONTRACT, - GET_KEY_FUNC, - 
HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - BigInteger.valueOf(ADMIN_KEY_TYPE))))) - .then(withOpContext((spec, opLog) -> allRunFor( - spec, - getTokenInfo(VANILLA_TOKEN) - .logged() - .hasTokenType(TokenType.FUNGIBLE_COMMON) - .hasSupplyType(TokenSupplyType.INFINITE) - .searchKeysGlobally() - .hasAdminKey(ED25519KEY) - .hasPauseKey(MULTI_KEY) - .hasKycKey(ED25519KEY) - .hasFreezeKey(ECDSA_KEY) - .hasWipeKey(ECDSA_KEY) - .hasFeeScheduleKey(DELEGATE_KEY) - .hasSupplyKey(TOKEN_UPDATE_AS_KEY) - .hasPauseKey(TOKEN_UPDATE_AS_KEY), - childRecordsCheck( - GET_ADMIN_KEY_TXN, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(ParsingConstants.FunctionType.HAPI_GET_TOKEN_KEY) - .withStatus(SUCCESS) - .withTokenKeyValue( - spec.registry().getKey(ED25519KEY))))), - childRecordsCheck( - GET_KYC_KEY_TXN, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(ParsingConstants.FunctionType.HAPI_GET_TOKEN_KEY) - .withStatus(SUCCESS) - .withTokenKeyValue( - spec.registry().getKey(ED25519KEY))))), - childRecordsCheck( - GET_FREEZE_KEY_TXN, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(ParsingConstants.FunctionType.HAPI_GET_TOKEN_KEY) - .withStatus(SUCCESS) - .withTokenKeyValue( - spec.registry().getKey(ECDSA_KEY))))), - childRecordsCheck( - GET_WIPE_KEY_TXN, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(ParsingConstants.FunctionType.HAPI_GET_TOKEN_KEY) - .withStatus(SUCCESS) - .withTokenKeyValue( - spec.registry().getKey(ECDSA_KEY))))), - childRecordsCheck( - GET_FEE_KEY_TXN, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(ParsingConstants.FunctionType.HAPI_GET_TOKEN_KEY) - .withStatus(SUCCESS) - .withTokenKeyValue( - spec.registry().getKey(DELEGATE_KEY))))), - childRecordsCheck( - GET_SUPPLY_KEY_TXN, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(ParsingConstants.FunctionType.HAPI_GET_TOKEN_KEY) - .withStatus(SUCCESS) - .withTokenKeyValue( - spec.registry().getKey(TOKEN_UPDATE_AS_KEY))))), - childRecordsCheck( - GET_PAUSE_KEY_TXN, - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(ParsingConstants.FunctionType.HAPI_GET_TOKEN_KEY) - .withStatus(SUCCESS) - .withTokenKeyValue( - spec.registry().getKey(TOKEN_UPDATE_AS_KEY)))))))); - } - - public HapiSpec updateOnlyKeysForNonFungibleToken() { - final AtomicReference nftToken = new AtomicReference<>(); - return defaultHapiSpec("updateOnlyKeysForNonFungibleToken") - .given( - cryptoCreate(TOKEN_TREASURY), - newKeyNamed(ED25519KEY).shape(ED25519), - newKeyNamed(ECDSA_KEY).shape(SECP256K1), - newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), - newKeyNamed(MULTI_KEY).shape(ED25519_ON), - cryptoCreate(ACCOUNT).key(MULTI_KEY).balance(ONE_MILLION_HBARS), - cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), - uploadInitCode(TOKEN_UPDATE_CONTRACT), - contractCreate(TOKEN_UPDATE_CONTRACT).gas(GAS_TO_OFFER), - tokenCreate(NFT_TOKEN) - .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .treasury(TOKEN_TREASURY) - .initialSupply(0) - 
.adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .feeScheduleKey(MULTI_KEY) - .pauseKey(MULTI_KEY) - .wipeKey(MULTI_KEY) - .freezeKey(MULTI_KEY) - .kycKey(MULTI_KEY) - .exposingCreatedIdTo(id -> nftToken.set(asToken(id))), - mintToken(VANILLA_TOKEN, List.of(ByteString.copyFromUtf8("nft3"))), - tokenAssociate(ACCOUNT, VANILLA_TOKEN)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_UPDATE_CONTRACT, - UPDATE_KEY_FUNC, - HapiParserUtil.asHeadlongAddress(asAddress(nftToken.get())), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - spec.registry() - .getKey(ECDSA_KEY) - .getECDSASecp256K1() - .toByteArray(), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) - .via(UPDATE_TXN) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT), - newKeyNamed(DELEGATE_KEY).shape(DELEGATE_CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT)), - newKeyNamed(TOKEN_UPDATE_AS_KEY).shape(CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT))))) - .then(withOpContext((spec, opLog) -> allRunFor( - spec, - getTokenInfo(NFT_TOKEN) - .logged() - .hasTokenType(TokenType.NON_FUNGIBLE_UNIQUE) - .hasSupplyType(TokenSupplyType.INFINITE) - .searchKeysGlobally() - .hasAdminKey(ED25519KEY) - .hasPauseKey(MULTI_KEY) - .hasKycKey(ED25519KEY) - .hasFreezeKey(ECDSA_KEY) - .hasWipeKey(ECDSA_KEY) - .hasFeeScheduleKey(DELEGATE_KEY) - .hasSupplyKey(TOKEN_UPDATE_AS_KEY) - .hasPauseKey(TOKEN_UPDATE_AS_KEY)))); - } - public HapiSpec updateNftTokenKeysWithWrongTokenIdAndMissingAdminKey() { final AtomicReference nftToken = new AtomicReference<>(); return defaultHapiSpec("updateNftTokenKeysWithWrongTokenIdAndMissingAdminKey") @@ -1081,113 +295,4 @@ public HapiSpec getTokenKeyForNonFungibleNegative() { CONTRACT_REVERT_EXECUTED, TransactionRecordAsserts.recordWith().status(KEY_NOT_PROVIDED))))); } - - private HapiSpec updateTokenWithoutNameSymbolMemo() { - final var updateTokenWithoutNameSymbolMemoFunc = "updateTokenWithoutNameSymbolMemo"; - final AtomicReference vanillaTokenID = new AtomicReference<>(); - return defaultHapiSpec(updateTokenWithoutNameSymbolMemoFunc) - .given( - newKeyNamed(ED25519KEY).shape(ED25519), - newKeyNamed(ECDSA_KEY).shape(SECP256K1), - newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), - newKeyNamed(MULTI_KEY).shape(ED25519_ON), - cryptoCreate(TOKEN_TREASURY), - cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(MULTI_KEY), - cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), - uploadInitCode(TOKEN_UPDATE_CONTRACT), - contractCreate(TOKEN_UPDATE_CONTRACT).gas(GAS_TO_OFFER), - tokenCreate(VANILLA_TOKEN) - .symbol(CUSTOM_SYMBOL) - .entityMemo(CUSTOM_MEMO) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .feeScheduleKey(MULTI_KEY) - .pauseKey(MULTI_KEY) - .wipeKey(MULTI_KEY) - .freezeKey(MULTI_KEY) - .kycKey(MULTI_KEY) - .initialSupply(1_000) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - tokenAssociate(ACCOUNT, VANILLA_TOKEN), - grantTokenKyc(VANILLA_TOKEN, ACCOUNT), - cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT))) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - TOKEN_UPDATE_CONTRACT, - updateTokenWithoutNameSymbolMemoFunc, - HapiParserUtil.asHeadlongAddress(new byte[20]), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - 
.toByteArray(), - spec.registry() - .getKey(ECDSA_KEY) - .getECDSASecp256K1() - .toByteArray(), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - AUTO_RENEW_PERIOD) - .via(UPDATE_TXN) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - TOKEN_UPDATE_CONTRACT, - "updateTokenWithoutNameSymbolMemo", - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - spec.registry() - .getKey(ED25519KEY) - .getEd25519() - .toByteArray(), - spec.registry() - .getKey(ECDSA_KEY) - .getECDSASecp256K1() - .toByteArray(), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT))), - HapiParserUtil.asHeadlongAddress( - asAddress(spec.registry().getAccountID(ACCOUNT))), - AUTO_RENEW_PERIOD) - .gas(GAS_TO_OFFER) - .sending(DEFAULT_AMOUNT_TO_SEND) - .signedBy(GENESIS, ACCOUNT) - .alsoSigningWithFullPrefix(ACCOUNT), - newKeyNamed(DELEGATE_KEY).shape(DELEGATE_CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT)), - newKeyNamed(TOKEN_UPDATE_AS_KEY).shape(CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT))))) - .then( - childRecordsCheck( - UPDATE_TXN, - CONTRACT_REVERT_EXECUTED, - TransactionRecordAsserts.recordWith().status(INVALID_TOKEN_ID)), - sourcing(() -> getTokenInfo(VANILLA_TOKEN) - .logged() - .hasTokenType(TokenType.FUNGIBLE_COMMON) - .hasSymbol(CUSTOM_SYMBOL) - .hasName(VANILLA_TOKEN) - .hasEntityMemo(CUSTOM_MEMO) - .hasTreasury(ACCOUNT) - .hasAutoRenewAccount(ACCOUNT) - .hasAutoRenewPeriod(AUTO_RENEW_PERIOD) - .hasSupplyType(TokenSupplyType.INFINITE) - .searchKeysGlobally() - .hasAdminKey(ED25519KEY) - .hasPauseKey(MULTI_KEY) - .hasKycKey(ED25519KEY) - .hasFreezeKey(ECDSA_KEY) - .hasWipeKey(ECDSA_KEY) - .hasFeeScheduleKey(DELEGATE_KEY) - .hasSupplyKey(TOKEN_UPDATE_AS_KEY) - .hasPauseKey(TOKEN_UPDATE_AS_KEY))); - } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenUpdatePrecompileV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenUpdatePrecompileV1SecurityModelSuite.java new file mode 100644 index 000000000000..9b9feb7c2218 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TokenUpdatePrecompileV1SecurityModelSuite.java @@ -0,0 +1,1021 @@ +/* + * Copyright (C) 2022-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.keys.KeyShape.CONTRACT; +import static com.hedera.services.bdd.spec.keys.KeyShape.DELEGATE_CONTRACT; +import static com.hedera.services.bdd.spec.keys.KeyShape.ED25519; +import static com.hedera.services.bdd.spec.keys.KeyShape.SECP256K1; +import static com.hedera.services.bdd.spec.keys.SigControl.ED25519_ON; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.contractCallLocal; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenNftInfo; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.grantTokenKyc; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overriding; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.asToken; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_TOKEN_ID; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_FEE_SCHEDULE_KEY; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_FREEZE_KEY; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_KYC_KEY; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_PAUSE_KEY; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_SUPPLY_KEY; +import static 
com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_HAS_NO_WIPE_KEY; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_IS_IMMUTABLE; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_NAME_TOO_LONG; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.TOKEN_SYMBOL_TOO_LONG; +import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; + +import com.google.protobuf.ByteString; +import com.hedera.node.app.hapi.utils.contracts.ParsingConstants; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts; +import com.hedera.services.bdd.spec.transactions.TxnUtils; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hederahashgraph.api.proto.java.TokenID; +import com.hederahashgraph.api.proto.java.TokenPauseStatus; +import com.hederahashgraph.api.proto.java.TokenSupplyType; +import com.hederahashgraph.api.proto.java.TokenType; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +@SuppressWarnings("java:S1192") // "string literal should not be duplicated" - this rule makes test suites worse +public class TokenUpdatePrecompileV1SecurityModelSuite extends HapiSuite { + + private static final Logger log = LogManager.getLogger(TokenUpdatePrecompileV1SecurityModelSuite.class); + + private static final long GAS_TO_OFFER = 4_000_000L; + private static final long AUTO_RENEW_PERIOD = 8_000_000L; + private static final String ACCOUNT = "account"; + private static final String VANILLA_TOKEN = "TokenD"; + private static final String NFT_TOKEN = "TokenD"; + private static final String MULTI_KEY = "multiKey"; + private static final String UPDATE_KEY_FUNC = "tokenUpdateKeys"; + private static final String GET_KEY_FUNC = "getKeyFromToken"; + public static final String TOKEN_UPDATE_CONTRACT = "UpdateTokenInfoContract"; + private static final String UPDATE_TXN = "updateTxn"; + private static final String GET_KYC_KEY_TXN = "getKycTokenKeyTxn"; + private static final String GET_ADMIN_KEY_TXN = "getAdminTokenKeyTxn"; + private static final String GET_PAUSE_KEY_TXN = "getPauseTokenKeyTxn"; + private static final String GET_FREEZE_KEY_TXN = "getFreezeTokenKeyTxn"; + private static final String GET_WIPE_KEY_TXN = "getWipeTokenKeyTxn"; + private static final String GET_FEE_KEY_TXN = "getFeeTokenKeyTxn"; + private static final String GET_SUPPLY_KEY_TXN = "getSupplyTokenKeyTxn"; + private static final long DEFAULT_AMOUNT_TO_SEND = 20 * ONE_HBAR; + private static final String ED25519KEY = "ed25519key"; + private static final String ECDSA_KEY = "ecdsa"; + public static final String TOKEN_UPDATE_AS_KEY = "tokenCreateContractAsKey"; + private static final String DELEGATE_KEY = "tokenUpdateAsKeyDelegate"; + private static final String ACCOUNT_TO_ASSOCIATE = "account3"; + private static final String ACCOUNT_TO_ASSOCIATE_KEY = "associateKey"; + public static final String CUSTOM_NAME = "customName"; + public static final String CUSTOM_SYMBOL = "Ω"; + public static final String CUSTOM_MEMO = "Omega"; + private static final long ADMIN_KEY_TYPE = 1L; + private static final long KYC_KEY_TYPE = 2L; + private static final long FREEZE_KEY_TYPE = 4L; + private static final long WIPE_KEY_TYPE = 8L; + private static final long SUPPLY_KEY_TYPE = 16L; 
+ private static final long FEE_SCHEDULE_KEY_TYPE = 32L; + private static final long PAUSE_KEY_TYPE = 64L; + + public static void main(String... args) { + new TokenUpdatePrecompileV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + protected Logger getResultsLogger() { + return log; + } + + @Override + public List getSpecsInSuite() { + return allOf(positiveCases(), negativeCases()); + } + + List positiveCases() { + return List.of( + updateTokenWithKeysHappyPath(), + updateNftTreasuryWithAndWithoutAdminKey(), + updateOnlyTokenKeysAndGetTheUpdatedValues(), + updateOnlyKeysForNonFungibleToken(), + updateTokenWithoutNameSymbolMemo()); + } + + List negativeCases() { + return List.of(updateWithTooLongNameAndSymbol(), updateTokenWithKeysNegative()); + } + + private HapiSpec updateTokenWithKeysHappyPath() { + final AtomicReference vanillaTokenID = new AtomicReference<>(); + return propertyPreservingHapiSpec("updateTokenWithKeysHappyPath") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ED25519KEY).shape(ED25519), + newKeyNamed(ECDSA_KEY).shape(SECP256K1), + newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), + newKeyNamed(MULTI_KEY).shape(ED25519_ON), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(MULTI_KEY), + cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), + uploadInitCode(TOKEN_UPDATE_CONTRACT), + contractCreate(TOKEN_UPDATE_CONTRACT).gas(GAS_TO_OFFER), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .feeScheduleKey(MULTI_KEY) + .pauseKey(MULTI_KEY) + .wipeKey(MULTI_KEY) + .freezeKey(MULTI_KEY) + .kycKey(MULTI_KEY) + .initialSupply(1_000) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + tokenAssociate(ACCOUNT, VANILLA_TOKEN), + grantTokenKyc(VANILLA_TOKEN, ACCOUNT), + cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT))) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_UPDATE_CONTRACT, + "updateTokenWithAllFields", + HapiParserUtil.asHeadlongAddress(new byte[20]), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + spec.registry() + .getKey(ECDSA_KEY) + .getECDSASecp256K1() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + AUTO_RENEW_PERIOD, + CUSTOM_NAME, + CUSTOM_SYMBOL, + CUSTOM_MEMO) + .via(UPDATE_TXN) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + TOKEN_UPDATE_CONTRACT, + "updateTokenWithAllFields", + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + spec.registry() + .getKey(ECDSA_KEY) + .getECDSASecp256K1() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT))), + HapiParserUtil.asHeadlongAddress( + 
asAddress(spec.registry().getAccountID(ACCOUNT))), + AUTO_RENEW_PERIOD, + CUSTOM_NAME, + CUSTOM_SYMBOL, + CUSTOM_MEMO) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT), + newKeyNamed(DELEGATE_KEY).shape(DELEGATE_CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT)), + newKeyNamed(TOKEN_UPDATE_AS_KEY).shape(CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT))))) + .then( + childRecordsCheck( + UPDATE_TXN, + CONTRACT_REVERT_EXECUTED, + TransactionRecordAsserts.recordWith().status(INVALID_TOKEN_ID)), + sourcing(() -> getTokenInfo(VANILLA_TOKEN) + .logged() + .hasTokenType(TokenType.FUNGIBLE_COMMON) + .hasSymbol(CUSTOM_SYMBOL) + .hasName(CUSTOM_NAME) + .hasEntityMemo(CUSTOM_MEMO) + .hasTreasury(ACCOUNT) + .hasAutoRenewAccount(ACCOUNT) + .hasAutoRenewPeriod(AUTO_RENEW_PERIOD) + .hasSupplyType(TokenSupplyType.INFINITE) + .searchKeysGlobally() + .hasAdminKey(ED25519KEY) + .hasPauseKey(MULTI_KEY) + .hasKycKey(ED25519KEY) + .hasFreezeKey(ECDSA_KEY) + .hasWipeKey(ECDSA_KEY) + .hasFeeScheduleKey(DELEGATE_KEY) + .hasSupplyKey(TOKEN_UPDATE_AS_KEY) + .hasPauseKey(TOKEN_UPDATE_AS_KEY))); + } + + public HapiSpec updateNftTreasuryWithAndWithoutAdminKey() { + final var newTokenTreasury = "newTokenTreasury"; + final var NO_ADMIN_TOKEN = "noAdminKeyToken"; + final AtomicReference noAdminKeyToken = new AtomicReference<>(); + final AtomicReference nftToken = new AtomicReference<>(); + return propertyPreservingHapiSpec("updateNftTreasuryWithAndWithoutAdminKey") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(newTokenTreasury).keyShape(ED25519_ON).maxAutomaticTokenAssociations(6), + newKeyNamed(MULTI_KEY).shape(ED25519_ON), + cryptoCreate(ACCOUNT).key(MULTI_KEY).balance(ONE_MILLION_HBARS), + uploadInitCode(TOKEN_UPDATE_CONTRACT), + contractCreate(TOKEN_UPDATE_CONTRACT).gas(GAS_TO_OFFER), + tokenCreate(NO_ADMIN_TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasury(TOKEN_TREASURY) + .initialSupply(0) + .supplyKey(MULTI_KEY) + .exposingCreatedIdTo(id -> noAdminKeyToken.set(asToken(id))), + tokenCreate(VANILLA_TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasury(TOKEN_TREASURY) + .initialSupply(0) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .pauseKey(MULTI_KEY) + .exposingCreatedIdTo(id -> nftToken.set(asToken(id))), + mintToken(VANILLA_TOKEN, List.of(ByteString.copyFromUtf8("nft0"))), + tokenAssociate(newTokenTreasury, VANILLA_TOKEN), + mintToken(NO_ADMIN_TOKEN, List.of(ByteString.copyFromUtf8("nft1")))) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_UPDATE_CONTRACT, + "updateTokenTreasury", + HapiParserUtil.asHeadlongAddress(asAddress(noAdminKeyToken.get())), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(newTokenTreasury)))) + .via("noAdminKey") + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT, newTokenTreasury) + .alsoSigningWithFullPrefix(ACCOUNT, newTokenTreasury) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + TOKEN_UPDATE_CONTRACT, + "updateTokenTreasury", + HapiParserUtil.asHeadlongAddress(asAddress(nftToken.get())), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(newTokenTreasury)))) + .via("tokenUpdateTxn") + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT, newTokenTreasury) + 
.alsoSigningWithFullPrefix(ACCOUNT, newTokenTreasury)))) + .then( + childRecordsCheck( + "noAdminKey", + CONTRACT_REVERT_EXECUTED, + TransactionRecordAsserts.recordWith().status(TOKEN_IS_IMMUTABLE)), + getTokenNftInfo(VANILLA_TOKEN, 1) + .hasAccountID(newTokenTreasury) + .logged(), + getAccountBalance(TOKEN_TREASURY).hasTokenBalance(VANILLA_TOKEN, 0), + getAccountBalance(newTokenTreasury).hasTokenBalance(VANILLA_TOKEN, 1), + getTokenInfo(VANILLA_TOKEN) + .hasTreasury(newTokenTreasury) + .hasPauseStatus(TokenPauseStatus.Unpaused) + .logged(), + getTokenNftInfo(VANILLA_TOKEN, 1) + .hasAccountID(newTokenTreasury) + .logged()); + } + + public HapiSpec updateWithTooLongNameAndSymbol() { + final var tooLongString = "ORIGINAL" + TxnUtils.randomUppercase(101); + final var tooLongSymbolTxn = "tooLongSymbolTxn"; + final AtomicReference vanillaTokenID = new AtomicReference<>(); + return propertyPreservingHapiSpec("updateWithTooLongNameAndSymbol") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(TOKEN_TREASURY), + newKeyNamed(MULTI_KEY).shape(ED25519_ON), + cryptoCreate(ACCOUNT).key(MULTI_KEY).balance(ONE_MILLION_HBARS), + uploadInitCode(TOKEN_UPDATE_CONTRACT), + contractCreate(TOKEN_UPDATE_CONTRACT).gas(GAS_TO_OFFER), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .initialSupply(1000) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .pauseKey(MULTI_KEY) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + tokenAssociate(ACCOUNT, VANILLA_TOKEN)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_UPDATE_CONTRACT, + "checkNameAndSymbolLength", + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + tooLongString, + CUSTOM_SYMBOL) + .via(UPDATE_TXN) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + TOKEN_UPDATE_CONTRACT, + "checkNameAndSymbolLength", + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + CUSTOM_NAME, + tooLongString) + .via(tooLongSymbolTxn) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)))) + .then(withOpContext((spec, opLog) -> allRunFor( + spec, + childRecordsCheck( + UPDATE_TXN, + CONTRACT_REVERT_EXECUTED, + TransactionRecordAsserts.recordWith().status(TOKEN_NAME_TOO_LONG)), + childRecordsCheck( + tooLongSymbolTxn, + CONTRACT_REVERT_EXECUTED, + TransactionRecordAsserts.recordWith().status(TOKEN_SYMBOL_TOO_LONG))))); + } + + private HapiSpec updateTokenWithKeysNegative() { + final var updateTokenWithKeysFunc = "updateTokenWithKeys"; + final var NO_FEE_SCHEDULE_KEY_TXN = "NO_FEE_SCHEDULE_KEY_TXN"; + final var NO_PAUSE_KEY_TXN = "NO_PAUSE_KEY_TXN"; + final var NO_KYC_KEY_TXN = "NO_KYC_KEY_TXN"; + final var NO_WIPE_KEY_TXN = "NO_WIPE_KEY_TXN"; + final var NO_FREEZE_KEY_TXN = "NO_FREEZE_KEY_TXN"; + final var NO_SUPPLY_KEY_TXN = "NO_SUPPLY_KEY_TXN"; + final List> tokenList = new ArrayList<>(); + final AtomicReference vanillaTokenID = new AtomicReference<>(); + return propertyPreservingHapiSpec("updateTokenWithKeysNegative") 
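+                // Each of the tokens created below omits some key, so the corresponding update call
+                // is expected to revert with the matching TOKEN_HAS_NO_*_KEY child-record status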
+ .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ED25519KEY).shape(ED25519), + newKeyNamed(ECDSA_KEY).shape(SECP256K1), + newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), + newKeyNamed(MULTI_KEY).shape(ED25519_ON), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(ACCOUNT) + .balance(ONE_MILLION_HBARS) + .key(MULTI_KEY) + .maxAutomaticTokenAssociations(100), + cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), + uploadInitCode(TOKEN_UPDATE_CONTRACT), + contractCreate(TOKEN_UPDATE_CONTRACT).gas(GAS_TO_OFFER), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .feeScheduleKey(MULTI_KEY) + .exposingCreatedIdTo(id -> tokenList.add(new AtomicReference<>(asToken(id)))), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .feeScheduleKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .exposingCreatedIdTo(id -> tokenList.add(new AtomicReference<>(asToken(id)))), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .feeScheduleKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .wipeKey(MULTI_KEY) + .exposingCreatedIdTo(id -> tokenList.add(new AtomicReference<>(asToken(id)))), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .pauseKey(MULTI_KEY) + .feeScheduleKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .wipeKey(MULTI_KEY) + .exposingCreatedIdTo(id -> tokenList.add(new AtomicReference<>(asToken(id)))), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .freezeKey(MULTI_KEY) + .adminKey(MULTI_KEY) + .pauseKey(MULTI_KEY) + .feeScheduleKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .wipeKey(MULTI_KEY) + .exposingCreatedIdTo(id -> tokenList.add(new AtomicReference<>(asToken(id))))) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_UPDATE_CONTRACT, + updateTokenWithKeysFunc, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + spec.registry() + .getKey(ECDSA_KEY) + .getECDSASecp256K1() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) + .via(NO_FEE_SCHEDULE_KEY_TXN) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + TOKEN_UPDATE_CONTRACT, + updateTokenWithKeysFunc, + HapiParserUtil.asHeadlongAddress( + asAddress(tokenList.get(0).get())), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + spec.registry() + .getKey(ECDSA_KEY) + .getECDSASecp256K1() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) + .via(NO_SUPPLY_KEY_TXN) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + 
.hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + TOKEN_UPDATE_CONTRACT, + updateTokenWithKeysFunc, + HapiParserUtil.asHeadlongAddress( + asAddress(tokenList.get(1).get())), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + spec.registry() + .getKey(ECDSA_KEY) + .getECDSASecp256K1() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) + .via(NO_WIPE_KEY_TXN) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + TOKEN_UPDATE_CONTRACT, + updateTokenWithKeysFunc, + HapiParserUtil.asHeadlongAddress( + asAddress(tokenList.get(2).get())), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + spec.registry() + .getKey(ECDSA_KEY) + .getECDSASecp256K1() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) + .via(NO_PAUSE_KEY_TXN) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + TOKEN_UPDATE_CONTRACT, + updateTokenWithKeysFunc, + HapiParserUtil.asHeadlongAddress( + asAddress(tokenList.get(3).get())), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + spec.registry() + .getKey(ECDSA_KEY) + .getECDSASecp256K1() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) + .via(NO_FREEZE_KEY_TXN) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + TOKEN_UPDATE_CONTRACT, + updateTokenWithKeysFunc, + HapiParserUtil.asHeadlongAddress( + asAddress(tokenList.get(4).get())), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + spec.registry() + .getKey(ECDSA_KEY) + .getECDSASecp256K1() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) + .via(NO_KYC_KEY_TXN) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)))) + .then(withOpContext((spec, ignore) -> allRunFor( + spec, + childRecordsCheck( + NO_FEE_SCHEDULE_KEY_TXN, + CONTRACT_REVERT_EXECUTED, + TransactionRecordAsserts.recordWith().status(TOKEN_HAS_NO_FEE_SCHEDULE_KEY)), + childRecordsCheck( + NO_SUPPLY_KEY_TXN, + CONTRACT_REVERT_EXECUTED, + TransactionRecordAsserts.recordWith().status(TOKEN_HAS_NO_SUPPLY_KEY)), + childRecordsCheck( + NO_WIPE_KEY_TXN, + CONTRACT_REVERT_EXECUTED, + TransactionRecordAsserts.recordWith().status(TOKEN_HAS_NO_WIPE_KEY)), + childRecordsCheck( + NO_PAUSE_KEY_TXN, + CONTRACT_REVERT_EXECUTED, + TransactionRecordAsserts.recordWith().status(TOKEN_HAS_NO_PAUSE_KEY)), + childRecordsCheck( + NO_FREEZE_KEY_TXN, + CONTRACT_REVERT_EXECUTED, + 
TransactionRecordAsserts.recordWith().status(TOKEN_HAS_NO_FREEZE_KEY)), + childRecordsCheck( + NO_KYC_KEY_TXN, + CONTRACT_REVERT_EXECUTED, + TransactionRecordAsserts.recordWith().status(TOKEN_HAS_NO_KYC_KEY))))); + } + + private HapiSpec updateOnlyTokenKeysAndGetTheUpdatedValues() { + + final AtomicReference vanillaTokenID = new AtomicReference<>(); + return propertyPreservingHapiSpec("updateOnlyTokenKeysAndGetTheUpdatedValues") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ED25519KEY).shape(ED25519), + newKeyNamed(ECDSA_KEY).shape(SECP256K1), + newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), + newKeyNamed(MULTI_KEY).shape(ED25519_ON), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(MULTI_KEY), + cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), + uploadInitCode(TOKEN_UPDATE_CONTRACT), + contractCreate(TOKEN_UPDATE_CONTRACT).gas(GAS_TO_OFFER), + tokenCreate(VANILLA_TOKEN) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .feeScheduleKey(MULTI_KEY) + .pauseKey(MULTI_KEY) + .wipeKey(MULTI_KEY) + .freezeKey(MULTI_KEY) + .kycKey(MULTI_KEY) + .initialSupply(1_000) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + tokenAssociate(ACCOUNT, VANILLA_TOKEN), + grantTokenKyc(VANILLA_TOKEN, ACCOUNT), + cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT))) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_UPDATE_CONTRACT, + UPDATE_KEY_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + spec.registry() + .getKey(ECDSA_KEY) + .getECDSASecp256K1() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT), + newKeyNamed(DELEGATE_KEY).shape(DELEGATE_CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT)), + newKeyNamed(TOKEN_UPDATE_AS_KEY).shape(CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT)), + contractCall( + TOKEN_UPDATE_CONTRACT, + GET_KEY_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + BigInteger.valueOf(ADMIN_KEY_TYPE)) + .via(GET_ADMIN_KEY_TXN), + contractCall( + TOKEN_UPDATE_CONTRACT, + GET_KEY_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + BigInteger.valueOf(KYC_KEY_TYPE)) + .via(GET_KYC_KEY_TXN), + contractCall( + TOKEN_UPDATE_CONTRACT, + GET_KEY_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + BigInteger.valueOf(FREEZE_KEY_TYPE)) + .via(GET_FREEZE_KEY_TXN), + contractCall( + TOKEN_UPDATE_CONTRACT, + GET_KEY_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + BigInteger.valueOf(WIPE_KEY_TYPE)) + .via(GET_WIPE_KEY_TXN), + contractCall( + TOKEN_UPDATE_CONTRACT, + GET_KEY_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + BigInteger.valueOf(FEE_SCHEDULE_KEY_TYPE)) + .via(GET_FEE_KEY_TXN), + contractCall( + TOKEN_UPDATE_CONTRACT, + GET_KEY_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + BigInteger.valueOf(SUPPLY_KEY_TYPE)) + .via(GET_SUPPLY_KEY_TXN), + contractCall( + TOKEN_UPDATE_CONTRACT, + GET_KEY_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + 
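+                                        // PAUSE_KEY_TYPE (64) is the key-type bit that selects the pause key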
BigInteger.valueOf(PAUSE_KEY_TYPE)) + .via(GET_PAUSE_KEY_TXN), + contractCallLocal( + TOKEN_UPDATE_CONTRACT, + GET_KEY_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + BigInteger.valueOf(ADMIN_KEY_TYPE))))) + .then(withOpContext((spec, opLog) -> allRunFor( + spec, + getTokenInfo(VANILLA_TOKEN) + .logged() + .hasTokenType(TokenType.FUNGIBLE_COMMON) + .hasSupplyType(TokenSupplyType.INFINITE) + .searchKeysGlobally() + .hasAdminKey(ED25519KEY) + .hasPauseKey(MULTI_KEY) + .hasKycKey(ED25519KEY) + .hasFreezeKey(ECDSA_KEY) + .hasWipeKey(ECDSA_KEY) + .hasFeeScheduleKey(DELEGATE_KEY) + .hasSupplyKey(TOKEN_UPDATE_AS_KEY) + .hasPauseKey(TOKEN_UPDATE_AS_KEY), + childRecordsCheck( + GET_ADMIN_KEY_TXN, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(ParsingConstants.FunctionType.HAPI_GET_TOKEN_KEY) + .withStatus(SUCCESS) + .withTokenKeyValue( + spec.registry().getKey(ED25519KEY))))), + childRecordsCheck( + GET_KYC_KEY_TXN, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(ParsingConstants.FunctionType.HAPI_GET_TOKEN_KEY) + .withStatus(SUCCESS) + .withTokenKeyValue( + spec.registry().getKey(ED25519KEY))))), + childRecordsCheck( + GET_FREEZE_KEY_TXN, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(ParsingConstants.FunctionType.HAPI_GET_TOKEN_KEY) + .withStatus(SUCCESS) + .withTokenKeyValue( + spec.registry().getKey(ECDSA_KEY))))), + childRecordsCheck( + GET_WIPE_KEY_TXN, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(ParsingConstants.FunctionType.HAPI_GET_TOKEN_KEY) + .withStatus(SUCCESS) + .withTokenKeyValue( + spec.registry().getKey(ECDSA_KEY))))), + childRecordsCheck( + GET_FEE_KEY_TXN, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(ParsingConstants.FunctionType.HAPI_GET_TOKEN_KEY) + .withStatus(SUCCESS) + .withTokenKeyValue( + spec.registry().getKey(DELEGATE_KEY))))), + childRecordsCheck( + GET_SUPPLY_KEY_TXN, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(ParsingConstants.FunctionType.HAPI_GET_TOKEN_KEY) + .withStatus(SUCCESS) + .withTokenKeyValue( + spec.registry().getKey(TOKEN_UPDATE_AS_KEY))))), + childRecordsCheck( + GET_PAUSE_KEY_TXN, + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(ParsingConstants.FunctionType.HAPI_GET_TOKEN_KEY) + .withStatus(SUCCESS) + .withTokenKeyValue( + spec.registry().getKey(TOKEN_UPDATE_AS_KEY)))))))); + } + + public HapiSpec updateOnlyKeysForNonFungibleToken() { + final AtomicReference nftToken = new AtomicReference<>(); + return propertyPreservingHapiSpec("updateOnlyKeysForNonFungibleToken") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(TOKEN_TREASURY), + newKeyNamed(ED25519KEY).shape(ED25519), + newKeyNamed(ECDSA_KEY).shape(SECP256K1), + newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), + newKeyNamed(MULTI_KEY).shape(ED25519_ON), + cryptoCreate(ACCOUNT).key(MULTI_KEY).balance(ONE_MILLION_HBARS), 
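+                        // The NFT below starts with every key set to MULTI_KEY; the contract call then replaces them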
+ cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), + uploadInitCode(TOKEN_UPDATE_CONTRACT), + contractCreate(TOKEN_UPDATE_CONTRACT).gas(GAS_TO_OFFER), + tokenCreate(NFT_TOKEN) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasury(TOKEN_TREASURY) + .initialSupply(0) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .feeScheduleKey(MULTI_KEY) + .pauseKey(MULTI_KEY) + .wipeKey(MULTI_KEY) + .freezeKey(MULTI_KEY) + .kycKey(MULTI_KEY) + .exposingCreatedIdTo(id -> nftToken.set(asToken(id))), + mintToken(VANILLA_TOKEN, List.of(ByteString.copyFromUtf8("nft3"))), + tokenAssociate(ACCOUNT, VANILLA_TOKEN)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_UPDATE_CONTRACT, + UPDATE_KEY_FUNC, + HapiParserUtil.asHeadlongAddress(asAddress(nftToken.get())), + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + spec.registry() + .getKey(ECDSA_KEY) + .getECDSASecp256K1() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT)))) + .via(UPDATE_TXN) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT), + newKeyNamed(DELEGATE_KEY).shape(DELEGATE_CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT)), + newKeyNamed(TOKEN_UPDATE_AS_KEY).shape(CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT))))) + .then(withOpContext((spec, opLog) -> allRunFor( + spec, + getTokenInfo(NFT_TOKEN) + .logged() + .hasTokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .hasSupplyType(TokenSupplyType.INFINITE) + .searchKeysGlobally() + .hasAdminKey(ED25519KEY) + .hasPauseKey(MULTI_KEY) + .hasKycKey(ED25519KEY) + .hasFreezeKey(ECDSA_KEY) + .hasWipeKey(ECDSA_KEY) + .hasFeeScheduleKey(DELEGATE_KEY) + .hasSupplyKey(TOKEN_UPDATE_AS_KEY) + .hasPauseKey(TOKEN_UPDATE_AS_KEY)))); + } + + private HapiSpec updateTokenWithoutNameSymbolMemo() { + final var updateTokenWithoutNameSymbolMemoFunc = "updateTokenWithoutNameSymbolMemo"; + final AtomicReference vanillaTokenID = new AtomicReference<>(); + return propertyPreservingHapiSpec("updateTokenWithoutNameSymbolMemo") + .preserving(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(ED25519KEY).shape(ED25519), + newKeyNamed(ECDSA_KEY).shape(SECP256K1), + newKeyNamed(ACCOUNT_TO_ASSOCIATE_KEY), + newKeyNamed(MULTI_KEY).shape(ED25519_ON), + cryptoCreate(TOKEN_TREASURY), + cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(MULTI_KEY), + cryptoCreate(ACCOUNT_TO_ASSOCIATE).key(ACCOUNT_TO_ASSOCIATE_KEY), + uploadInitCode(TOKEN_UPDATE_CONTRACT), + contractCreate(TOKEN_UPDATE_CONTRACT).gas(GAS_TO_OFFER), + tokenCreate(VANILLA_TOKEN) + .symbol(CUSTOM_SYMBOL) + .entityMemo(CUSTOM_MEMO) + .tokenType(FUNGIBLE_COMMON) + .treasury(TOKEN_TREASURY) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .feeScheduleKey(MULTI_KEY) + .pauseKey(MULTI_KEY) + .wipeKey(MULTI_KEY) + .freezeKey(MULTI_KEY) + .kycKey(MULTI_KEY) + .initialSupply(1_000) + .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), + tokenAssociate(ACCOUNT, VANILLA_TOKEN), + grantTokenKyc(VANILLA_TOKEN, ACCOUNT), + cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT))) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + contractCall( + TOKEN_UPDATE_CONTRACT, + updateTokenWithoutNameSymbolMemoFunc, + HapiParserUtil.asHeadlongAddress(new byte[20]), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + 
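+                                        // (the zero token address above makes this first call revert with INVALID_TOKEN_ID)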
spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + spec.registry() + .getKey(ECDSA_KEY) + .getECDSASecp256K1() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + AUTO_RENEW_PERIOD) + .via(UPDATE_TXN) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + TOKEN_UPDATE_CONTRACT, + "updateTokenWithoutNameSymbolMemo", + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + spec.registry() + .getKey(ED25519KEY) + .getEd25519() + .toByteArray(), + spec.registry() + .getKey(ECDSA_KEY) + .getECDSASecp256K1() + .toByteArray(), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getContractId(TOKEN_UPDATE_CONTRACT))), + HapiParserUtil.asHeadlongAddress( + asAddress(spec.registry().getAccountID(ACCOUNT))), + AUTO_RENEW_PERIOD) + .gas(GAS_TO_OFFER) + .sending(DEFAULT_AMOUNT_TO_SEND) + .signedBy(GENESIS, ACCOUNT) + .alsoSigningWithFullPrefix(ACCOUNT), + newKeyNamed(DELEGATE_KEY).shape(DELEGATE_CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT)), + newKeyNamed(TOKEN_UPDATE_AS_KEY).shape(CONTRACT.signedWith(TOKEN_UPDATE_CONTRACT))))) + .then( + childRecordsCheck( + UPDATE_TXN, + CONTRACT_REVERT_EXECUTED, + TransactionRecordAsserts.recordWith().status(INVALID_TOKEN_ID)), + sourcing(() -> getTokenInfo(VANILLA_TOKEN) + .logged() + .hasTokenType(TokenType.FUNGIBLE_COMMON) + .hasSymbol(CUSTOM_SYMBOL) + .hasName(VANILLA_TOKEN) + .hasEntityMemo(CUSTOM_MEMO) + .hasTreasury(ACCOUNT) + .hasAutoRenewAccount(ACCOUNT) + .hasAutoRenewPeriod(AUTO_RENEW_PERIOD) + .hasSupplyType(TokenSupplyType.INFINITE) + .searchKeysGlobally() + .hasAdminKey(ED25519KEY) + .hasPauseKey(MULTI_KEY) + .hasKycKey(ED25519KEY) + .hasFreezeKey(ECDSA_KEY) + .hasWipeKey(ECDSA_KEY) + .hasFeeScheduleKey(DELEGATE_KEY) + .hasSupplyKey(TOKEN_UPDATE_AS_KEY) + .hasPauseKey(TOKEN_UPDATE_AS_KEY))); + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TopLevelSigsCanBeToggledByPrecompileTypeSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TopLevelSigsCanBeToggledByPrecompileTypeSuite.java index 98cc2ebdf38e..09818deba79c 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TopLevelSigsCanBeToggledByPrecompileTypeSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/TopLevelSigsCanBeToggledByPrecompileTypeSuite.java @@ -58,13 +58,10 @@ import static com.hedera.services.bdd.suites.contract.precompile.CreatePrecompileSuite.ECDSA_KEY; import static com.hedera.services.bdd.suites.contract.precompile.CreatePrecompileSuite.ED25519KEY; import static com.hedera.services.bdd.suites.contract.precompile.CryptoTransferHTSSuite.DELEGATE_KEY; -import static com.hedera.services.bdd.suites.contract.precompile.DeleteTokenPrecompileSuite.DELETE_TOKEN_CONTRACT; -import static com.hedera.services.bdd.suites.contract.precompile.DeleteTokenPrecompileSuite.TOKEN_DELETE_FUNCTION; import static com.hedera.services.bdd.suites.contract.precompile.FreezeUnfreezeTokenPrecompileSuite.FREEZE_CONTRACT; import static 
com.hedera.services.bdd.suites.contract.precompile.FreezeUnfreezeTokenPrecompileSuite.TOKEN_FREEZE_FUNC; import static com.hedera.services.bdd.suites.contract.precompile.FreezeUnfreezeTokenPrecompileSuite.TOKEN_UNFREEZE_FUNC; import static com.hedera.services.bdd.suites.contract.precompile.GrantRevokeKycSuite.GRANT_REVOKE_KYC_CONTRACT; -import static com.hedera.services.bdd.suites.contract.precompile.GrantRevokeKycSuite.SECOND_ACCOUNT; import static com.hedera.services.bdd.suites.contract.precompile.GrantRevokeKycSuite.TOKEN_GRANT_KYC; import static com.hedera.services.bdd.suites.contract.precompile.GrantRevokeKycSuite.TOKEN_REVOKE_KYC; import static com.hedera.services.bdd.suites.contract.precompile.PauseUnpauseTokenAccountPrecompileSuite.PAUSE_TOKEN_ACCOUNT_FUNCTION_NAME; @@ -75,16 +72,10 @@ import static com.hedera.services.bdd.suites.contract.precompile.TokenUpdatePrecompileSuite.CUSTOM_SYMBOL; import static com.hedera.services.bdd.suites.contract.precompile.TokenUpdatePrecompileSuite.TOKEN_UPDATE_AS_KEY; import static com.hedera.services.bdd.suites.contract.precompile.TokenUpdatePrecompileSuite.TOKEN_UPDATE_CONTRACT; -import static com.hedera.services.bdd.suites.contract.precompile.WipeTokenAccountPrecompileSuite.ADMIN_ACCOUNT; -import static com.hedera.services.bdd.suites.contract.precompile.WipeTokenAccountPrecompileSuite.GAS_TO_OFFER; -import static com.hedera.services.bdd.suites.contract.precompile.WipeTokenAccountPrecompileSuite.WIPE_CONTRACT; -import static com.hedera.services.bdd.suites.contract.precompile.WipeTokenAccountPrecompileSuite.WIPE_FUNGIBLE_TOKEN; -import static com.hedera.services.bdd.suites.contract.precompile.WipeTokenAccountPrecompileSuite.WIPE_KEY; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; import static com.hedera.services.bdd.suites.crypto.CryptoApproveAllowanceSuite.FREEZE_KEY; import static com.hedera.services.bdd.suites.crypto.CryptoApproveAllowanceSuite.KYC_KEY; import static com.hedera.services.bdd.suites.crypto.CryptoApproveAllowanceSuite.PAUSE_KEY; -import static com.hedera.services.bdd.suites.crypto.CryptoCreateSuite.ACCOUNT; -import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.MULTI_KEY; import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; import static com.hedera.services.bdd.suites.token.TokenTransactSpecs.SUPPLY_KEY; import static com.hedera.services.yahcli.commands.validation.ValidationCommand.TOKEN; @@ -108,9 +99,20 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +@SuppressWarnings("java:S1192") // "string literal should not be duplicated" - this rule makes test suites worse public class TopLevelSigsCanBeToggledByPrecompileTypeSuite extends HapiSuite { private static final Logger log = LogManager.getLogger(TopLevelSigsCanBeToggledByPrecompileTypeSuite.class); - private static final String CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS = "contracts.allowSystemUseOfHapiSigs"; + + public static final String DELETE_TOKEN_CONTRACT = "DeleteTokenContract"; + public static final String TOKEN_DELETE_FUNCTION = "tokenDelete"; + public static final String WIPE_CONTRACT = "WipeTokenAccount"; + public static final String ADMIN_ACCOUNT = "admin"; + private static final String ACCOUNT = "anybody"; + private static final String SECOND_ACCOUNT = "anybodySecond"; + public static final String WIPE_KEY = "wipeKey"; + private static final String MULTI_KEY = "purpose"; + public static final 
int GAS_TO_OFFER = 1_000_000; + public static final String WIPE_FUNGIBLE_TOKEN = "wipeFungibleToken"; public static void main(String... args) { new TopLevelSigsCanBeToggledByPrecompileTypeSuite().runSuiteSync(); @@ -131,7 +133,6 @@ public List getSpecsInSuite() { } private HapiSpec canToggleTopLevelSigUsageForWipePrecompile() { - final String ALLOW_SYSTEM_USE_OF_HAPI_SIGS = CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; final var failedWipeTxn = "failedWipeTxn"; final var succeededWipeTxn = "succeededWipeTxn"; @@ -139,7 +140,7 @@ private HapiSpec canToggleTopLevelSigUsageForWipePrecompile() { final AtomicReference accountID = new AtomicReference<>(); final AtomicReference vanillaTokenID = new AtomicReference<>(); return propertyPreservingHapiSpec("CanToggleTopLevelSigUsageForWipePrecompile") - .preserving(ALLOW_SYSTEM_USE_OF_HAPI_SIGS) + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS) .given( newKeyNamed(WIPE_KEY), cryptoCreate(ADMIN_ACCOUNT).exposingCreatedIdTo(adminAccountID::set), @@ -156,7 +157,7 @@ private HapiSpec canToggleTopLevelSigUsageForWipePrecompile() { tokenAssociate(ACCOUNT, VANILLA_TOKEN), cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT)), // First revoke use of top-level signatures from all precompiles - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) .when( // Trying to wipe token with top-level signatures should fail sourcing(() -> contractCall( @@ -171,7 +172,7 @@ private HapiSpec canToggleTopLevelSigUsageForWipePrecompile() { .gas(GAS_TO_OFFER) .hasKnownStatus(CONTRACT_REVERT_EXECUTED)), // But now restore use of top-level signatures for the token wipe precompile - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenAccountWipe"), + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenAccountWipe"), // Now the same call should succeed sourcing(() -> contractCall( WIPE_CONTRACT, @@ -193,13 +194,12 @@ private HapiSpec canToggleTopLevelSigUsageForWipePrecompile() { } private HapiSpec canToggleTopLevelSigUsageForUpdatePrecompile() { - final String ALLOW_SYSTEM_USE_OF_HAPI_SIGS = CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; final var failedUpdateTxn = "failedUpdateTxn"; final var succeededUpdateTxn = "succeededUpdateTxn"; final AtomicReference vanillaTokenID = new AtomicReference<>(); return propertyPreservingHapiSpec("CanToggleTopLevelSigUsageForWipePrecompile") - .preserving(ALLOW_SYSTEM_USE_OF_HAPI_SIGS) + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS) .given( newKeyNamed(ED25519KEY).shape(ED25519), newKeyNamed(ECDSA_KEY).shape(SECP256K1), @@ -226,7 +226,7 @@ private HapiSpec canToggleTopLevelSigUsageForUpdatePrecompile() { grantTokenKyc(VANILLA_TOKEN, ACCOUNT), cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT)), // First revoke use of top-level signatures from all precompiles - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) .when( // Trying to update token with top-level signatures should fail withOpContext((spec, opLog) -> allRunFor( @@ -261,7 +261,7 @@ private HapiSpec canToggleTopLevelSigUsageForUpdatePrecompile() { .hasKnownStatus(CONTRACT_REVERT_EXECUTED), // But now restore use of top-level signatures for // the token update precompile - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenUpdate"), + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenUpdate"), contractCall( TOKEN_UPDATE_CONTRACT, "updateTokenWithAllFields", @@ -301,7 +301,6 @@ private HapiSpec 
canToggleTopLevelSigUsageForUpdatePrecompile() { } private HapiSpec canToggleTopLevelSigUsageForPauseAndUnpausePrecompile() { - final String ALLOW_SYSTEM_USE_OF_HAPI_SIGS = CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; final var failedPauseTxn = "failedPauseTxn"; final var failedUnpauseTxn = "failedUnpauseTxn"; final var succeededPauseTxn = "succeededPauseTxn"; @@ -311,7 +310,7 @@ private HapiSpec canToggleTopLevelSigUsageForPauseAndUnpausePrecompile() { final AtomicReference accountID = new AtomicReference<>(); return propertyPreservingHapiSpec("CanToggleTopLevelSigUsageForPauseAndUnpausePrecompile") - .preserving(ALLOW_SYSTEM_USE_OF_HAPI_SIGS) + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS) .given( newKeyNamed(PAUSE_KEY), newKeyNamed(MULTI_KEY), @@ -329,7 +328,7 @@ private HapiSpec canToggleTopLevelSigUsageForPauseAndUnpausePrecompile() { tokenAssociate(ACCOUNT, VANILLA_TOKEN), cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT)), // First revoke use of top-level signatures from all precompiles - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) .when( // Trying to pause with top-level signatures should fail sourcing(() -> contractCall( @@ -342,7 +341,7 @@ private HapiSpec canToggleTopLevelSigUsageForPauseAndUnpausePrecompile() { .gas(GAS_TO_OFFER) .via(failedPauseTxn)), // But now restore use of top-level signatures for the pause precompile - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenPause"), + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenPause"), // Now the same call should succeed sourcing(() -> contractCall( PAUSE_UNPAUSE_CONTRACT, @@ -354,7 +353,7 @@ private HapiSpec canToggleTopLevelSigUsageForPauseAndUnpausePrecompile() { .gas(GAS_TO_OFFER) .via(succeededPauseTxn)), // revoke use of top-level signatures from all precompiles again - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, ""), + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, ""), // Now the same call should succeed sourcing(() -> contractCall( PAUSE_UNPAUSE_CONTRACT, @@ -366,7 +365,7 @@ private HapiSpec canToggleTopLevelSigUsageForPauseAndUnpausePrecompile() { .gas(GAS_TO_OFFER) .via(failedUnpauseTxn)), // But now restore use of top-level signatures for the unpause precompile - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenUnpause"), + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenUnpause"), // Now the same call should succeed sourcing(() -> contractCall( PAUSE_UNPAUSE_CONTRACT, @@ -439,12 +438,11 @@ private HapiSpec canToggleTopLevelSigUsageForAssociatePrecompile() { } private HapiSpec canToggleTopLevelSigUsageForBurnPrecompile() { - final String ALLOW_SYSTEM_USE_OF_HAPI_SIGS = CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; final var failedBurnTxn = "failedBurnTxn"; final var succeededBurnTxn = "succeededBurnTxn"; return propertyPreservingHapiSpec("CanToggleTopLevelSigUsageForBurnPrecompile") - .preserving(ALLOW_SYSTEM_USE_OF_HAPI_SIGS) + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS) .given( newKeyNamed(MULTI_KEY), newKeyNamed(SUPPLY_KEY), @@ -467,7 +465,7 @@ private HapiSpec canToggleTopLevelSigUsageForBurnPrecompile() { .via(CREATION_TX) .gas(GAS_TO_OFFER))), // First revoke use of top-level signatures from all precompiles - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) .when( // Trying to burn with top-level signatures should fail sourcing(() -> contractCall( @@ -478,7 +476,7 @@ private HapiSpec canToggleTopLevelSigUsageForBurnPrecompile() { 
.via(failedBurnTxn) .hasKnownStatus(CONTRACT_REVERT_EXECUTED)), // But now restore use of top-level signatures for the burn precompile - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenBurn"), + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenBurn"), // Now the same call should succeed sourcing(() -> contractCall( THE_BURN_CONTRACT, BURN_TOKEN_WITH_EVENT, BigInteger.valueOf(10L), new long[0]) @@ -496,7 +494,6 @@ private HapiSpec canToggleTopLevelSigUsageForBurnPrecompile() { } private HapiSpec canToggleTopLevelSigUsageForMintPrecompile() { - final String ALLOW_SYSTEM_USE_OF_HAPI_SIGS = CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; final var tokenToMint = "tokenToMint"; final var failedMintTxn = "failedMintTxn"; final var succeededMintTxn = "succeededMintTxn"; @@ -504,7 +501,7 @@ private HapiSpec canToggleTopLevelSigUsageForMintPrecompile() { final AtomicReference
accountAddress = new AtomicReference<>(); final AtomicReference fungible = new AtomicReference<>(); return propertyPreservingHapiSpec("CanToggleTopLevelSigUsageForMintPrecompile") - .preserving(ALLOW_SYSTEM_USE_OF_HAPI_SIGS) + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS) .given( newKeyNamed(MULTI_KEY), newKeyNamed(SUPPLY_KEY), @@ -525,7 +522,7 @@ private HapiSpec canToggleTopLevelSigUsageForMintPrecompile() { sourcing(() -> contractCreate( MINT_CONTRACT, HapiParserUtil.asHeadlongAddress(asAddress(fungible.get())))), // First revoke use of top-level signatures from all precompiles - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) .when( // Trying to mint with top-level signatures should fail sourcing(() -> contractCall( @@ -535,7 +532,7 @@ private HapiSpec canToggleTopLevelSigUsageForMintPrecompile() { .via(failedMintTxn) .hasKnownStatus(CONTRACT_REVERT_EXECUTED)), // But now restore use of top-level signatures for the mint precompile - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenMint"), + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenMint"), // Now the same call should succeed sourcing(() -> contractCall( MINT_CONTRACT, MINT_FUNGIBLE_TOKEN_WITH_EVENT, BigInteger.valueOf(10L)) @@ -552,13 +549,12 @@ private HapiSpec canToggleTopLevelSigUsageForMintPrecompile() { } private HapiSpec canToggleTopLevelSigUsageForDeletePrecompile() { - final String ALLOW_SYSTEM_USE_OF_HAPI_SIGS = CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; final var failedDeleteTxn = "failedDeleteTxn"; final var succeededDeleteTxn = "succeededDeleteTxn"; final AtomicReference accountID = new AtomicReference<>(); final AtomicReference vanillaTokenID = new AtomicReference<>(); return propertyPreservingHapiSpec("CanToggleTopLevelSigUsageForDeletePrecompile") - .preserving(ALLOW_SYSTEM_USE_OF_HAPI_SIGS) + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS) .given( newKeyNamed(MULTI_KEY), cryptoCreate(ACCOUNT) @@ -577,7 +573,7 @@ private HapiSpec canToggleTopLevelSigUsageForDeletePrecompile() { tokenAssociate(ACCOUNT, VANILLA_TOKEN), cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT)), // First revoke use of top-level signatures from all precompiles - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) .when( // Trying to delete with top-level signatures should fail sourcing(() -> contractCall( @@ -589,7 +585,7 @@ private HapiSpec canToggleTopLevelSigUsageForDeletePrecompile() { .via(failedDeleteTxn) .hasKnownStatus(CONTRACT_REVERT_EXECUTED)), // But now restore use of top-level signatures for the delete precompile - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenDelete"), + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenDelete"), // Now the same call should succeed sourcing(() -> contractCall( DELETE_TOKEN_CONTRACT, @@ -608,7 +604,6 @@ private HapiSpec canToggleTopLevelSigUsageForDeletePrecompile() { } private HapiSpec canToggleTopLevelSigUsageForFreezeAndUnfreezePrecompile() { - final String ALLOW_SYSTEM_USE_OF_HAPI_SIGS = CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; final var failedFreezeTxn = "failedFreezeTxn"; final var failedUnfreezeTxn = "failedUnfreezeTxn"; final var succeededFreezeTxn = "succeededFreezeTxn"; @@ -618,7 +613,7 @@ private HapiSpec canToggleTopLevelSigUsageForFreezeAndUnfreezePrecompile() { final AtomicReference accountID = new AtomicReference<>(); return propertyPreservingHapiSpec("CanToggleTopLevelSigUsageForFreezeAndUnfreezePrecompile") - 
.preserving(ALLOW_SYSTEM_USE_OF_HAPI_SIGS) + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS) .given( newKeyNamed(FREEZE_KEY), newKeyNamed(MULTI_KEY), @@ -636,7 +631,7 @@ private HapiSpec canToggleTopLevelSigUsageForFreezeAndUnfreezePrecompile() { tokenAssociate(ACCOUNT, VANILLA_TOKEN), cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT)), // First revoke use of top-level signatures from all precompiles - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) .when( // Trying to freezing with top-level signatures should fail sourcing(() -> contractCall( @@ -650,7 +645,7 @@ private HapiSpec canToggleTopLevelSigUsageForFreezeAndUnfreezePrecompile() { .gas(GAS_TO_OFFER) .via(failedFreezeTxn)), // But now restore use of top-level signatures for the freeze precompile - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenFreezeAccount"), + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenFreezeAccount"), // Now the same call should succeed sourcing(() -> contractCall( FREEZE_CONTRACT, @@ -663,7 +658,7 @@ private HapiSpec canToggleTopLevelSigUsageForFreezeAndUnfreezePrecompile() { .gas(GAS_TO_OFFER) .via(succeededFreezeTxn)), // revoke use of top-level signatures from all precompiles again - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, ""), + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, ""), // Now the same call should succeed sourcing(() -> contractCall( FREEZE_CONTRACT, @@ -676,7 +671,7 @@ private HapiSpec canToggleTopLevelSigUsageForFreezeAndUnfreezePrecompile() { .gas(GAS_TO_OFFER) .via(failedUnfreezeTxn)), // But now restore use of top-level signatures for the unfreeze precompile - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenUnfreezeAccount"), + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenUnfreezeAccount"), // Now the same call should succeed sourcing(() -> contractCall( FREEZE_CONTRACT, @@ -701,7 +696,6 @@ private HapiSpec canToggleTopLevelSigUsageForFreezeAndUnfreezePrecompile() { } private HapiSpec canToggleTopLevelSigUsageForGrantKycAndRevokeKycPrecompile() { - final String ALLOW_SYSTEM_USE_OF_HAPI_SIGS = CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; final var failedGrantTxn = "failedGrantTxn"; final var failedRevokeTxn = "failedRevokeTxn"; final var succeededGrantTxn = "succeededGrantTxn"; @@ -712,7 +706,7 @@ private HapiSpec canToggleTopLevelSigUsageForGrantKycAndRevokeKycPrecompile() { final AtomicReference secondAccountID = new AtomicReference<>(); return propertyPreservingHapiSpec("canToggleTopLevelSigUsageForGrantKycAndRevokeKycPrecompile") - .preserving(ALLOW_SYSTEM_USE_OF_HAPI_SIGS) + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS) .given( newKeyNamed(KYC_KEY), cryptoCreate(ACCOUNT) @@ -732,7 +726,7 @@ private HapiSpec canToggleTopLevelSigUsageForGrantKycAndRevokeKycPrecompile() { tokenAssociate(ACCOUNT, VANILLA_TOKEN), tokenAssociate(SECOND_ACCOUNT, VANILLA_TOKEN), // First revoke use of top-level signatures from all precompiles - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) .when( // Trying to grant kyc with top-level signatures should fail sourcing(() -> contractCall( @@ -746,7 +740,7 @@ private HapiSpec canToggleTopLevelSigUsageForGrantKycAndRevokeKycPrecompile() { .gas(GAS_TO_OFFER) .via(failedGrantTxn)), // But now restore use of top-level signatures for the grant kyc precompile - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenGrantKycToAccount"), + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, 
"TokenGrantKycToAccount"), // Now the same call should succeed sourcing(() -> contractCall( GRANT_REVOKE_KYC_CONTRACT, @@ -759,7 +753,7 @@ private HapiSpec canToggleTopLevelSigUsageForGrantKycAndRevokeKycPrecompile() { .gas(GAS_TO_OFFER) .via(succeededGrantTxn)), // revoke use of top-level signatures from all precompiles again - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, ""), + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, ""), // Now the same call should succeed sourcing(() -> contractCall( GRANT_REVOKE_KYC_CONTRACT, @@ -772,7 +766,7 @@ private HapiSpec canToggleTopLevelSigUsageForGrantKycAndRevokeKycPrecompile() { .gas(GAS_TO_OFFER) .via(failedRevokeTxn)), // But now restore use of top-level signatures for the revoke kyc precompile - overriding(ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenRevokeKycToAccount"), + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "TokenRevokeKycToAccount"), // Now the same call should succeed sourcing(() -> contractCall( GRANT_REVOKE_KYC_CONTRACT, diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/V1SecurityModelOverrides.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/V1SecurityModelOverrides.java new file mode 100644 index 000000000000..35ddd35e0065 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/V1SecurityModelOverrides.java @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.suites.contract.precompile; + +public class V1SecurityModelOverrides { + + public static final String CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS = "contracts.allowSystemUseOfHapiSigs"; + + public static final String CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS = "contracts.maxNumWithHapiSigsAccess"; + public static final String CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF = "10_000_000"; + + private V1SecurityModelOverrides() {} +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/WipeTokenAccountPrecompileSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/WipeTokenAccountPrecompileSuite.java index 6a18305a2073..90740bfcf815 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/WipeTokenAccountPrecompileSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/WipeTokenAccountPrecompileSuite.java @@ -16,62 +16,14 @@ package com.hedera.services.bdd.suites.contract.precompile; -import static com.google.protobuf.ByteString.copyFromUtf8; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; -import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; -import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; -import static com.hedera.services.bdd.spec.infrastructure.providers.ops.crypto.RandomAccount.INITIAL_BALANCE; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoUpdate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; -import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; -import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; -import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; -import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; -import static com.hedera.services.bdd.suites.contract.Utils.asAddress; -import static com.hedera.services.bdd.suites.contract.Utils.asToken; -import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; -import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.ACCOUNT_DOES_NOT_OWN_WIPED_NFT; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_NFT_ID; -import static 
com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_WIPING_AMOUNT; -import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; -import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; -import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; - import com.hedera.services.bdd.spec.HapiSpec; -import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; import com.hedera.services.bdd.suites.HapiSuite; -import com.hederahashgraph.api.proto.java.AccountID; -import com.hederahashgraph.api.proto.java.TokenID; import java.util.List; -import java.util.concurrent.atomic.AtomicReference; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; public class WipeTokenAccountPrecompileSuite extends HapiSuite { private static final Logger log = LogManager.getLogger(WipeTokenAccountPrecompileSuite.class); - public static final String WIPE_CONTRACT = "WipeTokenAccount"; - public static final String ADMIN_ACCOUNT = "admin"; - private static final String ACCOUNT = "anybody"; - private static final String SECOND_ACCOUNT = "anybodySecond"; - public static final String WIPE_KEY = "wipeKey"; - private static final String MULTI_KEY = "purpose"; - public static final int GAS_TO_OFFER = 1_000_000; - public static final String WIPE_FUNGIBLE_TOKEN = "wipeFungibleToken"; - public static final String WIPE_NON_FUNGIBLE_TOKEN = "wipeNonFungibleToken"; public static void main(String... args) { new WipeTokenAccountPrecompileSuite().runSuiteAsync(); @@ -89,255 +41,6 @@ public boolean canRunConcurrent() { @Override public List getSpecsInSuite() { - return List.of(wipeFungibleTokenScenarios(), wipeNonFungibleTokenScenarios()); - } - - private HapiSpec wipeFungibleTokenScenarios() { - final AtomicReference adminAccountID = new AtomicReference<>(); - final AtomicReference accountID = new AtomicReference<>(); - final AtomicReference secondAccountID = new AtomicReference<>(); - final AtomicReference vanillaTokenID = new AtomicReference<>(); - - return defaultHapiSpec("WipeFungibleTokenScenarios") - .given( - newKeyNamed(WIPE_KEY), - cryptoCreate(ADMIN_ACCOUNT).exposingCreatedIdTo(adminAccountID::set), - cryptoCreate(ACCOUNT).exposingCreatedIdTo(accountID::set), - cryptoCreate(SECOND_ACCOUNT).exposingCreatedIdTo(secondAccountID::set), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(VANILLA_TOKEN) - .tokenType(FUNGIBLE_COMMON) - .treasury(TOKEN_TREASURY) - .wipeKey(WIPE_KEY) - .initialSupply(1_000) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - uploadInitCode(WIPE_CONTRACT), - contractCreate(WIPE_CONTRACT), - tokenAssociate(ACCOUNT, VANILLA_TOKEN), - tokenAssociate(SECOND_ACCOUNT, VANILLA_TOKEN), - cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT))) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - contractCall( - WIPE_CONTRACT, - WIPE_FUNGIBLE_TOKEN, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), - 10L) - .signedBy(GENESIS, ADMIN_ACCOUNT) - .alsoSigningWithFullPrefix(ADMIN_ACCOUNT) - .via("accountDoesNotOwnWipeKeyTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - cryptoUpdate(ADMIN_ACCOUNT).key(WIPE_KEY), - contractCall( - WIPE_CONTRACT, - WIPE_FUNGIBLE_TOKEN, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - 
HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), - 1_000L) - .signedBy(GENESIS, ADMIN_ACCOUNT) - .alsoSigningWithFullPrefix(ADMIN_ACCOUNT) - .via("amountLargerThanBalanceTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - WIPE_CONTRACT, - WIPE_FUNGIBLE_TOKEN, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(secondAccountID.get())), - 10L) - .signedBy(GENESIS, ADMIN_ACCOUNT) - .alsoSigningWithFullPrefix(ADMIN_ACCOUNT) - .via("accountDoesNotOwnTokensTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - WIPE_CONTRACT, - WIPE_FUNGIBLE_TOKEN, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), - 10L) - .signedBy(GENESIS, ADMIN_ACCOUNT) - .alsoSigningWithFullPrefix(ADMIN_ACCOUNT) - .via("wipeFungibleTxn") - .gas(GAS_TO_OFFER), - contractCall( - WIPE_CONTRACT, - WIPE_FUNGIBLE_TOKEN, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), - 0L) - .signedBy(GENESIS, ADMIN_ACCOUNT) - .alsoSigningWithFullPrefix(ADMIN_ACCOUNT) - .via("wipeFungibleTxnWithZeroAmount") - .gas(GAS_TO_OFFER)))) - .then( - childRecordsCheck( - "accountDoesNotOwnWipeKeyTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(INVALID_SIGNATURE) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INVALID_SIGNATURE)))), - childRecordsCheck( - "amountLargerThanBalanceTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(INVALID_WIPING_AMOUNT) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INVALID_WIPING_AMOUNT)))), - childRecordsCheck( - "accountDoesNotOwnTokensTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(INVALID_WIPING_AMOUNT) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INVALID_WIPING_AMOUNT)))), - childRecordsCheck( - "wipeFungibleTxnWithZeroAmount", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS)) - .gasUsed(14085L))), - getTokenInfo(VANILLA_TOKEN).hasTotalSupply(990), - getAccountBalance(ACCOUNT).hasTokenBalance(VANILLA_TOKEN, 490)); - } - - private HapiSpec wipeNonFungibleTokenScenarios() { - final AtomicReference adminAccountID = new AtomicReference<>(); - final AtomicReference accountID = new AtomicReference<>(); - final AtomicReference vanillaTokenID = new AtomicReference<>(); - - return defaultHapiSpec("WipeNonFungibleTokenScenarios") - .given( - newKeyNamed(WIPE_KEY), - newKeyNamed(MULTI_KEY), - cryptoCreate(ADMIN_ACCOUNT).exposingCreatedIdTo(adminAccountID::set), - cryptoCreate(ACCOUNT).balance(INITIAL_BALANCE).exposingCreatedIdTo(accountID::set), - cryptoCreate(TOKEN_TREASURY), - tokenCreate(VANILLA_TOKEN) - .tokenType(NON_FUNGIBLE_UNIQUE) - .treasury(TOKEN_TREASURY) - .wipeKey(WIPE_KEY) - .supplyKey(MULTI_KEY) - .initialSupply(0) - .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))), - mintToken(VANILLA_TOKEN, List.of(copyFromUtf8("First!"))), - mintToken(VANILLA_TOKEN, List.of(copyFromUtf8("Second!"))), - uploadInitCode(WIPE_CONTRACT), - contractCreate(WIPE_CONTRACT), - tokenAssociate(ACCOUNT, VANILLA_TOKEN), - cryptoTransfer(movingUnique(VANILLA_TOKEN, 1L).between(TOKEN_TREASURY, ACCOUNT))) - .when(withOpContext((spec, opLog) -> { - final var 
serialNumbers = new long[] {1L}; - allRunFor( - spec, - contractCall( - WIPE_CONTRACT, - WIPE_NON_FUNGIBLE_TOKEN, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), - serialNumbers) - .signedBy(GENESIS, ADMIN_ACCOUNT) - .alsoSigningWithFullPrefix(ADMIN_ACCOUNT) - .via("wipeNonFungibleAccountDoesNotOwnWipeKeyTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - cryptoUpdate(ADMIN_ACCOUNT).key(WIPE_KEY), - contractCall( - WIPE_CONTRACT, - WIPE_NON_FUNGIBLE_TOKEN, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), - new long[] {2L}) - .signedBy(GENESIS, ADMIN_ACCOUNT) - .alsoSigningWithFullPrefix(ADMIN_ACCOUNT) - .via("wipeNonFungibleAccountDoesNotOwnTheSerialTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - WIPE_CONTRACT, - WIPE_NON_FUNGIBLE_TOKEN, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), - new long[] {-2L}) - .signedBy(GENESIS, ADMIN_ACCOUNT) - .alsoSigningWithFullPrefix(ADMIN_ACCOUNT) - .via("wipeNonFungibleNegativeSerialTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - WIPE_CONTRACT, - WIPE_NON_FUNGIBLE_TOKEN, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), - new long[] {3L}) - .signedBy(GENESIS, ADMIN_ACCOUNT) - .alsoSigningWithFullPrefix(ADMIN_ACCOUNT) - .via("wipeNonFungibleSerialDoesNotExistsTxn") - .gas(GAS_TO_OFFER) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED), - contractCall( - WIPE_CONTRACT, - WIPE_NON_FUNGIBLE_TOKEN, - HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), - HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), - serialNumbers) - .signedBy(GENESIS, ADMIN_ACCOUNT) - .alsoSigningWithFullPrefix(ADMIN_ACCOUNT) - .via("wipeNonFungibleTxn") - .gas(GAS_TO_OFFER)); - })) - .then( - childRecordsCheck( - "wipeNonFungibleAccountDoesNotOwnWipeKeyTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(INVALID_SIGNATURE) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INVALID_SIGNATURE)))), - childRecordsCheck( - "wipeNonFungibleAccountDoesNotOwnTheSerialTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(ACCOUNT_DOES_NOT_OWN_WIPED_NFT) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .withStatus(ACCOUNT_DOES_NOT_OWN_WIPED_NFT)))), - childRecordsCheck( - "wipeNonFungibleNegativeSerialTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(INVALID_NFT_ID) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INVALID_NFT_ID)))), - childRecordsCheck( - "wipeNonFungibleSerialDoesNotExistsTxn", - CONTRACT_REVERT_EXECUTED, - recordWith() - .status(INVALID_NFT_ID) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(INVALID_NFT_ID)))), - childRecordsCheck( - "wipeNonFungibleTxn", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult( - htsPrecompileResult().withStatus(SUCCESS)) - .gasUsed(14085L))), - getTokenInfo(VANILLA_TOKEN).hasTotalSupply(1), - getAccountBalance(ACCOUNT).hasTokenBalance(VANILLA_TOKEN, 0)); + return List.of(); } } diff --git 
a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/WipeTokenAccountPrecompileV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/WipeTokenAccountPrecompileV1SecurityModelSuite.java new file mode 100644 index 000000000000..499bc9409bc3 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/precompile/WipeTokenAccountPrecompileV1SecurityModelSuite.java @@ -0,0 +1,359 @@ +/* + * Copyright (C) 2021-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.services.bdd.suites.contract.precompile; + +import static com.google.protobuf.ByteString.copyFromUtf8; +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.infrastructure.providers.ops.crypto.RandomAccount.INITIAL_BALANCE; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoUpdate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.mintToken; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; +import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.movingUnique; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overridingTwo; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.asToken; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static 
com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.VANILLA_TOKEN; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.ACCOUNT_DOES_NOT_OWN_WIPED_NFT; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_NFT_ID; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_SIGNATURE; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.INVALID_WIPING_AMOUNT; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; +import static com.hederahashgraph.api.proto.java.TokenType.FUNGIBLE_COMMON; +import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; + +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hederahashgraph.api.proto.java.AccountID; +import com.hederahashgraph.api.proto.java.TokenID; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +public class WipeTokenAccountPrecompileV1SecurityModelSuite extends HapiSuite { + private static final Logger log = LogManager.getLogger(WipeTokenAccountPrecompileV1SecurityModelSuite.class); + public static final String WIPE_CONTRACT = "WipeTokenAccount"; + public static final String ADMIN_ACCOUNT = "admin"; + private static final String ACCOUNT = "anybody"; + private static final String SECOND_ACCOUNT = "anybodySecond"; + public static final String WIPE_KEY = "wipeKey"; + private static final String MULTI_KEY = "purpose"; + public static final int GAS_TO_OFFER = 1_000_000; + public static final String WIPE_FUNGIBLE_TOKEN = "wipeFungibleToken"; + public static final String WIPE_NON_FUNGIBLE_TOKEN = "wipeNonFungibleToken"; + + public static void main(String... 
args) {
+        new WipeTokenAccountPrecompileV1SecurityModelSuite().runSuiteSync();
+    }
+
+    @Override
+    protected Logger getResultsLogger() {
+        return log;
+    }
+
+    @Override
+    public boolean canRunConcurrent() {
+        return false;
+    }
+
+    @Override
+    public List<HapiSpec> getSpecsInSuite() {
+        return List.of(wipeFungibleTokenScenarios(), wipeNonFungibleTokenScenarios());
+    }
+
+    private HapiSpec wipeFungibleTokenScenarios() {
+        final AtomicReference<AccountID> adminAccountID = new AtomicReference<>();
+        final AtomicReference<AccountID> accountID = new AtomicReference<>();
+        final AtomicReference<AccountID> secondAccountID = new AtomicReference<>();
+        final AtomicReference<TokenID> vanillaTokenID = new AtomicReference<>();
+
+        return propertyPreservingHapiSpec("wipeFungibleTokenScenarios")
+                .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS)
+                .given(
+                        overridingTwo(
+                                CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS,
+                                "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenAccountWipe",
+                                CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS,
+                                CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF),
+                        newKeyNamed(WIPE_KEY),
+                        cryptoCreate(ADMIN_ACCOUNT).exposingCreatedIdTo(adminAccountID::set),
+                        cryptoCreate(ACCOUNT).exposingCreatedIdTo(accountID::set),
+                        cryptoCreate(SECOND_ACCOUNT).exposingCreatedIdTo(secondAccountID::set),
+                        cryptoCreate(TOKEN_TREASURY),
+                        tokenCreate(VANILLA_TOKEN)
+                                .tokenType(FUNGIBLE_COMMON)
+                                .treasury(TOKEN_TREASURY)
+                                .wipeKey(WIPE_KEY)
+                                .initialSupply(1_000)
+                                .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))),
+                        uploadInitCode(WIPE_CONTRACT),
+                        contractCreate(WIPE_CONTRACT),
+                        tokenAssociate(ACCOUNT, VANILLA_TOKEN),
+                        tokenAssociate(SECOND_ACCOUNT, VANILLA_TOKEN),
+                        cryptoTransfer(moving(500, VANILLA_TOKEN).between(TOKEN_TREASURY, ACCOUNT)))
+                .when(withOpContext((spec, opLog) -> allRunFor(
+                        spec,
+                        contractCall(
+                                        WIPE_CONTRACT,
+                                        WIPE_FUNGIBLE_TOKEN,
+                                        HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())),
+                                        HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())),
+                                        10L)
+                                .signedBy(GENESIS, ADMIN_ACCOUNT)
+                                .alsoSigningWithFullPrefix(ADMIN_ACCOUNT)
+                                .via("accountDoesNotOwnWipeKeyTxn")
+                                .gas(GAS_TO_OFFER)
+                                .hasKnownStatus(CONTRACT_REVERT_EXECUTED),
+                        cryptoUpdate(ADMIN_ACCOUNT).key(WIPE_KEY),
+                        contractCall(
+                                        WIPE_CONTRACT,
+                                        WIPE_FUNGIBLE_TOKEN,
+                                        HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())),
+                                        HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())),
+                                        1_000L)
+                                .signedBy(GENESIS, ADMIN_ACCOUNT)
+                                .alsoSigningWithFullPrefix(ADMIN_ACCOUNT)
+                                .via("amountLargerThanBalanceTxn")
+                                .gas(GAS_TO_OFFER)
+                                .hasKnownStatus(CONTRACT_REVERT_EXECUTED),
+                        contractCall(
+                                        WIPE_CONTRACT,
+                                        WIPE_FUNGIBLE_TOKEN,
+                                        HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())),
+                                        HapiParserUtil.asHeadlongAddress(asAddress(secondAccountID.get())),
+                                        10L)
+                                .signedBy(GENESIS, ADMIN_ACCOUNT)
+                                .alsoSigningWithFullPrefix(ADMIN_ACCOUNT)
+                                .via("accountDoesNotOwnTokensTxn")
+                                .gas(GAS_TO_OFFER)
+                                .hasKnownStatus(CONTRACT_REVERT_EXECUTED),
+                        contractCall(
+                                        WIPE_CONTRACT,
+                                        WIPE_FUNGIBLE_TOKEN,
+                                        HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())),
+                                        HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())),
+                                        10L)
+                                .signedBy(GENESIS, ADMIN_ACCOUNT)
+                                .alsoSigningWithFullPrefix(ADMIN_ACCOUNT)
+                                .via("wipeFungibleTxn")
+                                .gas(GAS_TO_OFFER),
+                        contractCall(
+                                        WIPE_CONTRACT,
+                                        WIPE_FUNGIBLE_TOKEN,
+                                        HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())),
+                                        HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())),
+                                        0L)
+                                .signedBy(GENESIS, ADMIN_ACCOUNT)
+                                .alsoSigningWithFullPrefix(ADMIN_ACCOUNT)
+                                .via("wipeFungibleTxnWithZeroAmount")
+                                .gas(GAS_TO_OFFER))))
+                .then(
+                        childRecordsCheck(
+                                "accountDoesNotOwnWipeKeyTxn",
+                                CONTRACT_REVERT_EXECUTED,
+                                recordWith()
+                                        .status(INVALID_SIGNATURE)
+                                        .contractCallResult(resultWith()
+                                                .contractCallResult(
+                                                        htsPrecompileResult().withStatus(INVALID_SIGNATURE)))),
+                        childRecordsCheck(
+                                "amountLargerThanBalanceTxn",
+                                CONTRACT_REVERT_EXECUTED,
+                                recordWith()
+                                        .status(INVALID_WIPING_AMOUNT)
+                                        .contractCallResult(resultWith()
+                                                .contractCallResult(
+                                                        htsPrecompileResult().withStatus(INVALID_WIPING_AMOUNT)))),
+                        childRecordsCheck(
+                                "accountDoesNotOwnTokensTxn",
+                                CONTRACT_REVERT_EXECUTED,
+                                recordWith()
+                                        .status(INVALID_WIPING_AMOUNT)
+                                        .contractCallResult(resultWith()
+                                                .contractCallResult(
+                                                        htsPrecompileResult().withStatus(INVALID_WIPING_AMOUNT)))),
+                        childRecordsCheck(
+                                "wipeFungibleTxnWithZeroAmount",
+                                SUCCESS,
+                                recordWith()
+                                        .status(SUCCESS)
+                                        .contractCallResult(resultWith()
+                                                .contractCallResult(
+                                                        htsPrecompileResult().withStatus(SUCCESS))
+                                                .gasUsed(14085L))),
+                        getTokenInfo(VANILLA_TOKEN).hasTotalSupply(990),
+                        getAccountBalance(ACCOUNT).hasTokenBalance(VANILLA_TOKEN, 490));
+    }
+
+    private HapiSpec wipeNonFungibleTokenScenarios() {
+        final AtomicReference<AccountID> adminAccountID = new AtomicReference<>();
+        final AtomicReference<AccountID> accountID = new AtomicReference<>();
+        final AtomicReference<TokenID> vanillaTokenID = new AtomicReference<>();
+
+        return propertyPreservingHapiSpec("wipeNonFungibleTokenScenarios")
+                .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS)
+                .given(
+                        overridingTwo(
+                                CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS,
+                                "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenAccountWipe",
+                                CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS,
+                                CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF),
+                        newKeyNamed(WIPE_KEY),
+                        newKeyNamed(MULTI_KEY),
+                        cryptoCreate(ADMIN_ACCOUNT).exposingCreatedIdTo(adminAccountID::set),
+                        cryptoCreate(ACCOUNT).balance(INITIAL_BALANCE).exposingCreatedIdTo(accountID::set),
+                        cryptoCreate(TOKEN_TREASURY),
+                        tokenCreate(VANILLA_TOKEN)
+                                .tokenType(NON_FUNGIBLE_UNIQUE)
+                                .treasury(TOKEN_TREASURY)
+                                .wipeKey(WIPE_KEY)
+                                .supplyKey(MULTI_KEY)
+                                .initialSupply(0)
+                                .exposingCreatedIdTo(id -> vanillaTokenID.set(asToken(id))),
+                        mintToken(VANILLA_TOKEN, List.of(copyFromUtf8("First!"))),
+                        mintToken(VANILLA_TOKEN, List.of(copyFromUtf8("Second!"))),
+                        uploadInitCode(WIPE_CONTRACT),
+                        contractCreate(WIPE_CONTRACT),
+                        tokenAssociate(ACCOUNT, VANILLA_TOKEN),
+                        cryptoTransfer(movingUnique(VANILLA_TOKEN, 1L).between(TOKEN_TREASURY, ACCOUNT)))
+                .when(withOpContext((spec, opLog) -> {
+                    final var serialNumbers = new long[] {1L};
+                    allRunFor(
+                            spec,
+                            contractCall(
+                                            WIPE_CONTRACT,
+                                            WIPE_NON_FUNGIBLE_TOKEN,
+                                            HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())),
+                                            HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())),
+                                            serialNumbers)
+                                    .signedBy(GENESIS, ADMIN_ACCOUNT)
+                                    .alsoSigningWithFullPrefix(ADMIN_ACCOUNT)
+                                    .via("wipeNonFungibleAccountDoesNotOwnWipeKeyTxn")
+                                    .gas(GAS_TO_OFFER)
+                                    .hasKnownStatus(CONTRACT_REVERT_EXECUTED),
+                            cryptoUpdate(ADMIN_ACCOUNT).key(WIPE_KEY),
+                            contractCall(
+                                            WIPE_CONTRACT,
+                                            WIPE_NON_FUNGIBLE_TOKEN,
+                                            HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())),
+                                            HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())),
+                                            new long[] {2L})
+                                    .signedBy(GENESIS, ADMIN_ACCOUNT)
+                                    .alsoSigningWithFullPrefix(ADMIN_ACCOUNT)
+
.via("wipeNonFungibleAccountDoesNotOwnTheSerialTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + WIPE_CONTRACT, + WIPE_NON_FUNGIBLE_TOKEN, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), + new long[] {-2L}) + .signedBy(GENESIS, ADMIN_ACCOUNT) + .alsoSigningWithFullPrefix(ADMIN_ACCOUNT) + .via("wipeNonFungibleNegativeSerialTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + WIPE_CONTRACT, + WIPE_NON_FUNGIBLE_TOKEN, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), + new long[] {3L}) + .signedBy(GENESIS, ADMIN_ACCOUNT) + .alsoSigningWithFullPrefix(ADMIN_ACCOUNT) + .via("wipeNonFungibleSerialDoesNotExistsTxn") + .gas(GAS_TO_OFFER) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED), + contractCall( + WIPE_CONTRACT, + WIPE_NON_FUNGIBLE_TOKEN, + HapiParserUtil.asHeadlongAddress(asAddress(vanillaTokenID.get())), + HapiParserUtil.asHeadlongAddress(asAddress(accountID.get())), + serialNumbers) + .signedBy(GENESIS, ADMIN_ACCOUNT) + .alsoSigningWithFullPrefix(ADMIN_ACCOUNT) + .via("wipeNonFungibleTxn") + .gas(GAS_TO_OFFER)); + })) + .then( + childRecordsCheck( + "wipeNonFungibleAccountDoesNotOwnWipeKeyTxn", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_SIGNATURE) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(INVALID_SIGNATURE)))), + childRecordsCheck( + "wipeNonFungibleAccountDoesNotOwnTheSerialTxn", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(ACCOUNT_DOES_NOT_OWN_WIPED_NFT) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .withStatus(ACCOUNT_DOES_NOT_OWN_WIPED_NFT)))), + childRecordsCheck( + "wipeNonFungibleNegativeSerialTxn", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_NFT_ID) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(INVALID_NFT_ID)))), + childRecordsCheck( + "wipeNonFungibleSerialDoesNotExistsTxn", + CONTRACT_REVERT_EXECUTED, + recordWith() + .status(INVALID_NFT_ID) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(INVALID_NFT_ID)))), + childRecordsCheck( + "wipeNonFungibleTxn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult( + htsPrecompileResult().withStatus(SUCCESS)) + .gasUsed(14085L))), + getTokenInfo(VANILLA_TOKEN).hasTotalSupply(1), + getAccountBalance(ACCOUNT).hasTokenBalance(VANILLA_TOKEN, 0)); + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/traceability/TraceabilitySuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/traceability/TraceabilitySuite.java index 4a0b4c94d52f..b0eb83d0d5de 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/traceability/TraceabilitySuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/contract/traceability/TraceabilitySuite.java @@ -56,6 +56,7 @@ import static com.hedera.services.bdd.suites.contract.Utils.captureOneChildCreate2MetaFor; import static com.hedera.services.bdd.suites.contract.Utils.extractBytecodeUnhexed; import static com.hedera.services.bdd.suites.contract.Utils.getABIFor; +import static com.hedera.services.bdd.suites.contract.Utils.getNestedContractAddress; import static 
com.hedera.services.bdd.suites.contract.Utils.getResourcePath; import static com.hedera.services.bdd.suites.contract.opcodes.Create2OperationSuite.CONTRACT_REPORTED_ADDRESS_MESSAGE; import static com.hedera.services.bdd.suites.contract.opcodes.Create2OperationSuite.CONTRACT_REPORTED_LOG_MESSAGE; @@ -63,7 +64,6 @@ import static com.hedera.services.bdd.suites.contract.opcodes.Create2OperationSuite.EXPECTED_CREATE2_ADDRESS_MESSAGE; import static com.hedera.services.bdd.suites.contract.opcodes.Create2OperationSuite.GET_ADDRESS; import static com.hedera.services.bdd.suites.contract.opcodes.Create2OperationSuite.GET_BYTECODE; -import static com.hedera.services.bdd.suites.contract.precompile.AssociatePrecompileSuite.getNestedContractAddress; import static com.hedera.services.bdd.suites.crypto.AutoAccountCreationSuite.PARTY; import static com.hedera.services.bdd.suites.token.TokenAssociationSpecs.MULTI_KEY; import static com.hedera.services.stream.proto.ContractActionType.CALL; diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/EthereumSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/EthereumSuite.java index df97c547fb9f..8388cfab5982 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/EthereumSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/EthereumSuite.java @@ -30,7 +30,6 @@ import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAutoCreatedAccountBalance; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractBytecode; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractInfo; -import static com.hedera.services.bdd.spec.queries.QueryVerbs.getLiteralAliasContractInfo; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; @@ -78,8 +77,6 @@ import com.hedera.node.app.hapi.utils.ethereum.EthTxData.EthTransactionType; import com.hedera.services.bdd.spec.HapiSpec; import com.hedera.services.bdd.spec.assertions.ContractInfoAsserts; -import com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts; -import com.hedera.services.bdd.spec.keys.SigControl; import com.hedera.services.bdd.spec.queries.meta.HapiGetTxnRecord; import com.hedera.services.bdd.spec.transactions.TxnUtils; import com.hedera.services.bdd.suites.BddTestNameDoesNotMatchMethodName; @@ -93,12 +90,10 @@ import java.math.BigInteger; import java.util.Arrays; import java.util.List; -import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Stream; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.tuweni.bytes.Bytes; import org.bouncycastle.util.encoders.Hex; import org.junit.jupiter.api.Assertions; @@ -106,10 +101,9 @@ public class EthereumSuite extends HapiSuite { private static final Logger log = LogManager.getLogger(EthereumSuite.class); - private static final long depositAmount = 20_000L; + private static final long DEPOSIT_AMOUNT = 20_000L; private static final String PAY_RECEIVABLE_CONTRACT = "PayReceivable"; private static final String TOKEN_CREATE_CONTRACT = "NewTokenCreateContract"; - private static final String ERC721_CONTRACT_WITH_HTS_CALLS = "ERC721ContractWithHTSCalls"; private static final String 
HELLO_WORLD_MINT_CONTRACT = "HelloWorldMint"; public static final long GAS_LIMIT = 1_000_000; @@ -137,12 +131,9 @@ public List getSpecsInSuite() { feePaymentMatrix().stream(), Stream.of( invalidTxData(), - etx007FungibleTokenCreateWithFeesHappyPath(), etx008ContractCreateExecutesWithExpectedRecord(), etx009CallsToTokenAddresses(), etx010TransferToCryptoAccountSucceeds(), - etx012PrecompileCallSucceedsWhenNeededSignatureInEthTxn(), - etx013PrecompileCallSucceedsWhenNeededSignatureInHederaTxn(), etx013PrecompileCallFailsWhenSignatureMissingFromBothEthereumAndHederaTxn(), etx014ContractCreateInheritsSignerProperties(), etx009CallsToTokenAddresses(), @@ -150,7 +141,6 @@ public List getSpecsInSuite() { etx031InvalidNonceEthereumTxFailsAndChargesRelayer(), etxSvc003ContractGetBytecodeQueryReturnsDeployedCode(), sendingLargerBalanceThanAvailableFailsGracefully(), - setApproveForAllUsingLocalNodeSetupPasses(), directTransferWorksForERC20())) .toList(); } @@ -198,206 +188,6 @@ HapiSpec sendingLargerBalanceThanAvailableFailsGracefully() { })); } - HapiSpec setApproveForAllUsingLocalNodeSetupPasses() { - final AtomicReference spenderAutoCreatedAccountId = new AtomicReference<>(); - final AtomicReference tokenCreateContractID = new AtomicReference<>(); - final AtomicReference erc721ContractID = new AtomicReference<>(); - final AtomicReference contractAddressID = new AtomicReference<>(); - final AtomicReference createdTokenAddressString = new AtomicReference<>(); - final String spenderAlias = "spenderAlias"; - final var createTokenContractNum = new AtomicLong(); - return defaultHapiSpec("SetApproveForAllUsingLocalNodeSetupPasses") - .given( - newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), - newKeyNamed(spenderAlias).shape(SECP_256K1_SHAPE), - cryptoCreate(RELAYER).balance(6 * ONE_MILLION_HBARS), - cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, SECP_256K1_SOURCE_KEY, ONE_MILLION_HBARS)) - .via(AUTO_ACCOUNT_TRANSACTION_NAME), - cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, spenderAlias, ONE_HUNDRED_HBARS)) - .via("autoAccountSpender"), - getAliasedAccountInfo(spenderAlias) - .exposingContractAccountIdTo(spenderAutoCreatedAccountId::set), - createLargeFile( - GENESIS, TOKEN_CREATE_CONTRACT, TxnUtils.literalInitcodeFor(TOKEN_CREATE_CONTRACT)), - ethereumContractCreate(TOKEN_CREATE_CONTRACT) - .type(EthTransactionType.EIP1559) - .signingWith(SECP_256K1_SOURCE_KEY) - .payingWith(RELAYER) - .nonce(0) - .bytecode(TOKEN_CREATE_CONTRACT) - .gasPrice(10L) - .maxGasAllowance(ONE_HUNDRED_HBARS) - .gasLimit(1_000_000L) - .gas(1_000_000L) - .hasKnownStatusFrom(SUCCESS) - .exposingNumTo(createTokenContractNum::set), - getContractInfo(TOKEN_CREATE_CONTRACT).exposingEvmAddress(tokenCreateContractID::set)) - .when( - withOpContext((spec, opLog) -> { - var createNFTPublicFunctionCall = ethereumCall( - TOKEN_CREATE_CONTRACT, - "createNonFungibleTokenPublic", - asHeadlongAddress(tokenCreateContractID.get())) - .type(EthTransactionType.EIP1559) - .signingWith(SECP_256K1_SOURCE_KEY) - .payingWith(RELAYER) - .nonce(1) - .gasPrice(10L) - .sending(10000000000L) - .gasLimit(1_000_000L) - .via("createTokenTxn") - .exposingEventDataTo(createdTokenAddressString::set); - - allRunFor(spec, createNFTPublicFunctionCall); - - var uploadEthereumContract = uploadInitCode(ERC721_CONTRACT_WITH_HTS_CALLS); - allRunFor(spec, uploadEthereumContract); - - var createEthereumContract = ethereumContractCreate(ERC721_CONTRACT_WITH_HTS_CALLS) - .type(EthTxData.EthTransactionType.EIP1559) - .signingWith(SECP_256K1_SOURCE_KEY) - 
.payingWith(RELAYER) - .nonce(2) - .gasPrice(10L) - .maxGasAllowance(ONE_HUNDRED_HBARS) - .gasLimit(1_000_000L) - .hasKnownStatusFrom(SUCCESS); - - var exposeEthereumContractAddress = getContractInfo(ERC721_CONTRACT_WITH_HTS_CALLS) - .exposingEvmAddress(address -> erc721ContractID.set("0x" + address)); - allRunFor(spec, createEthereumContract, exposeEthereumContractAddress); - - var contractInfo = getLiteralAliasContractInfo( - erc721ContractID.get().substring(2)) - .exposingEvmAddress(contractAddressID::set); - allRunFor(spec, contractInfo); - assertEquals(erc721ContractID.get().substring(2), contractAddressID.get()); - }), - withOpContext((spec, opLog) -> { - var associateTokenToERC721 = ethereumCall( - ERC721_CONTRACT_WITH_HTS_CALLS, - "associateTokenPublic", - asHeadlongAddress(erc721ContractID.get()), - asHeadlongAddress(Bytes.wrap(createdTokenAddressString - .get() - .toByteArray()) - .toHexString())) - .type(EthTransactionType.EIP1559) - .signingWith(SECP_256K1_SOURCE_KEY) - .payingWith(GENESIS) - .nonce(3) - .gasPrice(10L) - .gasLimit(1_000_000L) - .via("associateTokenTxn") - .hasKnownStatusFrom(SUCCESS); - - var associateTokenToSpender = ethereumCall( - TOKEN_CREATE_CONTRACT, - "associateTokenPublic", - asHeadlongAddress(spenderAutoCreatedAccountId.get()), - asHeadlongAddress(Bytes.wrap(createdTokenAddressString - .get() - .toByteArray()) - .toHexString())) - .type(EthTransactionType.EIP1559) - .signingWith(spenderAlias) - .payingWith(GENESIS) - .nonce(0) - .gasPrice(10L) - .gasLimit(1_000_000L) - .via("associateTokenTxn") - .hasKnownStatusFrom(SUCCESS); - - var isApprovedForAllBefore = ethereumCall( - ERC721_CONTRACT_WITH_HTS_CALLS, - "ercIsApprovedForAll", - asHeadlongAddress(Bytes.wrap(createdTokenAddressString - .get() - .toByteArray()) - .toHexString()), - asHeadlongAddress(erc721ContractID.get()), - asHeadlongAddress(spenderAutoCreatedAccountId.get())) - .type(EthTransactionType.EIP1559) - .signingWith(SECP_256K1_SOURCE_KEY) - .payingWith(RELAYER) - .nonce(4) - .gasPrice(10L) - .gasLimit(1_000_000L) - .via("ercIsApprovedForAllBeforeTxn") - .hasKnownStatusFrom(SUCCESS) - .logged(); - - var isApprovedForAllBeforeCheck = childRecordsCheck( - "ercIsApprovedForAllBeforeTxn", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.ERC_IS_APPROVED_FOR_ALL) - .withIsApprovedForAll(false)))); - - var setApprovalForAll = ethereumCall( - ERC721_CONTRACT_WITH_HTS_CALLS, - "ercSetApprovalForAll", - asHeadlongAddress(Bytes.wrap(createdTokenAddressString - .get() - .toByteArray()) - .toHexString()), - asHeadlongAddress(spenderAutoCreatedAccountId.get()), - true) - .type(EthTransactionType.EIP1559) - .signingWith(SECP_256K1_SOURCE_KEY) - .payingWith(RELAYER) - .nonce(5) - .gasPrice(10L) - .gasLimit(1_000_000L) - .via("ercSetApproveForAllTxn") - .hasKnownStatusFrom(SUCCESS) - .logged(); - - var isApprovedForAllAfter = ethereumCall( - ERC721_CONTRACT_WITH_HTS_CALLS, - "ercIsApprovedForAll", - asHeadlongAddress(Bytes.wrap(createdTokenAddressString - .get() - .toByteArray()) - .toHexString()), - asHeadlongAddress(erc721ContractID.get()), - asHeadlongAddress(spenderAutoCreatedAccountId.get())) - .type(EthTransactionType.EIP1559) - .signingWith(SECP_256K1_SOURCE_KEY) - .payingWith(RELAYER) - .nonce(6) - .gasPrice(10L) - .gasLimit(1_000_000L) - .via("ercIsApprovedForAllAfterTxn") - .hasKnownStatusFrom(SUCCESS) - .logged(); - - var isApprovedForAllAfterCheck = childRecordsCheck( - 
"ercIsApprovedForAllAfterTxn", - SUCCESS, - recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .contractCallResult(htsPrecompileResult() - .forFunction(FunctionType.ERC_IS_APPROVED_FOR_ALL) - .withIsApprovedForAll(true)))); - - allRunFor( - spec, - associateTokenToERC721, - associateTokenToSpender, - isApprovedForAllBefore, - isApprovedForAllBeforeCheck, - setApprovalForAll, - isApprovedForAllAfter, - isApprovedForAllAfterCheck); - })) - .then(withOpContext((spec, opLog) -> {})); - } - HapiSpec etx010TransferToCryptoAccountSucceeds() { String RECEIVER = "RECEIVER"; final String aliasBalanceSnapshot = "aliasBalance"; @@ -507,7 +297,7 @@ HapiSpec matrixedPayerRelayerTest( .accountIsAlias(); final var subop2 = balanceSnapshot(payerBalance, RELAYER); final var subop3 = ethereumCall( - PAY_RECEIVABLE_CONTRACT, "deposit", BigInteger.valueOf(depositAmount)) + PAY_RECEIVABLE_CONTRACT, "deposit", BigInteger.valueOf(DEPOSIT_AMOUNT)) .type(EthTxData.EthTransactionType.EIP1559) .signingWith(SECP_256K1_SOURCE_KEY) .payingWith(RELAYER) @@ -516,7 +306,7 @@ HapiSpec matrixedPayerRelayerTest( .maxGasAllowance(relayerOffered) .maxFeePerGas(senderGasPrice) .gasLimit(GAS_LIMIT) - .sending(depositAmount) + .sending(DEPOSIT_AMOUNT) .hasKnownStatus(success ? ResponseCodeEnum.SUCCESS : ResponseCodeEnum.INSUFFICIENT_TX_FEE); final HapiGetTxnRecord hapiGetTxnRecord = @@ -527,7 +317,7 @@ HapiSpec matrixedPayerRelayerTest( hapiGetTxnRecord.getResponseRecord().getTransactionFee(); final var subop4 = getAutoCreatedAccountBalance(SECP_256K1_SOURCE_KEY) .hasTinyBars( - changeFromSnapshot(senderBalance, success ? (-depositAmount - senderCharged) : 0)); + changeFromSnapshot(senderBalance, success ? (-DEPOSIT_AMOUNT - senderCharged) : 0)); final var subop5 = getAccountBalance(RELAYER) .hasTinyBars(changeFromSnapshot( payerBalance, @@ -625,7 +415,7 @@ HapiSpec etx031InvalidNonceEthereumTxFailsAndChargesRelayer() { .when( balanceSnapshot(relayerSnapshot, RELAYER), balanceSnapshot(senderSnapshot, SECP_256K1_SOURCE_KEY).accountIsAlias(), - ethereumCall(PAY_RECEIVABLE_CONTRACT, "deposit", BigInteger.valueOf(depositAmount)) + ethereumCall(PAY_RECEIVABLE_CONTRACT, "deposit", BigInteger.valueOf(DEPOSIT_AMOUNT)) .type(EthTxData.EthTransactionType.EIP1559) .signingWith(SECP_256K1_SOURCE_KEY) .payingWith(RELAYER) @@ -651,107 +441,6 @@ HapiSpec etx031InvalidNonceEthereumTxFailsAndChargesRelayer() { .has(accountWith().nonce(0L))); } - HapiSpec etx012PrecompileCallSucceedsWhenNeededSignatureInEthTxn() { - final AtomicReference fungible = new AtomicReference<>(); - final String fungibleToken = TOKEN; - final String mintTxn = MINT_TXN; - return defaultHapiSpec("etx012PrecompileCallSucceedsWhenNeededSignatureInEthTxn") - .given( - newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), - cryptoCreate(RELAYER).balance(6 * ONE_MILLION_HBARS), - cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, SECP_256K1_SOURCE_KEY, ONE_HUNDRED_HBARS)) - .via(AUTO_ACCOUNT_TRANSACTION_NAME), - withOpContext((spec, opLog) -> updateSpecFor(spec, SECP_256K1_SOURCE_KEY)), - getTxnRecord(AUTO_ACCOUNT_TRANSACTION_NAME).andAllChildRecords(), - uploadInitCode(HELLO_WORLD_MINT_CONTRACT), - tokenCreate(fungibleToken) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(0) - .adminKey(SECP_256K1_SOURCE_KEY) - .supplyKey(SECP_256K1_SOURCE_KEY) - .exposingCreatedIdTo(idLit -> fungible.set(asToken(idLit)))) - .when( - sourcing(() -> contractCreate( - HELLO_WORLD_MINT_CONTRACT, asHeadlongAddress(asAddress(fungible.get())))), - 
ethereumCall(HELLO_WORLD_MINT_CONTRACT, "brrr", BigInteger.valueOf(5)) - .type(EthTxData.EthTransactionType.EIP1559) - .signingWith(SECP_256K1_SOURCE_KEY) - .payingWith(RELAYER) - .nonce(0) - .gasPrice(50L) - .maxGasAllowance(FIVE_HBARS) - .gasLimit(1_000_000L) - .via(mintTxn) - .hasKnownStatus(SUCCESS)) - .then(withOpContext((spec, opLog) -> allRunFor( - spec, - getTxnRecord(mintTxn) - .logged() - .hasPriority(recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .logs(inOrder()) - .senderId(spec.registry() - .getAccountID(spec.registry() - .aliasIdFor(SECP_256K1_SOURCE_KEY) - .getAlias() - .toStringUtf8()))) - .ethereumHash(ByteString.copyFrom( - spec.registry().getBytes(ETH_HASH_KEY))))))); - } - - HapiSpec etx013PrecompileCallSucceedsWhenNeededSignatureInHederaTxn() { - final AtomicReference fungible = new AtomicReference<>(); - final String fungibleToken = TOKEN; - final String mintTxn = MINT_TXN; - final String MULTI_KEY = "MULTI_KEY"; - return defaultHapiSpec("etx013PrecompileCallSucceedsWhenNeededSignatureInHederaTxn") - .given( - newKeyNamed(MULTI_KEY), - newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), - cryptoCreate(RELAYER).balance(6 * ONE_MILLION_HBARS), - cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, SECP_256K1_SOURCE_KEY, ONE_HUNDRED_HBARS)) - .via(AUTO_ACCOUNT_TRANSACTION_NAME), - withOpContext((spec, opLog) -> updateSpecFor(spec, SECP_256K1_SOURCE_KEY)), - getTxnRecord(AUTO_ACCOUNT_TRANSACTION_NAME).andAllChildRecords(), - uploadInitCode(HELLO_WORLD_MINT_CONTRACT), - tokenCreate(fungibleToken) - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(0) - .adminKey(MULTI_KEY) - .supplyKey(MULTI_KEY) - .exposingCreatedIdTo(idLit -> fungible.set(asToken(idLit)))) - .when( - sourcing(() -> contractCreate( - HELLO_WORLD_MINT_CONTRACT, asHeadlongAddress(asAddress(fungible.get())))), - ethereumCall(HELLO_WORLD_MINT_CONTRACT, "brrr", BigInteger.valueOf(5)) - .type(EthTxData.EthTransactionType.EIP1559) - .signingWith(SECP_256K1_SOURCE_KEY) - .payingWith(RELAYER) - .alsoSigningWithFullPrefix(MULTI_KEY) - .nonce(0) - .gasPrice(50L) - .maxGasAllowance(FIVE_HBARS) - .gasLimit(1_000_000L) - .via(mintTxn) - .hasKnownStatus(SUCCESS)) - .then(withOpContext((spec, opLog) -> allRunFor( - spec, - getTxnRecord(mintTxn) - .logged() - .hasPriority(recordWith() - .status(SUCCESS) - .contractCallResult(resultWith() - .logs(inOrder()) - .senderId(spec.registry() - .getAccountID(spec.registry() - .aliasIdFor(SECP_256K1_SOURCE_KEY) - .getAlias() - .toStringUtf8()))) - .ethereumHash(ByteString.copyFrom( - spec.registry().getBytes(ETH_HASH_KEY))))))); - } - HapiSpec etx013PrecompileCallFailsWhenSignatureMissingFromBothEthereumAndHederaTxn() { final AtomicReference fungible = new AtomicReference<>(); final String fungibleToken = TOKEN; @@ -919,73 +608,6 @@ final var record = op.getResponseRecord(); .then(); } - private HapiSpec etx007FungibleTokenCreateWithFeesHappyPath() { - final var createdTokenNum = new AtomicLong(); - final var feeCollectorAndAutoRenew = "feeCollectorAndAutoRenew"; - final var contract = "TokenCreateContract"; - final var EXISTING_TOKEN = "EXISTING_TOKEN"; - final var firstTxn = "firstCreateTxn"; - final long DEFAULT_AMOUNT_TO_SEND = 20 * ONE_HBAR; - - return defaultHapiSpec("etx007FungibleTokenCreateWithFeesHappyPath") - .given( - newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), - cryptoCreate(RELAYER).balance(6 * ONE_MILLION_HBARS), - cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, SECP_256K1_SOURCE_KEY, ONE_HUNDRED_HBARS)) - 
.via(AUTO_ACCOUNT_TRANSACTION_NAME), - cryptoCreate(feeCollectorAndAutoRenew) - .keyShape(SigControl.ED25519_ON) - .balance(ONE_HUNDRED_HBARS), - uploadInitCode(contract), - contractCreate(contract).gas(GAS_LIMIT), - tokenCreate(EXISTING_TOKEN).decimals(5), - tokenAssociate(feeCollectorAndAutoRenew, EXISTING_TOKEN)) - .when(withOpContext((spec, opLog) -> allRunFor( - spec, - ethereumCall( - contract, - "createTokenWithAllCustomFeesAvailable", - spec.registry() - .getKey(SECP_256K1_SOURCE_KEY) - .getECDSASecp256K1() - .toByteArray(), - asHeadlongAddress( - asAddress(spec.registry().getAccountID(feeCollectorAndAutoRenew))), - asHeadlongAddress( - asAddress(spec.registry().getTokenID(EXISTING_TOKEN))), - asHeadlongAddress( - asAddress(spec.registry().getAccountID(feeCollectorAndAutoRenew))), - 8_000_000L) - .via(firstTxn) - .gasLimit(GAS_LIMIT) - .sending(DEFAULT_AMOUNT_TO_SEND) - .alsoSigningWithFullPrefix(feeCollectorAndAutoRenew) - .exposingResultTo(result -> { - log.info("Explicit create result" + " is {}", result[0]); - final var res = (Address) result[0]; - createdTokenNum.set(res.value().longValueExact()); - })))) - .then( - getTxnRecord(firstTxn).andAllChildRecords().logged(), - childRecordsCheck( - firstTxn, - ResponseCodeEnum.SUCCESS, - TransactionRecordAsserts.recordWith().status(ResponseCodeEnum.SUCCESS)), - withOpContext((spec, ignore) -> { - final var op = getTxnRecord(firstTxn); - allRunFor(spec, op); - - final var callResult = op.getResponseRecord().getContractCallResult(); - final var gasUsed = callResult.getGasUsed(); - final var amount = callResult.getAmount(); - final var gasLimit = callResult.getGas(); - Assertions.assertEquals(DEFAULT_AMOUNT_TO_SEND, amount); - Assertions.assertEquals(GAS_LIMIT, gasLimit); - Assertions.assertTrue(gasUsed > 0L); - Assertions.assertTrue(callResult.hasContractID() && callResult.hasSenderId()); - })); - } - private HapiSpec etxSvc003ContractGetBytecodeQueryReturnsDeployedCode() { final var txn = "creation"; final var contract = "EmptyConstructor"; diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/EthereumV1SecurityModelSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/EthereumV1SecurityModelSuite.java new file mode 100644 index 000000000000..be528ba8e15d --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/EthereumV1SecurityModelSuite.java @@ -0,0 +1,503 @@ +/* + * Copyright (C) 2022-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.suites.ethereum; + +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.AssertUtils.inOrder; +import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAliasedAccountInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getContractInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getLiteralAliasContractInfo; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.ethereumCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.ethereumContractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; +import static com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoTransfer.tinyBarsFromAccountToAlias; +import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.childRecordsCheck; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.createLargeFile; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.newKeyNamed; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overridingTwo; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; +import static com.hedera.services.bdd.suites.contract.Utils.asAddress; +import static com.hedera.services.bdd.suites.contract.Utils.asToken; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hedera.services.bdd.suites.crypto.AutoCreateUtils.updateSpecFor; +import static com.hedera.services.bdd.suites.utils.contracts.precompile.HTSPrecompileResult.htsPrecompileResult; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.esaulpaugh.headlong.abi.Address; +import com.google.protobuf.ByteString; +import com.hedera.node.app.hapi.utils.contracts.ParsingConstants.FunctionType; +import com.hedera.node.app.hapi.utils.ethereum.EthTxData; +import com.hedera.node.app.hapi.utils.ethereum.EthTxData.EthTransactionType; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts; +import com.hedera.services.bdd.spec.keys.SigControl; +import 
com.hedera.services.bdd.spec.transactions.TxnUtils; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hederahashgraph.api.proto.java.ResponseCodeEnum; +import com.hederahashgraph.api.proto.java.TokenID; +import com.hederahashgraph.api.proto.java.TokenType; +import java.math.BigInteger; +import java.util.List; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.tuweni.bytes.Bytes; +import org.junit.jupiter.api.Assertions; + +@SuppressWarnings("java:S5960") +public class EthereumV1SecurityModelSuite extends HapiSuite { + + private static final Logger log = LogManager.getLogger(EthereumV1SecurityModelSuite.class); + private static final String TOKEN_CREATE_CONTRACT = "NewTokenCreateContract"; + private static final String ERC721_CONTRACT_WITH_HTS_CALLS = "ERC721ContractWithHTSCalls"; + private static final String HELLO_WORLD_MINT_CONTRACT = "HelloWorldMint"; + public static final long GAS_LIMIT = 1_000_000; + + private static final String AUTO_ACCOUNT_TRANSACTION_NAME = "autoAccount"; + private static final String TOKEN = "token"; + private static final String MINT_TXN = "mintTxn"; + + public static void main(String... args) { + new EthereumV1SecurityModelSuite().runSuiteSync(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + public List getSpecsInSuite() { + return List.of( + etx007FungibleTokenCreateWithFeesHappyPath(), + etx012PrecompileCallSucceedsWhenNeededSignatureInEthTxn(), + etx013PrecompileCallSucceedsWhenNeededSignatureInHederaTxn(), + setApproveForAllUsingLocalNodeSetupPasses()); + } + + private HapiSpec setApproveForAllUsingLocalNodeSetupPasses() { + final AtomicReference spenderAutoCreatedAccountId = new AtomicReference<>(); + final AtomicReference tokenCreateContractID = new AtomicReference<>(); + final AtomicReference erc721ContractID = new AtomicReference<>(); + final AtomicReference contractAddressID = new AtomicReference<>(); + final AtomicReference createdTokenAddressString = new AtomicReference<>(); + final String spenderAlias = "spenderAlias"; + final var createTokenContractNum = new AtomicLong(); + return propertyPreservingHapiSpec("SetApproveForAllUsingLocalNodeSetupPasses") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), + newKeyNamed(spenderAlias).shape(SECP_256K1_SHAPE), + cryptoCreate(RELAYER).balance(6 * ONE_MILLION_HBARS), + cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, SECP_256K1_SOURCE_KEY, ONE_MILLION_HBARS)) + .via(AUTO_ACCOUNT_TRANSACTION_NAME), + cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, spenderAlias, ONE_HUNDRED_HBARS)) + .via("autoAccountSpender"), + getAliasedAccountInfo(spenderAlias) + .exposingContractAccountIdTo(spenderAutoCreatedAccountId::set), + createLargeFile( + GENESIS, TOKEN_CREATE_CONTRACT, TxnUtils.literalInitcodeFor(TOKEN_CREATE_CONTRACT)), + ethereumContractCreate(TOKEN_CREATE_CONTRACT) + .type(EthTransactionType.EIP1559) + .signingWith(SECP_256K1_SOURCE_KEY) + .payingWith(RELAYER) + .nonce(0) + .bytecode(TOKEN_CREATE_CONTRACT) + .gasPrice(10L) + .maxGasAllowance(ONE_HUNDRED_HBARS) + 
.gasLimit(1_000_000L) + .gas(1_000_000L) + .hasKnownStatusFrom(SUCCESS) + .exposingNumTo(createTokenContractNum::set), + getContractInfo(TOKEN_CREATE_CONTRACT).exposingEvmAddress(tokenCreateContractID::set)) + .when( + withOpContext((spec, opLog) -> { + var createNFTPublicFunctionCall = ethereumCall( + TOKEN_CREATE_CONTRACT, + "createNonFungibleTokenPublic", + asHeadlongAddress(tokenCreateContractID.get())) + .type(EthTransactionType.EIP1559) + .signingWith(SECP_256K1_SOURCE_KEY) + .payingWith(RELAYER) + .nonce(1) + .gasPrice(10L) + .sending(10000000000L) + .gasLimit(1_000_000L) + .via("createTokenTxn") + .exposingEventDataTo(createdTokenAddressString::set); + + allRunFor(spec, createNFTPublicFunctionCall); + + var uploadEthereumContract = uploadInitCode(ERC721_CONTRACT_WITH_HTS_CALLS); + allRunFor(spec, uploadEthereumContract); + + var createEthereumContract = ethereumContractCreate(ERC721_CONTRACT_WITH_HTS_CALLS) + .type(EthTxData.EthTransactionType.EIP1559) + .signingWith(SECP_256K1_SOURCE_KEY) + .payingWith(RELAYER) + .nonce(2) + .gasPrice(10L) + .maxGasAllowance(ONE_HUNDRED_HBARS) + .gasLimit(1_000_000L) + .hasKnownStatusFrom(SUCCESS); + + var exposeEthereumContractAddress = getContractInfo(ERC721_CONTRACT_WITH_HTS_CALLS) + .exposingEvmAddress(address -> erc721ContractID.set("0x" + address)); + allRunFor(spec, createEthereumContract, exposeEthereumContractAddress); + + var contractInfo = getLiteralAliasContractInfo( + erc721ContractID.get().substring(2)) + .exposingEvmAddress(contractAddressID::set); + allRunFor(spec, contractInfo); + assertEquals(erc721ContractID.get().substring(2), contractAddressID.get()); + }), + withOpContext((spec, opLog) -> { + var associateTokenToERC721 = ethereumCall( + ERC721_CONTRACT_WITH_HTS_CALLS, + "associateTokenPublic", + asHeadlongAddress(erc721ContractID.get()), + asHeadlongAddress(Bytes.wrap(createdTokenAddressString + .get() + .toByteArray()) + .toHexString())) + .type(EthTransactionType.EIP1559) + .signingWith(SECP_256K1_SOURCE_KEY) + .payingWith(GENESIS) + .nonce(3) + .gasPrice(10L) + .gasLimit(1_000_000L) + .via("associateTokenTxn") + .hasKnownStatusFrom(SUCCESS); + + var associateTokenToSpender = ethereumCall( + TOKEN_CREATE_CONTRACT, + "associateTokenPublic", + asHeadlongAddress(spenderAutoCreatedAccountId.get()), + asHeadlongAddress(Bytes.wrap(createdTokenAddressString + .get() + .toByteArray()) + .toHexString())) + .type(EthTransactionType.EIP1559) + .signingWith(spenderAlias) + .payingWith(GENESIS) + .nonce(0) + .gasPrice(10L) + .gasLimit(1_000_000L) + .via("associateTokenTxn") + .hasKnownStatusFrom(SUCCESS); + + var isApprovedForAllBefore = ethereumCall( + ERC721_CONTRACT_WITH_HTS_CALLS, + "ercIsApprovedForAll", + asHeadlongAddress(Bytes.wrap(createdTokenAddressString + .get() + .toByteArray()) + .toHexString()), + asHeadlongAddress(erc721ContractID.get()), + asHeadlongAddress(spenderAutoCreatedAccountId.get())) + .type(EthTransactionType.EIP1559) + .signingWith(SECP_256K1_SOURCE_KEY) + .payingWith(RELAYER) + .nonce(4) + .gasPrice(10L) + .gasLimit(1_000_000L) + .via("ercIsApprovedForAllBeforeTxn") + .hasKnownStatusFrom(SUCCESS) + .logged(); + + var isApprovedForAllBeforeCheck = childRecordsCheck( + "ercIsApprovedForAllBeforeTxn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.ERC_IS_APPROVED_FOR_ALL) + .withIsApprovedForAll(false)))); + + var setApprovalForAll = ethereumCall( + ERC721_CONTRACT_WITH_HTS_CALLS, + "ercSetApprovalForAll", + 
asHeadlongAddress(Bytes.wrap(createdTokenAddressString + .get() + .toByteArray()) + .toHexString()), + asHeadlongAddress(spenderAutoCreatedAccountId.get()), + true) + .type(EthTransactionType.EIP1559) + .signingWith(SECP_256K1_SOURCE_KEY) + .payingWith(RELAYER) + .nonce(5) + .gasPrice(10L) + .gasLimit(1_000_000L) + .via("ercSetApproveForAllTxn") + .hasKnownStatusFrom(SUCCESS) + .logged(); + + var isApprovedForAllAfter = ethereumCall( + ERC721_CONTRACT_WITH_HTS_CALLS, + "ercIsApprovedForAll", + asHeadlongAddress(Bytes.wrap(createdTokenAddressString + .get() + .toByteArray()) + .toHexString()), + asHeadlongAddress(erc721ContractID.get()), + asHeadlongAddress(spenderAutoCreatedAccountId.get())) + .type(EthTransactionType.EIP1559) + .signingWith(SECP_256K1_SOURCE_KEY) + .payingWith(RELAYER) + .nonce(6) + .gasPrice(10L) + .gasLimit(1_000_000L) + .via("ercIsApprovedForAllAfterTxn") + .hasKnownStatusFrom(SUCCESS) + .logged(); + + var isApprovedForAllAfterCheck = childRecordsCheck( + "ercIsApprovedForAllAfterTxn", + SUCCESS, + recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .contractCallResult(htsPrecompileResult() + .forFunction(FunctionType.ERC_IS_APPROVED_FOR_ALL) + .withIsApprovedForAll(true)))); + + allRunFor( + spec, + associateTokenToERC721, + associateTokenToSpender, + isApprovedForAllBefore, + isApprovedForAllBeforeCheck, + setApprovalForAll, + isApprovedForAllAfter, + isApprovedForAllAfterCheck); + })) + .then(withOpContext((spec, opLog) -> {})); + } + + private HapiSpec etx012PrecompileCallSucceedsWhenNeededSignatureInEthTxn() { + final AtomicReference fungible = new AtomicReference<>(); + final String fungibleToken = TOKEN; + final String mintTxn = MINT_TXN; + return propertyPreservingHapiSpec("etx012PrecompileCallSucceedsWhenNeededSignatureInEthTxn") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenMint", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), + cryptoCreate(RELAYER).balance(6 * ONE_MILLION_HBARS), + cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, SECP_256K1_SOURCE_KEY, ONE_HUNDRED_HBARS)) + .via(AUTO_ACCOUNT_TRANSACTION_NAME), + withOpContext((spec, opLog) -> updateSpecFor(spec, SECP_256K1_SOURCE_KEY)), + getTxnRecord(AUTO_ACCOUNT_TRANSACTION_NAME).andAllChildRecords(), + uploadInitCode(HELLO_WORLD_MINT_CONTRACT), + tokenCreate(fungibleToken) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(0) + .adminKey(SECP_256K1_SOURCE_KEY) + .supplyKey(SECP_256K1_SOURCE_KEY) + .exposingCreatedIdTo(idLit -> fungible.set(asToken(idLit)))) + .when( + sourcing(() -> contractCreate( + HELLO_WORLD_MINT_CONTRACT, asHeadlongAddress(asAddress(fungible.get())))), + ethereumCall(HELLO_WORLD_MINT_CONTRACT, "brrr", BigInteger.valueOf(5)) + .type(EthTxData.EthTransactionType.EIP1559) + .signingWith(SECP_256K1_SOURCE_KEY) + .payingWith(RELAYER) + .nonce(0) + .gasPrice(50L) + .maxGasAllowance(FIVE_HBARS) + .gasLimit(1_000_000L) + .via(mintTxn) + .hasKnownStatus(SUCCESS)) + .then(withOpContext((spec, opLog) -> allRunFor( + spec, + getTxnRecord(mintTxn) + .logged() + .hasPriority(recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .logs(inOrder()) + .senderId(spec.registry() + .getAccountID(spec.registry() + .aliasIdFor(SECP_256K1_SOURCE_KEY) + .getAlias() + 
.toStringUtf8()))) + .ethereumHash(ByteString.copyFrom( + spec.registry().getBytes(ETH_HASH_KEY))))))); + } + + private HapiSpec etx013PrecompileCallSucceedsWhenNeededSignatureInHederaTxn() { + final AtomicReference fungible = new AtomicReference<>(); + final String fungibleToken = TOKEN; + final String mintTxn = MINT_TXN; + final String MULTI_KEY = "MULTI_KEY"; + return propertyPreservingHapiSpec("etx013PrecompileCallSucceedsWhenNeededSignatureInHederaTxn") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate,TokenMint", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(MULTI_KEY), + newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), + cryptoCreate(RELAYER).balance(6 * ONE_MILLION_HBARS), + cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, SECP_256K1_SOURCE_KEY, ONE_HUNDRED_HBARS)) + .via(AUTO_ACCOUNT_TRANSACTION_NAME), + withOpContext((spec, opLog) -> updateSpecFor(spec, SECP_256K1_SOURCE_KEY)), + getTxnRecord(AUTO_ACCOUNT_TRANSACTION_NAME).andAllChildRecords(), + uploadInitCode(HELLO_WORLD_MINT_CONTRACT), + tokenCreate(fungibleToken) + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(0) + .adminKey(MULTI_KEY) + .supplyKey(MULTI_KEY) + .exposingCreatedIdTo(idLit -> fungible.set(asToken(idLit)))) + .when( + sourcing(() -> contractCreate( + HELLO_WORLD_MINT_CONTRACT, asHeadlongAddress(asAddress(fungible.get())))), + ethereumCall(HELLO_WORLD_MINT_CONTRACT, "brrr", BigInteger.valueOf(5)) + .type(EthTxData.EthTransactionType.EIP1559) + .signingWith(SECP_256K1_SOURCE_KEY) + .payingWith(RELAYER) + .alsoSigningWithFullPrefix(MULTI_KEY) + .nonce(0) + .gasPrice(50L) + .maxGasAllowance(FIVE_HBARS) + .gasLimit(1_000_000L) + .via(mintTxn) + .hasKnownStatus(SUCCESS)) + .then(withOpContext((spec, opLog) -> allRunFor( + spec, + getTxnRecord(mintTxn) + .logged() + .hasPriority(recordWith() + .status(SUCCESS) + .contractCallResult(resultWith() + .logs(inOrder()) + .senderId(spec.registry() + .getAccountID(spec.registry() + .aliasIdFor(SECP_256K1_SOURCE_KEY) + .getAlias() + .toStringUtf8()))) + .ethereumHash(ByteString.copyFrom( + spec.registry().getBytes(ETH_HASH_KEY))))))); + } + + private HapiSpec etx007FungibleTokenCreateWithFeesHappyPath() { + final var createdTokenNum = new AtomicLong(); + final var feeCollectorAndAutoRenew = "feeCollectorAndAutoRenew"; + final var contract = "TokenCreateContract"; + final var EXISTING_TOKEN = "EXISTING_TOKEN"; + final var firstTxn = "firstCreateTxn"; + final long DEFAULT_AMOUNT_TO_SEND = 20 * ONE_HBAR; + + return propertyPreservingHapiSpec("etx007FungibleTokenCreateWithFeesHappyPath") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "ContractCall,CryptoTransfer,TokenAssociateToAccount,TokenCreate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), + cryptoCreate(RELAYER).balance(6 * ONE_MILLION_HBARS), + cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, SECP_256K1_SOURCE_KEY, ONE_HUNDRED_HBARS)) + .via(AUTO_ACCOUNT_TRANSACTION_NAME), + cryptoCreate(feeCollectorAndAutoRenew) + .keyShape(SigControl.ED25519_ON) + .balance(ONE_HUNDRED_HBARS), + uploadInitCode(contract), + 
contractCreate(contract).gas(GAS_LIMIT), + tokenCreate(EXISTING_TOKEN).decimals(5), + tokenAssociate(feeCollectorAndAutoRenew, EXISTING_TOKEN)) + .when(withOpContext((spec, opLog) -> allRunFor( + spec, + ethereumCall( + contract, + "createTokenWithAllCustomFeesAvailable", + spec.registry() + .getKey(SECP_256K1_SOURCE_KEY) + .getECDSASecp256K1() + .toByteArray(), + asHeadlongAddress( + asAddress(spec.registry().getAccountID(feeCollectorAndAutoRenew))), + asHeadlongAddress( + asAddress(spec.registry().getTokenID(EXISTING_TOKEN))), + asHeadlongAddress( + asAddress(spec.registry().getAccountID(feeCollectorAndAutoRenew))), + 8_000_000L) + .via(firstTxn) + .gasLimit(GAS_LIMIT) + .sending(DEFAULT_AMOUNT_TO_SEND) + .alsoSigningWithFullPrefix(feeCollectorAndAutoRenew) + .exposingResultTo(result -> { + log.info("Explicit create result" + " is {}", result[0]); + final var res = (Address) result[0]; + createdTokenNum.set(res.value().longValueExact()); + })))) + .then( + getTxnRecord(firstTxn).andAllChildRecords().logged(), + childRecordsCheck( + firstTxn, + ResponseCodeEnum.SUCCESS, + TransactionRecordAsserts.recordWith().status(ResponseCodeEnum.SUCCESS)), + withOpContext((spec, ignore) -> { + final var op = getTxnRecord(firstTxn); + allRunFor(spec, op); + + final var callResult = op.getResponseRecord().getContractCallResult(); + final var gasUsed = callResult.getGasUsed(); + final var amount = callResult.getAmount(); + final var gasLimit = callResult.getGas(); + Assertions.assertEquals(DEFAULT_AMOUNT_TO_SEND, amount); + Assertions.assertEquals(GAS_LIMIT, gasLimit); + Assertions.assertTrue(gasUsed > 0L); + Assertions.assertTrue(callResult.hasContractID() && callResult.hasSenderId()); + })); + } + + @Override + protected Logger getResultsLogger() { + return log; + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/leaky/LeakyContractTestsSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/leaky/LeakyContractTestsSuite.java index d39456769675..eb9491513f67 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/leaky/LeakyContractTestsSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/leaky/LeakyContractTestsSuite.java @@ -95,6 +95,7 @@ import static com.hedera.services.bdd.suites.contract.Utils.asToken; import static com.hedera.services.bdd.suites.contract.Utils.eventSignatureOf; import static com.hedera.services.bdd.suites.contract.Utils.getABIFor; +import static com.hedera.services.bdd.suites.contract.Utils.mirrorAddrWith; import static com.hedera.services.bdd.suites.contract.Utils.parsedToByteString; import static com.hedera.services.bdd.suites.contract.hapi.ContractCallSuite.ACCOUNT_INFO; import static com.hedera.services.bdd.suites.contract.hapi.ContractCallSuite.ACCOUNT_INFO_AFTER_CALL; @@ -145,8 +146,7 @@ import static com.hedera.services.bdd.suites.contract.precompile.ERCPrecompileSuite.TRANSFER_SIGNATURE; import static com.hedera.services.bdd.suites.contract.precompile.ERCPrecompileSuite.TRANSFER_SIG_NAME; import static com.hedera.services.bdd.suites.contract.precompile.LazyCreateThroughPrecompileSuite.FIRST_META; -import static com.hedera.services.bdd.suites.contract.precompile.LazyCreateThroughPrecompileSuite.mirrorAddrWith; -import static com.hedera.services.bdd.suites.contract.precompile.WipeTokenAccountPrecompileSuite.GAS_TO_OFFER; +import static 
com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; import static com.hedera.services.bdd.suites.crypto.CryptoApproveAllowanceSuite.ADMIN_KEY; import static com.hedera.services.bdd.suites.crypto.CryptoApproveAllowanceSuite.FUNGIBLE_TOKEN; import static com.hedera.services.bdd.suites.crypto.CryptoApproveAllowanceSuite.NON_FUNGIBLE_TOKEN; @@ -225,6 +225,7 @@ import org.apache.logging.log4j.Logger; import org.junit.jupiter.api.Assertions; +@SuppressWarnings("java:S1192") // "string literal should not be duplicated" - this rule makes test suites worse public class LeakyContractTestsSuite extends HapiSuite { private static final Logger log = LogManager.getLogger(LeakyContractTestsSuite.class); public static final String CONTRACTS_MAX_REFUND_PERCENT_OF_GAS_LIMIT1 = "contracts.maxRefundPercentOfGasLimit"; @@ -240,6 +241,7 @@ public class LeakyContractTestsSuite extends HapiSuite { private static final String TOKEN_TRANSFER_CONTRACT = "TokenTransferContract"; private static final String TRANSFER_TOKEN_PUBLIC = "transferTokenPublic"; private static final String HEDERA_ALLOWANCES_IS_ENABLED = "hedera.allowances.isEnabled"; + public static final int GAS_TO_OFFER = 1_000_000; public static void main(String... args) { new LeakyContractTestsSuite().runSuiteSync(); @@ -600,11 +602,15 @@ private HapiSpec transferWorksWithTopLevelSignatures() { final AtomicReference vanillaTokenID = new AtomicReference<>(); final AtomicReference vanillaNftID = new AtomicReference<>(); return propertyPreservingHapiSpec("transferWorksWithTopLevelSignatures") - .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS) + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) .given( // enable top level signatures for // transferToken/transferTokens/transferNft/transferNfts - overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CRYPTO_TRANSFER), + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + CRYPTO_TRANSFER, + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + "10_000_000"), newKeyNamed(SUPPLY_KEY), cryptoCreate(ACCOUNT).exposingCreatedIdTo(accountID::set), cryptoCreate(TOKEN_TREASURY), @@ -830,9 +836,10 @@ HapiSpec payerCannotOverSendValue() { private HapiSpec createTokenWithInvalidFeeCollector() { return propertyPreservingHapiSpec("createTokenWithInvalidFeeCollector") - .preserving(CRYPTO_CREATE_WITH_ALIAS_ENABLED) + .preserving(CRYPTO_CREATE_WITH_ALIAS_ENABLED, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) .given( overriding(CRYPTO_CREATE_WITH_ALIAS_ENABLED, FALSE_VALUE), + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, "10_000_000"), newKeyNamed(ECDSA_KEY).shape(SECP256K1), cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(ECDSA_KEY), uploadInitCode(TOKEN_CREATE_CONTRACT), @@ -878,9 +885,10 @@ private HapiSpec createTokenWithInvalidFixedFeeWithERC721Denomination() { final String feeCollector = ACCOUNT_2; final String someARAccount = "someARAccount"; return propertyPreservingHapiSpec("createTokenWithInvalidFixedFeeWithERC721Denomination") - .preserving(CRYPTO_CREATE_WITH_ALIAS_ENABLED) + .preserving(CRYPTO_CREATE_WITH_ALIAS_ENABLED, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) .given( overriding(CRYPTO_CREATE_WITH_ALIAS_ENABLED, FALSE_VALUE), + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, "10_000_000"), newKeyNamed(ECDSA_KEY).shape(SECP256K1), cryptoCreate(ACCOUNT).balance(ONE_MILLION_HBARS).key(ECDSA_KEY), cryptoCreate(feeCollector).keyShape(ED25519_ON).balance(ONE_HUNDRED_HBARS), @@ -933,9 +941,10 @@ private 
HapiSpec createTokenWithInvalidRoyaltyFee() { AtomicReference existingToken = new AtomicReference<>(); final String treasuryAndFeeCollectorKey = "treasuryAndFeeCollectorKey"; return propertyPreservingHapiSpec("createTokenWithInvalidRoyaltyFee") - .preserving(CRYPTO_CREATE_WITH_ALIAS_ENABLED) + .preserving(CRYPTO_CREATE_WITH_ALIAS_ENABLED, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) .given( overriding(CRYPTO_CREATE_WITH_ALIAS_ENABLED, FALSE_VALUE), + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, "10_000_000"), newKeyNamed(ECDSA_KEY).shape(SECP256K1), newKeyNamed(ED25519KEY).shape(ED25519), newKeyNamed(CONTRACT_ADMIN_KEY), @@ -991,9 +1000,10 @@ private HapiSpec nonFungibleTokenCreateWithFeesHappyPath() { final var feeCollector = ACCOUNT_2; final var treasuryAndFeeCollectorKey = "treasuryAndFeeCollectorKey"; return propertyPreservingHapiSpec("nonFungibleTokenCreateWithFeesHappyPath") - .preserving(CRYPTO_CREATE_WITH_ALIAS_ENABLED) + .preserving(CRYPTO_CREATE_WITH_ALIAS_ENABLED, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) .given( overriding(CRYPTO_CREATE_WITH_ALIAS_ENABLED, FALSE_VALUE), + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, "10_000_000"), newKeyNamed(ECDSA_KEY).shape(SECP256K1), newKeyNamed(ED25519KEY).shape(ED25519), newKeyNamed(treasuryAndFeeCollectorKey), @@ -1077,9 +1087,10 @@ private HapiSpec fungibleTokenCreateWithFeesHappyPath() { final var arEd25519Key = "arEd25519Key"; final var initialAutoRenewAccount = "initialAutoRenewAccount"; return propertyPreservingHapiSpec("fungibleTokenCreateWithFeesHappyPath") - .preserving(CRYPTO_CREATE_WITH_ALIAS_ENABLED) + .preserving(CRYPTO_CREATE_WITH_ALIAS_ENABLED, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) .given( overriding(CRYPTO_CREATE_WITH_ALIAS_ENABLED, FALSE_VALUE), + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, "10_000_000"), newKeyNamed(arEd25519Key).shape(ED25519), newKeyNamed(ECDSA_KEY).shape(SECP256K1), cryptoCreate(initialAutoRenewAccount).key(arEd25519Key), @@ -1153,11 +1164,13 @@ private HapiSpec fungibleTokenCreateWithFeesHappyPath() { })); } - HapiSpec etx026AccountWithoutAliasCanMakeEthTxnsDueToAutomaticAliasCreation() { + private HapiSpec etx026AccountWithoutAliasCanMakeEthTxnsDueToAutomaticAliasCreation() { final String ACCOUNT = "account"; - return defaultHapiSpec("etx026AccountWithoutAliasCanMakeEthTxnsDueToAutomaticAliasCreation") + return propertyPreservingHapiSpec("etx026AccountWithoutAliasCanMakeEthTxnsDueToAutomaticAliasCreation") + .preserving(CRYPTO_CREATE_WITH_ALIAS_ENABLED, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) .given( overriding(CRYPTO_CREATE_WITH_ALIAS_ENABLED, FALSE_VALUE), + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, "10_000_000"), newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), cryptoCreate(ACCOUNT).key(SECP_256K1_SOURCE_KEY).balance(ONE_HUNDRED_HBARS)) .when(ethereumContractCreate(PAY_RECEIVABLE_CONTRACT) @@ -1741,7 +1754,7 @@ private HapiSpec requiresTopLevelSignatureOrApprovalDependingOnControllingProper final AtomicReference
tokenAddress = new AtomicReference<>(); final var amountPerTransfer = 50L; return propertyPreservingHapiSpec("RequiresTopLevelSignatureOrApprovalDependingOnControllingProperty") - .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS) + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) .given( cryptoCreate(SENDER) .keyShape(SECP256K1_ON) @@ -1762,7 +1775,8 @@ private HapiSpec requiresTopLevelSignatureOrApprovalDependingOnControllingProper uploadInitCode(TRANSFER_CONTRACT), contractCreate(TRANSFER_CONTRACT), // First revoke use of top-level signatures from all precompiles - overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, "")) + overriding(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, ""), + overriding(CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, "10_000_000")) .when( // Then, try to transfer tokens using a top-level signature sourcing(() -> contractCall(TRANSFER_CONTRACT, TRANSFER_MULTIPLE_TOKENS, (Object) new Tuple[] { diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/leaky/LeakySecurityModelV1Suite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/leaky/LeakySecurityModelV1Suite.java new file mode 100644 index 000000000000..0a6859c4b799 --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/leaky/LeakySecurityModelV1Suite.java @@ -0,0 +1,108 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.suites.leaky; + +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.suites.HapiSuite; +import com.hedera.services.bdd.suites.contract.hapi.ContractCallV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.hapi.ContractCreateV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.opcodes.Create2OperationV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.AssociatePrecompileV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.ContractBurnHTSV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.ContractHTSV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.ContractKeysHTSV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.ContractMintHTSV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.CreatePrecompileV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.CryptoTransferHTSV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.DeleteTokenPrecompileV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.DissociatePrecompileV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.ERCPrecompileV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.FreezeUnfreezeTokenPrecompileV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.GrantRevokeKycV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.LazyCreateThroughPrecompileV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.MixedHTSPrecompileTestsV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.PauseUnpauseTokenAccountPrecompileV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.SigningReqsV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.TokenExpiryInfoV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.TokenInfoHTSV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.TokenUpdatePrecompileV1SecurityModelSuite; +import com.hedera.services.bdd.suites.contract.precompile.WipeTokenAccountPrecompileV1SecurityModelSuite; +import com.hedera.services.bdd.suites.ethereum.EthereumV1SecurityModelSuite; +import com.hedera.services.bdd.suites.token.TokenAssociationV1SecurityModelSpecs; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.List; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +public class LeakySecurityModelV1Suite extends HapiSuite { + + private static final Logger log = LogManager.getLogger(LeakySecurityModelV1Suite.class); + + public static void main(String... 
args) { + new LeakySecurityModelV1Suite().runSuiteSync(); + } + + @NonNull + final List suites; + + public LeakySecurityModelV1Suite() { + suites = List.of( + new AssociatePrecompileV1SecurityModelSuite(), + new ContractBurnHTSV1SecurityModelSuite(), + new ContractCallV1SecurityModelSuite(), + new ContractCreateV1SecurityModelSuite(), + new ContractHTSV1SecurityModelSuite(), + new ContractKeysHTSV1SecurityModelSuite(), + new ContractMintHTSV1SecurityModelSuite(), + new Create2OperationV1SecurityModelSuite(), + new CreatePrecompileV1SecurityModelSuite(), + new CryptoTransferHTSV1SecurityModelSuite(), + new DeleteTokenPrecompileV1SecurityModelSuite(), + new DissociatePrecompileV1SecurityModelSuite(), + new ERCPrecompileV1SecurityModelSuite(), + new EthereumV1SecurityModelSuite(), + new FreezeUnfreezeTokenPrecompileV1SecurityModelSuite(), + new GrantRevokeKycV1SecurityModelSuite(), + new LazyCreateThroughPrecompileV1SecurityModelSuite(), + new MixedHTSPrecompileTestsV1SecurityModelSuite(), + new PauseUnpauseTokenAccountPrecompileV1SecurityModelSuite(), + new SigningReqsV1SecurityModelSuite(), + new TokenAssociationV1SecurityModelSpecs(), + new TokenExpiryInfoV1SecurityModelSuite(), + new TokenInfoHTSV1SecurityModelSuite(), + new TokenUpdatePrecompileV1SecurityModelSuite(), + new WipeTokenAccountPrecompileV1SecurityModelSuite()); + } + + @Override + public List getSpecsInSuite() { + return suites.stream() + .map(HapiSuite::getSpecsInSuite) + .flatMap(List::stream) + .toList(); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + @Override + protected Logger getResultsLogger() { + return log; + } +} diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/PrecompileMintThrottlingCheck.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/PrecompileMintThrottlingCheck.java index e5b40878c28a..552532730f3f 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/PrecompileMintThrottlingCheck.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/PrecompileMintThrottlingCheck.java @@ -26,7 +26,6 @@ import static com.hedera.services.bdd.spec.utilops.UtilVerbs.overriding; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.runWithProvider; import static com.hedera.services.bdd.suites.contract.precompile.ContractMintHTSSuite.MINT_NFT_CONTRACT; -import static com.hedera.services.bdd.suites.contract.precompile.WipeTokenAccountPrecompileSuite.GAS_TO_OFFER; import static com.hedera.services.bdd.suites.utils.sysfiles.serdes.ThrottleDefsLoader.protoDefsFromResource; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.CONTRACT_REVERT_EXECUTED; import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS; @@ -61,6 +60,7 @@ public class PrecompileMintThrottlingCheck extends HapiSuite { private static final int EXPECTED_MAX_MINTS_PER_SEC = 50; private static final double ALLOWED_THROTTLE_NOISE_TOLERANCE = 0.05; private static final String NON_FUNGIBLE_TOKEN = "NON_FUNGIBLE_TOKEN"; + public static final int GAS_TO_OFFER = 1_000_000; public static void main(String... 
args) { new PrecompileMintThrottlingCheck().runSuiteSync(); diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenAssociationSpecs.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenAssociationSpecs.java index e133cf1ad509..ada6df706c0d 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenAssociationSpecs.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenAssociationSpecs.java @@ -16,14 +16,12 @@ package com.hedera.services.bdd.suites.token; -import static com.hedera.services.bdd.spec.HapiPropertySource.asHexedSolidityAddress; import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.assertions.NoTokenTransfers.emptyTokenTransfers; import static com.hedera.services.bdd.spec.assertions.SomeFungibleTransfers.changingFungibleBalances; import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; import static com.hedera.services.bdd.spec.queries.QueryVerbs.*; import static com.hedera.services.bdd.spec.queries.crypto.ExpectedTokenRel.relationshipWith; -import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.createDefaultContract; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; @@ -36,7 +34,6 @@ import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenFreeze; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenUnfreeze; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; -import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; import static com.hedera.services.bdd.spec.transactions.token.TokenMovement.moving; import static com.hedera.services.bdd.spec.utilops.CustomSpecAssert.allRunFor; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.*; @@ -47,9 +44,7 @@ import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; import static org.junit.jupiter.api.Assertions.assertEquals; -import com.esaulpaugh.headlong.abi.Address; import com.google.protobuf.ByteString; -import com.hedera.services.bdd.spec.HapiPropertySource; import com.hedera.services.bdd.spec.HapiSpec; import com.hedera.services.bdd.spec.HapiSpecOperation; import com.hedera.services.bdd.spec.assertions.BaseErroringAssertsProvider; @@ -62,7 +57,6 @@ import java.util.Collections; import java.util.List; import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -101,8 +95,7 @@ public List getSpecsInSuite() { contractInfoQueriesAsExpected(), dissociateHasExpectedSemanticsForDeletedTokens(), dissociateHasExpectedSemanticsForDissociatedContracts(), - canDissociateFromDeletedTokenWithAlreadyDissociatedTreasury(), - multiAssociationWithSameRepeatedTokenAsExpected()); + canDissociateFromDeletedTokenWithAlreadyDissociatedTreasury()); } @Override @@ -110,45 +103,6 @@ public boolean canRunConcurrent() { return true; } - private HapiSpec multiAssociationWithSameRepeatedTokenAsExpected() { - final var nfToken = "nfToken"; - final var civilian = "civilian"; - final var multiAssociate = "multiAssociate"; - final var theContract = "AssociateDissociate"; - final 
AtomicReference tokenMirrorAddr = new AtomicReference<>(); - final AtomicReference civilianMirrorAddr = new AtomicReference<>(); - - return defaultHapiSpec("MultiAssociationWithSameRepeatedTokenAsExpected") - .given( - cryptoCreate(civilian) - .exposingCreatedIdTo(id -> civilianMirrorAddr.set(asHexedSolidityAddress(id))), - tokenCreate(nfToken) - .tokenType(NON_FUNGIBLE_UNIQUE) - .supplyKey(GENESIS) - .initialSupply(0) - .exposingCreatedIdTo(idLit -> - tokenMirrorAddr.set(asHexedSolidityAddress(HapiPropertySource.asToken(idLit)))), - uploadInitCode(theContract), - contractCreate(theContract)) - .when(sourcing(() -> contractCall( - theContract, - "tokensAssociate", - asHeadlongAddress(civilianMirrorAddr.get()), - (new Address[] { - asHeadlongAddress(tokenMirrorAddr.get()), asHeadlongAddress(tokenMirrorAddr.get()) - })) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED) - .via(multiAssociate) - .payingWith(civilian) - .gas(4_000_000))) - .then( - childRecordsCheck( - multiAssociate, - CONTRACT_REVERT_EXECUTED, - recordWith().status(TOKEN_ID_REPEATED_IN_TOKEN_LIST)), - getAccountInfo(civilian).hasNoTokenRelationship(nfToken)); - } - public HapiSpec handlesUseOfDefaultTokenId() { return defaultHapiSpec("HandlesUseOfDefaultTokenId") .given() diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenAssociationV1SecurityModelSpecs.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenAssociationV1SecurityModelSpecs.java new file mode 100644 index 000000000000..c9f3a4e74b1a --- /dev/null +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/token/TokenAssociationV1SecurityModelSpecs.java @@ -0,0 +1,120 @@ +/* + * Copyright (C) 2020-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.services.bdd.suites.token; + +import static com.hedera.services.bdd.spec.HapiPropertySource.asHexedSolidityAddress; +import static com.hedera.services.bdd.spec.HapiSpec.propertyPreservingHapiSpec; +import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; +import static com.hedera.services.bdd.spec.queries.QueryVerbs.*; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenCreate; +import static com.hedera.services.bdd.spec.transactions.TxnVerbs.uploadInitCode; +import static com.hedera.services.bdd.spec.transactions.contract.HapiParserUtil.asHeadlongAddress; +import static com.hedera.services.bdd.spec.utilops.UtilVerbs.*; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS; +import static com.hedera.services.bdd.suites.contract.precompile.V1SecurityModelOverrides.CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF; +import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.*; +import static com.hederahashgraph.api.proto.java.TokenType.NON_FUNGIBLE_UNIQUE; + +import com.esaulpaugh.headlong.abi.Address; +import com.hedera.services.bdd.spec.HapiPropertySource; +import com.hedera.services.bdd.spec.HapiSpec; +import com.hedera.services.bdd.suites.HapiSuite; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +public class TokenAssociationV1SecurityModelSpecs extends HapiSuite { + private static final Logger log = LogManager.getLogger(TokenAssociationV1SecurityModelSpecs.class); + + public static final String VANILLA_TOKEN = "TokenD"; + public static final String MULTI_KEY = "multiKey"; + public static final String SIMPLE = "simple"; + public static final String FREEZE_KEY = "freezeKey"; + + public static void main(String... 
args) { + final var spec = new TokenAssociationV1SecurityModelSpecs(); + + spec.deferResultsSummary(); + spec.runSuiteSync(); + spec.summarizeDeferredResults(); + } + + @Override + public List getSpecsInSuite() { + return List.of(multiAssociationWithSameRepeatedTokenAsExpected()); + } + + @Override + public boolean canRunConcurrent() { + return false; + } + + private HapiSpec multiAssociationWithSameRepeatedTokenAsExpected() { + final var nfToken = "nfToken"; + final var civilian = "civilian"; + final var multiAssociate = "multiAssociate"; + final var theContract = "AssociateDissociate"; + final AtomicReference tokenMirrorAddr = new AtomicReference<>(); + final AtomicReference civilianMirrorAddr = new AtomicReference<>(); + + return propertyPreservingHapiSpec("MultiAssociationWithSameRepeatedTokenAsExpected") + .preserving(CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS) + .given( + overridingTwo( + CONTRACTS_ALLOW_SYSTEM_USE_OF_HAPI_SIGS, + "CryptoTransfer,TokenAssociateToAccount,TokenCreate", + CONTRACTS_MAX_NUM_WITH_HAPI_SIGS_ACCESS, + CONTRACTS_V1_SECURITY_MODEL_BLOCK_CUTOFF), + cryptoCreate(civilian) + .exposingCreatedIdTo(id -> civilianMirrorAddr.set(asHexedSolidityAddress(id))), + tokenCreate(nfToken) + .tokenType(NON_FUNGIBLE_UNIQUE) + .supplyKey(GENESIS) + .initialSupply(0) + .exposingCreatedIdTo(idLit -> + tokenMirrorAddr.set(asHexedSolidityAddress(HapiPropertySource.asToken(idLit)))), + uploadInitCode(theContract), + contractCreate(theContract)) + .when(sourcing(() -> contractCall( + theContract, + "tokensAssociate", + asHeadlongAddress(civilianMirrorAddr.get()), + (new Address[] { + asHeadlongAddress(tokenMirrorAddr.get()), asHeadlongAddress(tokenMirrorAddr.get()) + })) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED) + .via(multiAssociate) + .payingWith(civilian) + .gas(4_000_000))) + .then( + childRecordsCheck( + multiAssociate, + CONTRACT_REVERT_EXECUTED, + recordWith().status(TOKEN_ID_REPEATED_IN_TOKEN_LIST)), + getAccountInfo(civilian).hasNoTokenRelationship(nfToken)); + } + + @Override + protected Logger getResultsLogger() { + return log; + } +} diff --git a/hedera-node/test-clients/src/main/resource/bootstrap.properties b/hedera-node/test-clients/src/main/resource/bootstrap.properties index e646f45c12af..7d6f12888897 100644 --- a/hedera-node/test-clients/src/main/resource/bootstrap.properties +++ b/hedera-node/test-clients/src/main/resource/bootstrap.properties @@ -63,7 +63,7 @@ balances.compressOnCreation=true cache.records.ttl=180 contracts.allowAutoAssociations=true contracts.allowSystemUseOfHapiSigs=TokenAssociateToAccount,TokenDissociateFromAccount,TokenFreezeAccount,TokenUnfreezeAccount,TokenGrantKycToAccount,TokenRevokeKycFromAccount,TokenAccountWipe,TokenBurn,TokenDelete,TokenMint,TokenUnpause,TokenPause,TokenCreate,TokenUpdate,ContractCall,CryptoTransfer -contracts.maxNumWithHapiSigsAccess=10_000_000 +contracts.maxNumWithHapiSigsAccess=0 contracts.withSpecialHapiSigsAccess= contracts.allowCreate2=true contracts.chainId=295 diff --git a/hedera-node/test-clients/src/test/java/EndToEndPackageRunner.java b/hedera-node/test-clients/src/test/java/EndToEndPackageRunner.java index b7d5f0c58c98..5886853f84f9 100644 --- a/hedera-node/test-clients/src/test/java/EndToEndPackageRunner.java +++ b/hedera-node/test-clients/src/test/java/EndToEndPackageRunner.java @@ -61,8 +61,6 @@ import com.hedera.services.bdd.suites.contract.precompile.CreatePrecompileSuite; import com.hedera.services.bdd.suites.contract.precompile.CryptoTransferHTSSuite; import 
com.hedera.services.bdd.suites.contract.precompile.DelegatePrecompileSuite; -import com.hedera.services.bdd.suites.contract.precompile.DissociatePrecompileSuite; -import com.hedera.services.bdd.suites.contract.precompile.MixedHTSPrecompileTestsSuite; import com.hedera.services.bdd.suites.contract.records.LogsSuite; import com.hedera.services.bdd.suites.contract.records.RecordsSuite; import com.hedera.services.bdd.suites.contract.traceability.TraceabilitySuite; @@ -272,10 +270,7 @@ Collection contractPrecompileEth() { @TestFactory Collection contractPrecompile2() { return List.of(new DynamicContainer[] { - extractSpecsFromSuite(CryptoTransferHTSSuite::new), - extractSpecsFromSuite(DelegatePrecompileSuite::new), - extractSpecsFromSuite(DissociatePrecompileSuite::new), - extractSpecsFromSuite(MixedHTSPrecompileTestsSuite::new) + extractSpecsFromSuite(CryptoTransferHTSSuite::new), extractSpecsFromSuite(DelegatePrecompileSuite::new), }); } @@ -285,7 +280,6 @@ Collection contractPrecompile2() { @TestFactory Collection contractPrecompile2Eth() { return List.of(new DynamicContainer[] { - extractSpecsFromSuiteForEth(DissociatePrecompileSuite::new), extractSpecsFromSuiteForEth(CryptoTransferHTSSuite::new), extractSpecsFromSuiteForEth(DelegatePrecompileSuite::new) }); From 65a716f28f9c720e9bae014b94c64b493a1d73b6 Mon Sep 17 00:00:00 2001 From: Iris Simon <122310714+iwsimon@users.noreply.github.com> Date: Mon, 12 Jun 2023 15:17:35 -0400 Subject: [PATCH 11/70] Implement CryptoGetAccountInfoHandler (#6995) Signed-off-by: Iris Simon Signed-off-by: Neeharika-Sompalli Co-authored-by: Neeharika-Sompalli --- .../node/app/hapi/utils/CommonUtils.java | 14 + .../node/app/hapi/utils/CommonUtilsTest.java | 8 + .../ConsensusGetTopicInfoHandler.java | 1 + .../src/main/java/module-info.java | 3 +- .../impl/ReadableStakingInfoStoreImpl.java | 54 ++ .../service/token/impl/TokenServiceImpl.java | 1 + .../handlers/CryptoGetAccountInfoHandler.java | 328 ++++++++- .../token/impl/utils/RewardCalculator.java | 28 + .../src/main/java/module-info.java | 1 + .../ReadableStakingInfoStoreImplTest.java | 78 +++ .../CryptoGetAccountInfoHandlerTest.java | 626 ++++++++++++++++++ .../token/ReadableStakingInfoStore.java | 38 ++ 12 files changed, 1177 insertions(+), 3 deletions(-) create mode 100644 hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableStakingInfoStoreImpl.java create mode 100644 hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/utils/RewardCalculator.java create mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableStakingInfoStoreImplTest.java create mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetAccountInfoHandlerTest.java create mode 100644 hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableStakingInfoStore.java diff --git a/hedera-node/hapi-utils/src/main/java/com/hedera/node/app/hapi/utils/CommonUtils.java b/hedera-node/hapi-utils/src/main/java/com/hedera/node/app/hapi/utils/CommonUtils.java index 89eb267dc3a4..56b2a65bcfff 100644 --- a/hedera-node/hapi-utils/src/main/java/com/hedera/node/app/hapi/utils/CommonUtils.java +++ b/hedera-node/hapi-utils/src/main/java/com/hedera/node/app/hapi/utils/CommonUtils.java @@ -61,9 +61,11 @@ import static com.hederahashgraph.api.proto.java.HederaFunctionality.TokenUpdate; import static 
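// Hedged illustration (editor's note, not commit text) of the asEvmAddress(long) helper added
// further down in this file: it renders an entity number as a 20-byte "long-zero" EVM address,
// i.e. twelve zero bytes followed by the big-endian eight-byte number. For example, matching the
// new CommonUtilsTest case,
//
//   asEvmAddress(123L)  ->  0x000000000000000000000000000000000000007b
//
// The hex rendering is the editor's own worked example.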
com.hederahashgraph.api.proto.java.HederaFunctionality.UncheckedSubmit; import static com.hederahashgraph.api.proto.java.HederaFunctionality.UtilPrng; +import static java.lang.System.arraycopy; import static java.util.Objects.requireNonNull; import com.google.common.annotations.VisibleForTesting; +import com.google.common.primitives.Longs; import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; import com.hedera.node.app.hapi.utils.exception.UnknownHederaFunctionality; @@ -199,4 +201,16 @@ public static HederaFunctionality functionOf(@NonNull final TransactionBody txn) default -> throw new UnknownHederaFunctionality("Unknown HederaFunctionality for " + txn); }; } + + /** + *get the EVM address from the long number + * + * @param num the input long number + * @return evm address + */ + public static byte[] asEvmAddress(final long num) { + final byte[] evmAddress = new byte[20]; + arraycopy(Longs.toByteArray(num), 0, evmAddress, 12, 8); + return evmAddress; + } } diff --git a/hedera-node/hapi-utils/src/test/java/com/hedera/node/app/hapi/utils/CommonUtilsTest.java b/hedera-node/hapi-utils/src/test/java/com/hedera/node/app/hapi/utils/CommonUtilsTest.java index f7b5edd8ea33..c14a69f0dbc8 100644 --- a/hedera-node/hapi-utils/src/test/java/com/hedera/node/app/hapi/utils/CommonUtilsTest.java +++ b/hedera-node/hapi-utils/src/test/java/com/hedera/node/app/hapi/utils/CommonUtilsTest.java @@ -16,6 +16,7 @@ package com.hedera.node.app.hapi.utils; +import static com.hedera.node.app.hapi.utils.CommonUtils.asEvmAddress; import static com.hedera.node.app.hapi.utils.CommonUtils.functionOf; import static com.hedera.node.app.hapi.utils.CommonUtils.noThrowSha384HashOf; import static com.hedera.node.app.hapi.utils.CommonUtils.productWouldOverflow; @@ -317,4 +318,11 @@ private Method getSetter(final B builder, final Class type) { .get(); } } + + @Test + void getExpectEvmAddress() { + final var address = new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 123}; + final var evmAddress = asEvmAddress(123L); + assertArrayEquals(address, evmAddress); + } } diff --git a/hedera-node/hedera-consensus-service-impl/src/main/java/com/hedera/node/app/service/consensus/impl/handlers/ConsensusGetTopicInfoHandler.java b/hedera-node/hedera-consensus-service-impl/src/main/java/com/hedera/node/app/service/consensus/impl/handlers/ConsensusGetTopicInfoHandler.java index 4d3ff31040e2..e65d91854bf0 100644 --- a/hedera-node/hedera-consensus-service-impl/src/main/java/com/hedera/node/app/service/consensus/impl/handlers/ConsensusGetTopicInfoHandler.java +++ b/hedera-node/hedera-consensus-service-impl/src/main/java/com/hedera/node/app/service/consensus/impl/handlers/ConsensusGetTopicInfoHandler.java @@ -124,6 +124,7 @@ public Response findResponse(@NonNull final QueryContext context, @NonNull final * Provides information about a topic. 
* @param topicID the topic to get information about * @param topicStore the topic store + * @param config the LedgerConfig * @return the information about the topic */ private Optional infoForTopic( diff --git a/hedera-node/hedera-mono-service/src/main/java/module-info.java b/hedera-node/hedera-mono-service/src/main/java/module-info.java index 33e1963d9207..c6fd465f6e91 100644 --- a/hedera-node/hedera-mono-service/src/main/java/module-info.java +++ b/hedera-node/hedera-mono-service/src/main/java/module-info.java @@ -115,7 +115,8 @@ com.hedera.node.config; exports com.hedera.node.app.service.mono.ledger.accounts.staking to com.hedera.node.config, - com.hedera.node.app; + com.hedera.node.app, + com.hedera.node.app.service.token.impl; exports com.hedera.node.app.service.mono.context.init to com.hedera.node.app; exports com.hedera.node.app.service.mono.state.initialization to diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableStakingInfoStoreImpl.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableStakingInfoStoreImpl.java new file mode 100644 index 000000000000..00c160347d4a --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/ReadableStakingInfoStoreImpl.java @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2022-2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl; + +import static com.hedera.node.app.service.token.impl.TokenServiceImpl.STAKING_INFO_KEY; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.state.token.StakingNodeInfo; +import com.hedera.node.app.service.token.ReadableStakingInfoStore; +import com.hedera.node.app.spi.state.ReadableKVState; +import com.hedera.node.app.spi.state.ReadableStates; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; + +/** + * Default implementation of {@link ReadableStakingInfoStore} + */ +public class ReadableStakingInfoStoreImpl implements ReadableStakingInfoStore { + + /** The underlying data storage class that holds the account data. */ + private final ReadableKVState stakingInfoState; + /** + * Create a new {@link ReadableStakingInfoStoreImpl} instance. + * + * @param states The state to use. 
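     * <p>A hedged usage sketch (editor's illustration): given a {@code ReadableStates} whose
     * {@code get(STAKING_INFO_KEY)} entry holds the staking-info map, calling
     * {@code new ReadableStakingInfoStoreImpl(states).get(nodeAccountId)} returns the stored
     * {@link StakingNodeInfo} for that node account id, or {@code null} when none exists.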
+ */ + public ReadableStakingInfoStoreImpl(@NonNull final ReadableStates states) { + this.stakingInfoState = states.get(STAKING_INFO_KEY); + } + + @Nullable + @Override + public StakingNodeInfo get(@NonNull final AccountID nodeId) { + return getStakingInfoLeaf(nodeId); + } + + private StakingNodeInfo getStakingInfoLeaf(final AccountID nodeId) { + return stakingInfoState.get(nodeId); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/TokenServiceImpl.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/TokenServiceImpl.java index 4b75e44a65f4..220a6c63ee98 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/TokenServiceImpl.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/TokenServiceImpl.java @@ -57,6 +57,7 @@ public class TokenServiceImpl implements TokenService { public static final String ACCOUNTS_KEY = "ACCOUNTS"; public static final String TOKEN_RELS_KEY = "TOKEN_RELS"; public static final String PAYER_RECORDS_KEY = "PAYER_RECORDS"; + public static final String STAKING_INFO_KEY = "STAKING_INFOS"; @Override public void registerSchemas(@NonNull SchemaRegistry registry) { diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountInfoHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountInfoHandler.java index caa5ea480961..67e2dd7cb522 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountInfoHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountInfoHandler.java @@ -16,20 +16,65 @@ package com.hedera.node.app.service.token.impl.handlers; +import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_DELETED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.FAIL_INVALID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; +import static com.hedera.hapi.node.base.ResponseType.ANSWER_ONLY; +import static com.hedera.hapi.node.base.ResponseType.COST_ANSWER; +import static com.hedera.hapi.node.base.TokenFreezeStatus.FREEZE_NOT_APPLICABLE; +import static com.hedera.hapi.node.base.TokenFreezeStatus.FROZEN; +import static com.hedera.hapi.node.base.TokenKycStatus.GRANTED; +import static com.hedera.hapi.node.base.TokenKycStatus.KYC_NOT_APPLICABLE; +import static com.hedera.node.app.hapi.utils.CommonUtils.asEvmAddress; +import static com.hedera.node.app.service.evm.accounts.HederaEvmContractAliases.EVM_ADDRESS_LEN; +import static com.hedera.node.app.spi.key.KeyUtils.ECDSA_SECP256K1_COMPRESSED_KEY_LENGTH; +import static com.hedera.node.app.spi.key.KeyUtils.isEmpty; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateFalsePreCheck; +import static com.swirlds.common.utility.CommonUtils.hex; import static java.util.Objects.requireNonNull; +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.Duration; import com.hedera.hapi.node.base.HederaFunctionality; +import com.hedera.hapi.node.base.Key; import com.hedera.hapi.node.base.QueryHeader; import com.hedera.hapi.node.base.ResponseHeader; +import com.hedera.hapi.node.base.StakingInfo; +import 
com.hedera.hapi.node.base.Timestamp; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TokenRelationship; +import com.hedera.hapi.node.state.token.Account; +import com.hedera.hapi.node.state.token.StakingNodeInfo; +import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.state.token.TokenRelation; +import com.hedera.hapi.node.token.AccountInfo; +import com.hedera.hapi.node.token.CryptoGetInfoQuery; import com.hedera.hapi.node.token.CryptoGetInfoResponse; import com.hedera.hapi.node.transaction.Query; import com.hedera.hapi.node.transaction.Response; +import com.hedera.node.app.service.evm.utils.EthSigsUtils; +import com.hedera.node.app.service.token.ReadableAccountStore; +import com.hedera.node.app.service.token.ReadableStakingInfoStore; +import com.hedera.node.app.service.token.ReadableTokenRelationStore; +import com.hedera.node.app.service.token.ReadableTokenStore; +import com.hedera.node.app.service.token.impl.utils.RewardCalculator; import com.hedera.node.app.spi.workflows.PaidQueryHandler; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.QueryContext; +import com.hedera.node.config.data.LedgerConfig; +import com.hedera.node.config.data.TokensConfig; +import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.function.UnaryOperator; import javax.inject.Inject; import javax.inject.Singleton; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; /** * This class contains all workflow-related functionality regarding {@link @@ -37,6 +82,9 @@ */ @Singleton public class CryptoGetAccountInfoHandler extends PaidQueryHandler { + private static final int EVM_ADDRESS_SIZE = 20; + private static final Logger log = LogManager.getLogger(CryptoGetAccountInfoHandler.class); + @Inject public CryptoGetAccountInfoHandler() { // Exists for injection @@ -58,13 +106,289 @@ public Response createEmptyResponse(@NonNull final ResponseHeader header) { @Override public void validate(@NonNull final QueryContext context) throws PreCheckException { requireNonNull(context); - throw new UnsupportedOperationException("Not implemented"); + final var query = context.query(); + final var accountStore = context.createStore(ReadableAccountStore.class); + final CryptoGetInfoQuery op = query.cryptoGetInfoOrThrow(); + if (op.hasAccountID()) { + final var account = accountStore.getAccountById(requireNonNull(op.accountID())); + validateFalsePreCheck(account == null, INVALID_ACCOUNT_ID); + validateFalsePreCheck(account.deleted(), ACCOUNT_DELETED); + } else { + throw new PreCheckException(INVALID_ACCOUNT_ID); + } } @Override public Response findResponse(@NonNull final QueryContext context, @NonNull final ResponseHeader header) { requireNonNull(context); requireNonNull(header); - throw new UnsupportedOperationException("Not implemented"); + final var query = context.query(); + final var tokensConfig = context.configuration().getConfigData(TokensConfig.class); + final var ledgerConfig = context.configuration().getConfigData(LedgerConfig.class); + final var accountStore = context.createStore(ReadableAccountStore.class); + final var tokenRelationStore = context.createStore(ReadableTokenRelationStore.class); + final var tokenStore = context.createStore(ReadableTokenStore.class); + final var stakingInfoStore = 
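        // Hedged note (editor's illustration): the logic below only assembles a full AccountInfo when
        // the precheck code is OK and the query is not a COST_ANSWER request; a cost-only query gets
        // back just the response header. The RewardCalculator instance passed along is the in-line
        // stub from this commit that returns 0 from both methods until a real calculator is wired in.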
context.createStore(ReadableStakingInfoStore.class); + final var op = query.cryptoGetInfoOrThrow(); + final var response = CryptoGetInfoResponse.newBuilder(); + final var accountId = op.accountIDOrElse(AccountID.DEFAULT); + + response.header(header); + final var responseType = op.headerOrElse(QueryHeader.DEFAULT).responseType(); + if (header.nodeTransactionPrecheckCode() == OK && responseType != COST_ANSWER) { + final var optionalInfo = infoForAccount( + accountId, + accountStore, + tokenStore, + tokenRelationStore, + stakingInfoStore, + tokensConfig, + ledgerConfig, + new RewardCalculator() { + @Override + public long epochSecondAtStartOfPeriod(long stakePeriod) { + return 0; + } + + @Override + public long estimatePendingRewards(Account account, @Nullable StakingNodeInfo stakingNodeInfo) { + return 0; + } + }); // remove this when we have a real reward calculator + if (optionalInfo.isPresent()) { + response.accountInfo(optionalInfo.get()); + } else { + response.header(ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(FAIL_INVALID) + .responseType(ANSWER_ONLY) + .cost(0)); + } + } + + return Response.newBuilder().cryptoGetInfo(response).build(); + } + + /** + * Provides information about an account. + * @param accountID account id + * @param accountStore the account store + * @param tokenStore the token store + * @param tokenRelationStore the token relation store + * @param readableStakingInfoStore the staking info store + * @param tokensConfig the TokensConfig + * @param ledgerConfig the LedgerConfig + * @param rewardCalculator the RewardCalculator + * @return the information about the account + */ + private Optional infoForAccount( + @NonNull final AccountID accountID, + @NonNull final ReadableAccountStore accountStore, + @NonNull final ReadableTokenStore tokenStore, + @NonNull final ReadableTokenRelationStore tokenRelationStore, + @NonNull final ReadableStakingInfoStore readableStakingInfoStore, + @NonNull final TokensConfig tokensConfig, + @NonNull final LedgerConfig ledgerConfig, + @NonNull final RewardCalculator rewardCalculator) { + requireNonNull(accountID); + requireNonNull(accountStore); + requireNonNull(tokenStore); + requireNonNull(tokenRelationStore); + requireNonNull(readableStakingInfoStore); + requireNonNull(tokensConfig); + requireNonNull(ledgerConfig); + requireNonNull(rewardCalculator); + final var account = accountStore.getAccountById(accountID); + if (account == null) { + return Optional.empty(); + } else { + final var info = AccountInfo.newBuilder(); + info.ledgerId(ledgerConfig.id()); + if (!isEmpty(account.key())) info.key(account.key()); + info.accountID(accountID); + info.receiverSigRequired(account.receiverSigRequired()); + info.deleted(account.deleted()); + info.memo(account.memo()); + info.autoRenewPeriod(Duration.newBuilder().seconds(account.autoRenewSecs())); + info.balance(account.tinybarBalance()); + info.expirationTime(Timestamp.newBuilder().seconds(account.expiry())); + info.contractAccountID(getContractAccountId(account, account.alias())); + info.ownedNfts(account.numberOwnedNfts()); + info.maxAutomaticTokenAssociations(account.maxAutoAssociations()); + info.ethereumNonce(account.ethereumNonce()); + // info.proxyAccountID(); Deprecated + info.alias(account.alias()); + info.tokenRelationships(getTokenRelationship(tokensConfig, account, tokenStore, tokenRelationStore)); + info.stakingInfo(getStakingInfo(account, rewardCalculator, readableStakingInfoStore)); + return Optional.of(info.build()); + } + } + + /** + * get TokenRelationship of an 
Account + * @param tokenConfig use TokenConfig to get maxRelsPerInfoQuery value + * @param account the account to be calculated from + * @param readableTokenStore readable token store + * @param tokenRelationStore token relation store + * @return ArrayList of TokenRelationship object + */ + private List getTokenRelationship( + @NonNull final TokensConfig tokenConfig, + @NonNull final Account account, + @NonNull final ReadableTokenStore readableTokenStore, + @NonNull final ReadableTokenRelationStore tokenRelationStore) { + requireNonNull(account); + requireNonNull(tokenConfig); + requireNonNull(readableTokenStore); + requireNonNull(tokenRelationStore); + final var ret = new ArrayList(); + var tokenNum = account.headTokenNumber(); + int count = 0; + TokenRelation tokenRelation; + Token token; // token from readableToken store by tokenID + TokenID tokenID; // build from tokenNum + AccountID accountID; // build from accountNumber + while (tokenNum != 0 && count < tokenConfig.maxRelsPerInfoQuery()) { + accountID = + AccountID.newBuilder().accountNum(account.accountNumber()).build(); + tokenID = TokenID.newBuilder().tokenNum(tokenNum).build(); + tokenRelation = tokenRelationStore.get(accountID, tokenID); + if (tokenRelation != null) { + token = readableTokenStore.get(tokenID); + if (token != null) { + addTokenRelation(ret, token, tokenRelation, tokenNum); + } + tokenNum = tokenRelation.nextToken(); + } else { + break; + } + count++; + } + return ret; + } + + /** + * add TokenRelationship to ArrayList + * @param ret ArrayList of TokenRelationship object + * @param token token from readableToken store by tokenID + * @param tokenRelation token relation from token relation store + * @param tokenNum token number + */ + private void addTokenRelation( + ArrayList ret, Token token, TokenRelation tokenRelation, long tokenNum) { + final var tokenRelationship = TokenRelationship.newBuilder() + .tokenId(TokenID.newBuilder().tokenNum(tokenNum).build()) + .symbol(token.symbol()) + .balance(tokenRelation.balance()) + .decimals(token.decimals()) + .kycStatus(tokenRelation.kycGranted() ? GRANTED : KYC_NOT_APPLICABLE) + .freezeStatus(tokenRelation.frozen() ? 
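                // A hedged walk-through (editor's illustration) of the traversal implemented in
                // getTokenRelationship(...) above: token relations form a per-account singly linked
                // list. Starting from account.headTokenNumber(), each TokenRelation's nextToken()
                // yields the next token number; the loop stops at 0, at a missing relation, or once
                // the maxRelsPerInfoQuery() limit from TokensConfig has been reached. So an account
                // whose head token is 1001, with rel(1001).nextToken() == 1002 and
                // rel(1002).nextToken() == 0, reports exactly the relationships for tokens 1001 and
                // 1002, in that order. The token numbers are assumed placeholders.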
FROZEN : FREEZE_NOT_APPLICABLE) + .automaticAssociation(tokenRelation.automaticAssociation()) + .build(); + ret.add(tokenRelationship); + } + + /** + * get ContractAccountId String of an Account + * @param account the account to be calculated from + * @param alias the alias of the account + * @return String of ContractAccountId + */ + private String getContractAccountId(Account account, final Bytes alias) { + if (alias.toByteArray().length == EVM_ADDRESS_SIZE) { + return hex(alias.toByteArray()); + } + // If we can recover an Ethereum EOA address from the account key, we should return that + final var evmAddress = tryAddressRecovery(account.key(), EthSigsUtils::recoverAddressFromPubKey); + if (evmAddress != null && evmAddress.length == EVM_ADDRESS_LEN) { + return Bytes.wrap(evmAddress).toHex(); + } else { + return hex(asEvmAddress(account.accountNumber())); + } + } + + /** + * recover EVM address from account key + * @param key the key of the account + * @param addressRecovery the function to recover EVM address + * @return byte[] of EVM address + */ + public static byte[] tryAddressRecovery(@Nullable final Key key, final UnaryOperator addressRecovery) { + if (key != null && key.hasEcdsaSecp256k1()) { + // Only compressed keys are stored at the moment + final var keyBytes = key.ecdsaSecp256k1().toByteArray(); + if (keyBytes.length == ECDSA_SECP256K1_COMPRESSED_KEY_LENGTH) { + final var evmAddress = addressRecovery.apply(keyBytes); + if (evmAddress != null && evmAddress.length == EVM_ADDRESS_LEN) { + return evmAddress; + } else { + // Not ever expected, since above checks should imply a valid input to the + // LibSecp256k1 library + log.warn("Unable to recover EVM address from {}", () -> hex(keyBytes)); + } + } + } + return null; + } + + /** + * get StakingInfo of an Account + * @param account the account to be calculated from + * @param rewardCalculator the reward calculator + * @param readableStakingInfoStore readable staking info store + * @return StakingInfo object + */ + private StakingInfo getStakingInfo( + final Account account, + @NonNull final RewardCalculator rewardCalculator, + @NonNull final ReadableStakingInfoStore readableStakingInfoStore) { + final var stakingInfo = + StakingInfo.newBuilder().declineReward(account.declineReward()).stakedToMe(account.stakedToMe()); + + final var stakedNum = account.stakedNumber(); + if (stakedNum < 0) { + // Staked num for a node is (-nodeId -1) + stakingInfo.stakedNodeId(-stakedNum - 1); + addNodeStakeMeta(stakingInfo, account, rewardCalculator, readableStakingInfoStore); + } else if (stakedNum > 0) { + stakingInfo.stakedAccountId( + AccountID.newBuilder().realmNum(0).shardNum(0).accountNum(stakedNum)); + } + + return stakingInfo.build(); + } + + /** + * add staking meta to StakingInfo + * @param stakingInfo the staking info to be added to + * @param account the account to be calculated from + * @param rewardCalculator the reward calculator + * @param readableStakingInfoStore readable staking info store + * @return long of StakedNodeAddressBookId + */ + private void addNodeStakeMeta( + final StakingInfo.Builder stakingInfo, + @NonNull final Account account, + @NonNull final RewardCalculator rewardCalculator, + @NonNull final ReadableStakingInfoStore readableStakingInfoStore) { + final var startSecond = rewardCalculator.epochSecondAtStartOfPeriod(account.stakePeriodStart()); + stakingInfo.stakePeriodStart(Timestamp.newBuilder().seconds(startSecond)); + if (mayHavePendingReward(account)) { + final var stakingNodeInfo = 
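            // A hedged worked example (editor's illustration) of the encoding used above: stakedNumber
            // is negative when the account stakes to a node, with node id = -stakedNumber - 1, so a
            // stakedNumber of -3 maps to stakedNodeId 2, while a positive stakedNumber such as 7 maps
            // to stakedAccountId 0.0.7. A pending reward is only estimated when the account stakes to
            // a node and has not declined rewards, which is what mayHavePendingReward(account) checks
            // below; the concrete numbers are assumptions for illustration only.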
readableStakingInfoStore.get(AccountID.newBuilder() + .accountNum(getStakedNodeAddressBookId(account)) + .build()); + final var pendingReward = rewardCalculator.estimatePendingRewards(account, stakingNodeInfo); + stakingInfo.pendingReward(pendingReward); + } + } + + private boolean mayHavePendingReward(Account account) { + return account.stakedNumber() < 0 && !account.declineReward(); + } + + private long getStakedNodeAddressBookId(Account account) { + if (account.stakedNumber() >= 0) { + throw new IllegalStateException("Account is not staked to a node"); + } + return -account.stakedNumber() - 1; } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/utils/RewardCalculator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/utils/RewardCalculator.java new file mode 100644 index 000000000000..bf52cc98159d --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/utils/RewardCalculator.java @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.utils; + +import com.hedera.hapi.node.state.token.Account; +import com.hedera.hapi.node.state.token.StakingNodeInfo; +import edu.umd.cs.findbugs.annotations.Nullable; + +public interface RewardCalculator { + // those are functions from mono RewardCalculator + long epochSecondAtStartOfPeriod(final long stakePeriod); + + long estimatePendingRewards(final Account account, @Nullable final StakingNodeInfo stakingNodeInfo); +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/module-info.java b/hedera-node/hedera-token-service-impl/src/main/java/module-info.java index edd5b18d871d..993e546be307 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/module-info.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/module-info.java @@ -15,6 +15,7 @@ requires com.swirlds.jasperdb; requires org.apache.commons.lang3; requires org.slf4j; + requires tuweni.bytes; provides com.hedera.node.app.service.token.TokenService with com.hedera.node.app.service.token.impl.TokenServiceImpl; diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableStakingInfoStoreImplTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableStakingInfoStoreImplTest.java new file mode 100644 index 000000000000..87c936e43281 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/ReadableStakingInfoStoreImplTest.java @@ -0,0 +1,78 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.test; + +import static com.hedera.node.app.service.token.impl.TokenServiceImpl.STAKING_INFO_KEY; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.BDDMockito.given; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.state.token.StakingNodeInfo; +import com.hedera.node.app.service.token.impl.ReadableStakingInfoStoreImpl; +import com.hedera.node.app.spi.fixtures.state.MapReadableKVState; +import com.hedera.node.app.spi.state.ReadableStates; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class ReadableStakingInfoStoreImplTest { + private static final long ACCOUNT_10 = 10L, ACCOUNT_20 = 20L; + private static final AccountID ACCOUNT_10_ID = + AccountID.newBuilder().accountNum(ACCOUNT_10).build(); + private static final AccountID ACCOUNT_20_ID = + AccountID.newBuilder().accountNum(ACCOUNT_20).build(); + + @Mock + private ReadableStates states; + + @Mock + private StakingNodeInfo tokingNodeInfo; + + private ReadableStakingInfoStoreImpl subject; + + @BeforeEach + void setUp() { + final var readableStakingNodes = MapReadableKVState.builder(STAKING_INFO_KEY) + .value(ACCOUNT_10_ID, tokingNodeInfo) + .build(); + given(states.get(STAKING_INFO_KEY)).willReturn(readableStakingNodes); + + subject = new ReadableStakingInfoStoreImpl(states); + } + + @Test + void testNullConstructorArgs() { + //noinspection DataFlowIssue + assertThrows(NullPointerException.class, () -> new ReadableStakingInfoStoreImpl(null)); + } + + @Test + void testGet() { + final var result = subject.get(ACCOUNT_10_ID); + Assertions.assertThat(result).isEqualTo(tokingNodeInfo); + } + + @Test + void testGetEmpty() { + final var result = subject.get(ACCOUNT_20_ID); + Assertions.assertThat(result).isNull(); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetAccountInfoHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetAccountInfoHandlerTest.java new file mode 100644 index 000000000000..6aaa79dc4dec --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetAccountInfoHandlerTest.java @@ -0,0 +1,626 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
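// A hedged sketch (editor's illustration) of how the tests below can assemble readable stores from
// in-memory fixtures, following the same shape as ReadableStakingInfoStoreImplTest earlier in this
// patch:
//
//   final var stakingInfos = MapReadableKVState.builder(STAKING_INFO_KEY)
//           .value(AccountID.newBuilder().accountNum(0L).build(), stakingNodeInfo)
//           .build();
//   given(readableStates4.get(STAKING_INFO_KEY)).willReturn(stakingInfos);
//   final var stakingInfoStore = new ReadableStakingInfoStoreImpl(readableStates4);
//
// The node-0 account id is an assumed placeholder; the state key, builder calls, and mock names are
// the ones already used in this diff.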
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.FAIL_INVALID; +import static com.hedera.hapi.node.base.ResponseType.ANSWER_ONLY; +import static com.hedera.hapi.node.base.TokenFreezeStatus.FREEZE_NOT_APPLICABLE; +import static com.hedera.hapi.node.base.TokenKycStatus.KYC_NOT_APPLICABLE; +import static com.hedera.node.app.service.token.impl.TokenServiceImpl.ACCOUNTS_KEY; +import static com.hedera.node.app.service.token.impl.TokenServiceImpl.STAKING_INFO_KEY; +import static com.hedera.node.app.service.token.impl.TokenServiceImpl.TOKENS_KEY; +import static com.hedera.node.app.service.token.impl.TokenServiceImpl.TOKEN_RELS_KEY; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static org.assertj.core.api.Assertions.assertThatCode; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mock.Strictness.LENIENT; +import static org.mockito.Mockito.when; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.Duration; +import com.hedera.hapi.node.base.QueryHeader; +import com.hedera.hapi.node.base.ResponseCodeEnum; +import com.hedera.hapi.node.base.ResponseHeader; +import com.hedera.hapi.node.base.StakingInfo; +import com.hedera.hapi.node.base.Timestamp; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TokenRelationship; +import com.hedera.hapi.node.state.token.Account; +import com.hedera.hapi.node.state.token.StakingNodeInfo; +import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.state.token.TokenRelation; +import com.hedera.hapi.node.token.AccountInfo; +import com.hedera.hapi.node.token.CryptoGetInfoQuery; +import com.hedera.hapi.node.token.CryptoGetInfoResponse; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.node.app.service.mono.utils.EntityNum; +import com.hedera.node.app.service.mono.utils.EntityNumPair; +import com.hedera.node.app.service.token.ReadableAccountStore; +import com.hedera.node.app.service.token.ReadableStakingInfoStore; +import com.hedera.node.app.service.token.ReadableTokenRelationStore; +import com.hedera.node.app.service.token.ReadableTokenStore; +import com.hedera.node.app.service.token.impl.ReadableAccountStoreImpl; +import com.hedera.node.app.service.token.impl.ReadableStakingInfoStoreImpl; +import com.hedera.node.app.service.token.impl.ReadableTokenRelationStoreImpl; +import com.hedera.node.app.service.token.impl.ReadableTokenStoreImpl; +import com.hedera.node.app.service.token.impl.handlers.CryptoGetAccountInfoHandler; +import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoHandlerTestBase; +import com.hedera.node.app.spi.fixtures.state.MapReadableKVState; +import com.hedera.node.app.spi.state.ReadableStates; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.app.spi.workflows.QueryContext; +import com.hedera.node.config.converter.BytesConverter; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import java.util.ArrayList; +import java.util.List; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayName; 
+import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class CryptoGetAccountInfoHandlerTest extends CryptoHandlerTestBase { + + @Mock(strictness = LENIENT) + private QueryContext context; + + @Mock + private Token token1, token2, token3; + @Mock + private ReadableStates readableStates1, readableStates2, readableStates3, readableStates4; + + private CryptoGetAccountInfoHandler subject; + + @Mock + private StakingNodeInfo stakingNodeInfo; + + @BeforeEach + public void setUp() { + super.setUp(); + subject = new CryptoGetAccountInfoHandler(); + } + + @Test + @DisplayName("Query header is extracted correctly") + void extractsHeader() { + final var query = createCryptoGetInfoQuery(accountNum); + final var header = subject.extractHeader(query); + final var op = query.cryptoGetInfo(); + assertEquals(op.header(), header); + } + + @Test + @DisplayName("Check empty query response is created correctly") + void createsEmptyResponse() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.FAIL_FEE) + .build(); + final var response = subject.createEmptyResponse(responseHeader); + final var expectedResponse = Response.newBuilder() + .cryptoGetInfo(CryptoGetInfoResponse.newBuilder().header(responseHeader)) + .build(); + assertEquals(expectedResponse, response); + } + + @Test + @DisplayName("Validate query is successful with valid account") + void validatesQueryWhenValidAccount() { + readableAccounts = emptyReadableAccountStateBuilder().value(id, account).build(); + given(readableStates.get(ACCOUNTS_KEY)).willReturn(readableAccounts); + readableStore = new ReadableAccountStoreImpl(readableStates); + + final var query = createCryptoGetInfoQuery(accountNum); + given(context.query()).willReturn(query); + given(context.createStore(ReadableAccountStore.class)).willReturn(readableStore); + + assertThatCode(() -> subject.validate(context)).doesNotThrowAnyException(); + } + + @Test + @DisplayName("Empty account failed during validate") + void validatesQueryIfEmptyAccount() throws Throwable { + final var state = + MapReadableKVState.builder(ACCOUNTS_KEY).build(); + given(readableStates.get(ACCOUNTS_KEY)).willReturn(state); + final var store = new ReadableAccountStoreImpl(readableStates); + + final var query = createEmptyCryptoGetInfoQuery(); + + when(context.query()).thenReturn(query); + when(context.createStore(ReadableAccountStore.class)).thenReturn(store); + + assertThatThrownBy(() -> subject.validate(context)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_ACCOUNT_ID)); + } + + @Test + @DisplayName("Account Id is needed during validate") + void validatesQueryIfInvalidAccount() throws Throwable { + final var state = + MapReadableKVState.builder(ACCOUNTS_KEY).build(); + given(readableStates.get(ACCOUNTS_KEY)).willReturn(state); + final var store = new ReadableAccountStoreImpl(readableStates); + + final var query = createCryptoGetInfoQuery(accountNum); + when(context.query()).thenReturn(query); + when(context.createStore(ReadableAccountStore.class)).thenReturn(store); + + assertThatThrownBy(() -> subject.validate(context)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_ACCOUNT_ID)); + } + + @Test + @DisplayName("deleted account is not valid") + void validatesQueryIfDeletedAccount() throws Throwable { + deleteAccount = 
deleteAccount.copyBuilder().deleted(true).build(); + readableAccounts = emptyReadableAccountStateBuilder() + .value(deleteAccountId, deleteAccount) + .build(); + given(readableStates.get(ACCOUNTS_KEY)).willReturn(readableAccounts); + readableStore = new ReadableAccountStoreImpl(readableStates); + + final var query = createCryptoGetInfoQuery(deleteAccountNum); + when(context.query()).thenReturn(query); + when(context.createStore(ReadableAccountStore.class)).thenReturn(readableStore); + + assertThatThrownBy(() -> subject.validate(context)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.ACCOUNT_DELETED)); + } + + @Test + @DisplayName("failed response is correctly handled in findResponse") + void getsResponseIfFailedResponse() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.INVALID_ACCOUNT_ID) + .build(); + + final var query = createCryptoGetInfoQuery(accountNum); + when(context.query()).thenReturn(query); + when(context.createStore(ReadableAccountStore.class)).thenReturn(readableStore); + + setupConfig(); + + final var response = subject.findResponse(context, responseHeader); + final var op = response.cryptoGetInfo(); + assertEquals(ResponseCodeEnum.INVALID_ACCOUNT_ID, op.header().nodeTransactionPrecheckCode()); + } + + @Test + @DisplayName("fail FAIL_INVALID test") + void getsCorrectResponseHeadIfAccountInfoNotFound() { + final var responseHeader = getOkResponse(); + + setupAccountStore(); + setupTokenStore(); + setupTokenRelationStore(); + setupStakingInfoStore(); + setupConfig(); + + final var query = createCryptoGetInfoQuery(4); + when(context.query()).thenReturn(query); + + final var response = subject.findResponse(context, responseHeader); + final var cryptoGetInfoResponse = response.cryptoGetInfo(); + assertEquals(getFailInvalidResponse(), cryptoGetInfoResponse.header()); + } + + @Test + @DisplayName("OK response is correctly handled in findResponse") + void getsResponseIfOkResponse() { + final var responseHeader = getOkResponse(); + final var expectedInfo = getExpectedAccountInfo(); + + account = account.copyBuilder().stakedNumber(-1).declineReward(false).build(); + setupAccountStore(); + + given(token1.decimals()).willReturn(100); + given(token1.symbol()).willReturn("FOO"); + given(token1.tokenNumber()).willReturn(3L); + setupTokenStore(token1); + + final var tokenRelation = TokenRelation.newBuilder() + .tokenNumber(3L) + .accountNumber(accountNum) + .balance(1000L) + .frozen(false) + .kycGranted(false) + .deleted(false) + .automaticAssociation(true) + .nextToken(4L) + .previousToken(2L) + .build(); + setupTokenRelationStore(tokenRelation); + setupStakingInfoStore(); + setupConfig(); + final var query = createCryptoGetInfoQuery(accountNum); + when(context.query()).thenReturn(query); + + final var response = subject.findResponse(context, responseHeader); + final var cryptoGetInfoResponse = response.cryptoGetInfo(); + assertEquals(ResponseCodeEnum.OK, cryptoGetInfoResponse.header().nodeTransactionPrecheckCode()); + assertEquals(expectedInfo, cryptoGetInfoResponse.accountInfo()); + } + + @Test + @DisplayName("check multiple token relations list") + void checkMulitpleTokenRelations() { + final var responseHeader = getOkResponse(); + final var expectedInfo = getExpectedAccountInfos(); + + account = account.copyBuilder().stakedNumber(-1).declineReward(false).build(); + setupAccountStore(); + + given(token1.decimals()).willReturn(100); + given(token2.decimals()).willReturn(50); + 
given(token1.symbol()).willReturn("FOO"); + given(token2.symbol()).willReturn("BAR"); + given(token1.tokenNumber()).willReturn(3L); + given(token2.tokenNumber()).willReturn(4L); + setupTokenStore(token1, token2); + + final var tokenRelation1 = TokenRelation.newBuilder() + .tokenNumber(3L) + .accountNumber(accountNum) + .balance(1000L) + .frozen(false) + .kycGranted(false) + .deleted(false) + .automaticAssociation(true) + .nextToken(4L) + .previousToken(2L) + .build(); + final var tokenRelation2 = TokenRelation.newBuilder() + .tokenNumber(4L) + .accountNumber(accountNum) + .balance(100L) + .frozen(false) + .kycGranted(false) + .deleted(false) + .automaticAssociation(true) + .nextToken(5L) + .previousToken(3L) + .build(); + final var tokenRelation3 = TokenRelation.newBuilder() + .tokenNumber(5L) + .accountNumber(accountNum) + .balance(10L) + .frozen(false) + .kycGranted(false) + .deleted(false) + .automaticAssociation(true) + .nextToken(6L) + .previousToken(4L) + .build(); + setupTokenRelationStore(tokenRelation1, tokenRelation2, tokenRelation3); + setupStakingInfoStore(); + setupConfig(); + + final var query = createCryptoGetInfoQuery(accountNum); + when(context.query()).thenReturn(query); + + final var response = subject.findResponse(context, responseHeader); + final var cryptoGetInfoResponse = response.cryptoGetInfo(); + + assertEquals(ResponseCodeEnum.OK, cryptoGetInfoResponse.header().nodeTransactionPrecheckCode()); + assertEquals(expectedInfo, cryptoGetInfoResponse.accountInfo()); + assertEquals(2, cryptoGetInfoResponse.accountInfo().tokenRelationships().size()); + } + + @Test + void testStakeNumber() { + final var responseHeader = getOkResponse(); + final var expectedInfo = getExpectedAccountInfo2(); + + account = account.copyBuilder().stakedNumber(1).declineReward(false).build(); + setupAccountStore(); + + given(token1.decimals()).willReturn(100); + given(token1.symbol()).willReturn("FOO"); + given(token1.tokenNumber()).willReturn(3L); + setupTokenStore(token1); + + final var tokenRelation = TokenRelation.newBuilder() + .tokenNumber(3L) + .accountNumber(accountNum) + .balance(1000L) + .frozen(false) + .kycGranted(false) + .deleted(false) + .automaticAssociation(true) + .nextToken(4L) + .previousToken(2L) + .build(); + setupTokenRelationStore(tokenRelation); + setupStakingInfoStore(); + setupConfig(); + final var query = createCryptoGetInfoQuery(accountNum); + when(context.query()).thenReturn(query); + + final var response = subject.findResponse(context, responseHeader); + final var cryptoGetInfoResponse = response.cryptoGetInfo(); + assertEquals(ResponseCodeEnum.OK, cryptoGetInfoResponse.header().nodeTransactionPrecheckCode()); + assertEquals(expectedInfo, cryptoGetInfoResponse.accountInfo()); + } + + @Test + void testEvmAddressAlias() { + final var responseHeader = getOkResponse(); + final var expectedInfo = getExpectedAccountInfoEvm(); + + account = account.copyBuilder() + .stakedNumber(-1) + .declineReward(false) + .alias(Bytes.wrap(evmAddress)) + .build(); + setupAccountStore(); + + given(token1.decimals()).willReturn(100); + given(token1.symbol()).willReturn("FOO"); + given(token1.tokenNumber()).willReturn(3L); + setupTokenStore(token1); + + final var tokenRelation = TokenRelation.newBuilder() + .tokenNumber(3L) + .accountNumber(accountNum) + .balance(1000L) + .frozen(false) + .kycGranted(false) + .deleted(false) + .automaticAssociation(true) + .nextToken(4L) + .previousToken(2L) + .build(); + setupTokenRelationStore(tokenRelation); + setupStakingInfoStore(); + setupConfig(); + 
final var query = createCryptoGetInfoQuery(accountNum); + when(context.query()).thenReturn(query); + + final var response = subject.findResponse(context, responseHeader); + final var cryptoGetInfoResponse = response.cryptoGetInfo(); + assertEquals(ResponseCodeEnum.OK, cryptoGetInfoResponse.header().nodeTransactionPrecheckCode()); + assertEquals(expectedInfo, cryptoGetInfoResponse.accountInfo()); + } + + private void setupAccountStore() { + final var readableAccounts = MapReadableKVState.builder(ACCOUNTS_KEY) + .value(id, account) + .build(); + given(readableStates1.get(ACCOUNTS_KEY)).willReturn(readableAccounts); + ReadableAccountStore ReadableAccountStore = new ReadableAccountStoreImpl(readableStates1); + when(context.createStore(ReadableAccountStore.class)).thenReturn(ReadableAccountStore); + } + + private void setupTokenStore(Token... tokens) { + final var readableToken = MapReadableKVState.builder(TOKENS_KEY); + for (Token token : tokens) { + readableToken.value(EntityNum.fromLong(token.tokenNumber()), token); + } + given(readableStates2.get(TOKENS_KEY)).willReturn(readableToken.build()); + final var readableTokenStore = new ReadableTokenStoreImpl(readableStates2); + when(context.createStore(ReadableTokenStore.class)).thenReturn(readableTokenStore); + } + + private void setupTokenRelationStore(TokenRelation... tokenRelations) { + final var readableTokenRel = MapReadableKVState.builder(TOKEN_RELS_KEY); + for (TokenRelation tokenRelation : tokenRelations) { + readableTokenRel.value(EntityNumPair.fromLongs(accountNum, tokenRelation.tokenNumber()), tokenRelation); + } + given(readableStates3.get(TOKEN_RELS_KEY)).willReturn(readableTokenRel.build()); + final var readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates3); + when(context.createStore(ReadableTokenRelationStore.class)).thenReturn(readableTokenRelStore); + } + + private void setupStakingInfoStore() { + final var readableStakingNodes = MapReadableKVState.builder(STAKING_INFO_KEY) + .value(id, stakingNodeInfo) + .build(); + given(readableStates4.get(STAKING_INFO_KEY)).willReturn(readableStakingNodes); + final var readableStakingInfoStore = new ReadableStakingInfoStoreImpl(readableStates4); + when(context.createStore(ReadableStakingInfoStore.class)).thenReturn(readableStakingInfoStore); + } + + private void setupConfig() { + final var config = new HederaTestConfigBuilder() + .withValue("tokens.maxRelsPerInfoQuery", 2) + .withValue("ledger.id", "0x03") + .getOrCreateConfig(); + given(context.configuration()).willReturn(config); + } + + private AccountInfo getExpectedAccountInfo() { + return AccountInfo.newBuilder() + .key(key) + .accountID(id) + .receiverSigRequired(true) + .ledgerId(new BytesConverter().convert("0x03")) + .deleted(false) + .memo("testAccount") + .autoRenewPeriod(Duration.newBuilder().seconds(72000)) + .balance(payerBalance) + .expirationTime(Timestamp.newBuilder().seconds(1_234_567L)) + .ownedNfts(2) + .maxAutomaticTokenAssociations(10) + .ethereumNonce(0) + .alias(alias.alias()) + .contractAccountID("0000000000000000000000000000000000000003") + .tokenRelationships(getExpectedTokenRelationship()) + .stakingInfo(getExpectedStakingInfo()) + .build(); + } + + private AccountInfo getExpectedAccountInfo2() { + return AccountInfo.newBuilder() + .key(key) + .accountID(id) + .receiverSigRequired(true) + .ledgerId(new BytesConverter().convert("0x03")) + .deleted(false) + .memo("testAccount") + .autoRenewPeriod(Duration.newBuilder().seconds(72000)) + .balance(payerBalance) + 
.expirationTime(Timestamp.newBuilder().seconds(1_234_567L)) + .ownedNfts(2) + .maxAutomaticTokenAssociations(10) + .ethereumNonce(0) + .alias(alias.alias()) + .contractAccountID("0000000000000000000000000000000000000003") + .tokenRelationships(getExpectedTokenRelationship()) + .stakingInfo(getExpectedStakingInfo2()) + .build(); + } + + private AccountInfo getExpectedAccountInfoEvm() { + return AccountInfo.newBuilder() + .key(key) + .accountID(id) + .receiverSigRequired(true) + .ledgerId(new BytesConverter().convert("0x03")) + .deleted(false) + .memo("testAccount") + .autoRenewPeriod(Duration.newBuilder().seconds(72000)) + .balance(payerBalance) + .expirationTime(Timestamp.newBuilder().seconds(1_234_567L)) + .ownedNfts(2) + .maxAutomaticTokenAssociations(10) + .ethereumNonce(0) + .alias(Bytes.wrap(evmAddress)) + .contractAccountID("6aea3773ea468a814d954e6dec795bfee7d76e25") + .tokenRelationships(getExpectedTokenRelationship()) + .stakingInfo(getExpectedStakingInfo()) + .build(); + } + + private AccountInfo getExpectedAccountInfos() { + return AccountInfo.newBuilder() + .key(key) + .accountID(id) + .receiverSigRequired(true) + .ledgerId(new BytesConverter().convert("0x03")) + .deleted(false) + .memo("testAccount") + .autoRenewPeriod(Duration.newBuilder().seconds(72000)) + .balance(payerBalance) + .expirationTime(Timestamp.newBuilder().seconds(1_234_567L)) + .ownedNfts(2) + .maxAutomaticTokenAssociations(10) + .ethereumNonce(0) + .alias(alias.alias()) + .contractAccountID("0000000000000000000000000000000000000003") + .tokenRelationships(getExpectedTokenRelationships()) + .stakingInfo(getExpectedStakingInfo()) + .build(); + } + + private List getExpectedTokenRelationship() { + var ret = new ArrayList(); + final var tokenRelationship1 = TokenRelationship.newBuilder() + .tokenId(TokenID.newBuilder().tokenNum(3L).build()) + .symbol("FOO") + .balance(1000) + .decimals(100) + .kycStatus(KYC_NOT_APPLICABLE) + .freezeStatus(FREEZE_NOT_APPLICABLE) + .automaticAssociation(true) + .build(); + ret.add(tokenRelationship1); + return ret; + } + + private List getExpectedTokenRelationships() { + var ret = new ArrayList(); + final var tokenRelationship1 = TokenRelationship.newBuilder() + .tokenId(TokenID.newBuilder().tokenNum(3L).build()) + .symbol("FOO") + .balance(1000) + .decimals(100) + .kycStatus(KYC_NOT_APPLICABLE) + .freezeStatus(FREEZE_NOT_APPLICABLE) + .automaticAssociation(true) + .build(); + final var tokenRelationship2 = TokenRelationship.newBuilder() + .tokenId(TokenID.newBuilder().tokenNum(4L).build()) + .symbol("BAR") + .balance(100) + .decimals(50) + .kycStatus(KYC_NOT_APPLICABLE) + .freezeStatus(FREEZE_NOT_APPLICABLE) + .automaticAssociation(true) + .build(); + + ret.add(tokenRelationship1); + ret.add(tokenRelationship2); + return ret; + } + + private StakingInfo getExpectedStakingInfo() { + return StakingInfo.newBuilder() + .declineReward(false) + .stakedToMe(1_234L) + .stakedNodeId(0) + .stakePeriodStart(Timestamp.newBuilder().seconds(0)) + .build(); + } + + private StakingInfo getExpectedStakingInfo2() { + return StakingInfo.newBuilder() + .declineReward(false) + .stakedToMe(1_234L) + .stakedAccountId(AccountID.newBuilder().accountNum(1).build()) + .build(); + } + + private ResponseHeader getFailInvalidResponse() { + return ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(FAIL_INVALID) + .responseType(ANSWER_ONLY) + .cost(0) + .build(); + } + + private Query createCryptoGetInfoQuery(final long accountId) { + final var data = CryptoGetInfoQuery.newBuilder() + 
.accountID(AccountID.newBuilder().accountNum(accountId).build()) + .header(QueryHeader.newBuilder().build()) + .build(); + + return Query.newBuilder().cryptoGetInfo(data).build(); + } + + private Query createEmptyCryptoGetInfoQuery() { + final var data = CryptoGetInfoQuery.newBuilder() + .header(QueryHeader.newBuilder().build()) + .build(); + + return Query.newBuilder().cryptoGetInfo(data).build(); + } + + private ResponseHeader getOkResponse() { + return ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.OK) + .build(); + } +} diff --git a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableStakingInfoStore.java b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableStakingInfoStore.java new file mode 100644 index 000000000000..b2a56df24b69 --- /dev/null +++ b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/ReadableStakingInfoStore.java @@ -0,0 +1,38 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.state.token.StakingNodeInfo; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; + +/** + * Provides read-only methods for interacting with the underlying data storage mechanisms for + * working with node stakimg infos. + */ +public interface ReadableStakingInfoStore { + /** + * Fetches a {@link StakingNodeInfo} object from state with the given {@link AccountID}. If the node could not be + * fetched because the given node doesn't exist, returns {@code null}. + * + * @param nodeId given node account id + * @return {@link StakingNodeInfo} object if successfully fetched or {@code null} if the node doesn't exist + */ + @Nullable + StakingNodeInfo get(@NonNull final AccountID nodeId); +} From d598da9fa81ac708baaf9219c01ae43ffc0673c9 Mon Sep 17 00:00:00 2001 From: Cody Littley <56973212+cody-littley@users.noreply.github.com> Date: Mon, 12 Jun 2023 14:37:02 -0500 Subject: [PATCH 12/70] Make consistency testing tool play nicely with local testing. 
(#7057) Signed-off-by: Cody Littley --- .../consistency/ConsistencyTestingToolConfig.java | 5 +++-- .../consistency/ConsistencyTestingToolState.java | 13 ++++++++++++- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/platform-sdk/platform-apps/tests/ConsistencyTestingTool/src/main/java/com/swirlds/demo/consistency/ConsistencyTestingToolConfig.java b/platform-sdk/platform-apps/tests/ConsistencyTestingTool/src/main/java/com/swirlds/demo/consistency/ConsistencyTestingToolConfig.java index aa98235719ba..0f05ff3eccb1 100644 --- a/platform-sdk/platform-apps/tests/ConsistencyTestingTool/src/main/java/com/swirlds/demo/consistency/ConsistencyTestingToolConfig.java +++ b/platform-sdk/platform-apps/tests/ConsistencyTestingTool/src/main/java/com/swirlds/demo/consistency/ConsistencyTestingToolConfig.java @@ -22,8 +22,9 @@ /** * Config for consistency testing tool * - * @param logfileName the name of the log file + * @param logfileDirectory the directory where consistency information is stored, relative to + * {@link com.swirlds.common.config.StateConfig#savedStateDirectory()}. */ @ConfigData("consistencyTestingTool") public record ConsistencyTestingToolConfig( - @ConfigProperty(defaultValue = "ConsistencyTestLog.csv") String logfileName) {} + @ConfigProperty(defaultValue = "consistency-test") String logfileDirectory) {} diff --git a/platform-sdk/platform-apps/tests/ConsistencyTestingTool/src/main/java/com/swirlds/demo/consistency/ConsistencyTestingToolState.java b/platform-sdk/platform-apps/tests/ConsistencyTestingTool/src/main/java/com/swirlds/demo/consistency/ConsistencyTestingToolState.java index 947b4af5fc6c..64403275f489 100644 --- a/platform-sdk/platform-apps/tests/ConsistencyTestingTool/src/main/java/com/swirlds/demo/consistency/ConsistencyTestingToolState.java +++ b/platform-sdk/platform-apps/tests/ConsistencyTestingTool/src/main/java/com/swirlds/demo/consistency/ConsistencyTestingToolState.java @@ -35,6 +35,8 @@ import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Files; import java.nio.file.Path; import java.util.Objects; import org.apache.logging.log4j.LogManager; @@ -102,7 +104,16 @@ public void init( final ConsistencyTestingToolConfig testingToolConfig = platform.getContext().getConfiguration().getConfigData(ConsistencyTestingToolConfig.class); - final Path logFilePath = stateConfig.savedStateDirectory().resolve(testingToolConfig.logfileName()); + final Path logFileDirectory = stateConfig + .savedStateDirectory() + .resolve(testingToolConfig.logfileDirectory()) + .resolve(Long.toString(platform.getSelfId().id())); + try { + Files.createDirectories(logFileDirectory); + } catch (final IOException e) { + throw new UncheckedIOException("unable to set up file system for consistency data", e); + } + final Path logFilePath = logFileDirectory.resolve("ConsistencyTestLog.csv"); transactionHandlingHistory.init(logFilePath); } From 368edd2969a7efda6019e86710ac3ca6957a0907 Mon Sep 17 00:00:00 2001 From: Ivan Malygin Date: Mon, 12 Jun 2023 16:25:23 -0400 Subject: [PATCH 13/70] 07046 Temporarily disabled tests to unblock other team and buy some time for the investigation (#7056) Signed-off-by: Ivan Malygin --- .../test/java/com/swirlds/merkledb/MerkleDbSnapshotTest.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/platform-sdk/swirlds-jasperdb/src/test/java/com/swirlds/merkledb/MerkleDbSnapshotTest.java 
b/platform-sdk/swirlds-jasperdb/src/test/java/com/swirlds/merkledb/MerkleDbSnapshotTest.java index 87a4b0d9fca4..9c38a4b851cd 100644 --- a/platform-sdk/swirlds-jasperdb/src/test/java/com/swirlds/merkledb/MerkleDbSnapshotTest.java +++ b/platform-sdk/swirlds-jasperdb/src/test/java/com/swirlds/merkledb/MerkleDbSnapshotTest.java @@ -39,6 +39,7 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; class MerkleDbSnapshotTest { @@ -86,6 +87,8 @@ private void verify(final MerkleInternal stateRoot) { } @Test + // FUTURE WORK: https://github.com/hashgraph/hedera-services/issues/7046 + @Disabled void snapshotMultipleTablesTestSync() throws Exception { final MerkleInternal initialRoot = new TestInternalNode(); final MerkleDbTableConfig tableConfig = fixedConfig(); @@ -138,6 +141,8 @@ void snapshotMultipleTablesTestSync() throws Exception { } @Test + // FUTURE WORK: https://github.com/hashgraph/hedera-services/issues/7046 + @Disabled void snapshotMultipleTablesTestAsync() throws Exception { final MerkleInternal initialRoot = new TestInternalNode(); final MerkleDbTableConfig tableConfig = fixedConfig(); From 377c02bf15df18210e9491b7d9f85cea03442d42 Mon Sep 17 00:00:00 2001 From: Cody Littley <56973212+cody-littley@users.noreply.github.com> Date: Mon, 12 Jun 2023 15:50:21 -0500 Subject: [PATCH 14/70] 07061 sync test flake (#7062) Signed-off-by: Cody Littley --- .../src/test/java/com/swirlds/platform/DummyHashgraph.java | 6 ++++-- .../test/java/com/swirlds/platform/SyncManagerTest.java | 7 +++++-- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/DummyHashgraph.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/DummyHashgraph.java index a9b9d1c4f9ca..7e6b311885c1 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/DummyHashgraph.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/DummyHashgraph.java @@ -20,6 +20,8 @@ import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.test.RandomAddressBookGenerator; import java.util.HashMap; +import java.util.Random; +import org.checkerframework.checker.nullness.qual.NonNull; public class DummyHashgraph { @@ -30,12 +32,12 @@ public class DummyHashgraph { public long lastRoundReceivedAllTransCons; public AddressBook addressBook; - DummyHashgraph(final int selfIndex) { + DummyHashgraph(@NonNull final Random random, final int selfIndex) { eventIntakeQueueSize = 0; isInCriticalQuorum = new HashMap<>(); numUserTransEvents = 0; lastRoundReceivedAllTransCons = 0; - addressBook = new RandomAddressBookGenerator().setSize(100).build(); + addressBook = new RandomAddressBookGenerator(random).setSize(100).build(); this.selfId = addressBook.getNodeId(selfIndex); } diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SyncManagerTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SyncManagerTest.java index 69456633297a..fe0e22cfc7c9 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SyncManagerTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SyncManagerTest.java @@ -19,6 +19,7 @@ import static com.swirlds.common.system.EventCreationRuleResponse.CREATE; import static 
com.swirlds.common.system.EventCreationRuleResponse.DONT_CREATE; import static com.swirlds.common.system.EventCreationRuleResponse.PASS; +import static com.swirlds.common.test.RandomUtils.getRandomPrintSeed; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotEquals; @@ -46,6 +47,7 @@ import com.swirlds.platform.state.SwirldStateManager; import com.swirlds.test.framework.config.TestConfigBuilder; import java.util.List; +import java.util.Random; import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; @@ -79,7 +81,8 @@ public SyncManagerTestData() { public SyncManagerTestData(final SwirldStateManager swirldStateManager) { freezeManager = mock(FreezeManager.class); startUpEventFrozenManager = mock(StartUpEventFrozenManager.class); - hashgraph = new DummyHashgraph(0); + final Random random = getRandomPrintSeed(); + hashgraph = new DummyHashgraph(random, 0); eventTransactionPool = spy(new EventTransactionPool(new NoOpMetrics(), null, null)); this.swirldStateManager = swirldStateManager; @@ -176,7 +179,7 @@ void basicTest() { } // add the report that will go over the [fallenBehindThreshold] - test.syncManager.reportFallenBehind(new NodeId(neighbors[10])); + test.syncManager.reportFallenBehind(test.addressBook.getNodeId(neighbors[10])); // we should now say we have fallen behind assertTrue(test.syncManager.hasFallenBehind()); From f57aa2525e9414b5b730da9fba774188709df4b5 Mon Sep 17 00:00:00 2001 From: Lazar Petrovic Date: Tue, 13 Jun 2023 11:26:47 +0200 Subject: [PATCH 15/70] Validate manifest file (#6842) Signed-off-by: Lazar Petrovic Signed-off-by: Quan Nguyen Co-authored-by: Quan Nguyen --- platform-sdk/settings.gradle.kts | 370 ++++++++++-------- .../java/com/swirlds/platform/Browser.java | 2 +- .../swirlds/platform/SavedStateLoader.java | 2 +- .../com/swirlds/platform/SwirldsPlatform.java | 2 +- .../cli/ValidateManifestFileCommand.java | 52 +++ .../platform/gossip/GossipFactory.java | 2 +- .../gossip/chatter/ChatterGossip.java | 2 +- .../platform/gossip/sync/SyncGossip.java | 2 +- .../emergency/EmergencyReconnectLearner.java | 2 +- .../emergency/EmergencyReconnectProtocol.java | 2 +- .../EmergencySignedStateValidator.java | 2 +- .../EmergencyRecoveryManager.java | 3 +- .../recovery/EventRecoveryWorkflow.java | 2 +- .../recovery/emergencyfile/Bootstrap.java | 34 ++ .../emergencyfile}/EmergencyRecoveryFile.java | 67 ++-- .../recovery/emergencyfile/Intervals.java | 25 ++ .../recovery/emergencyfile/Location.java | 37 ++ .../recovery/emergencyfile/Package.java | 26 ++ .../recovery/emergencyfile/Recovery.java | 38 ++ .../recovery/emergencyfile/State.java | 44 +++ .../recovery/emergencyfile/Stream.java | 25 ++ .../state/signed/SignedStateFileWriter.java | 2 +- .../src/main/java/module-info.java | 1 + .../platform/SavedStateLoaderTests.java | 4 +- .../EmergencyReconnectProtocolTests.java | 4 +- .../emergency/EmergencyReconnectTests.java | 4 +- .../EmergencySignedStateValidatorTests.java | 2 +- .../recovery/EventRecoveryWorkflowTests.java | 8 +- .../state/EmergencyRecoveryFileTests.java | 81 +++- .../invalid/emergencyRecovery.yaml | 22 ++ .../valid/emergencyRecovery.yaml | 23 ++ 31 files changed, 648 insertions(+), 244 deletions(-) create mode 100644 platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/cli/ValidateManifestFileCommand.java rename 
platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/{state => recovery}/EmergencyRecoveryManager.java (96%) create mode 100644 platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Bootstrap.java rename platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/{state => recovery/emergencyfile}/EmergencyRecoveryFile.java (67%) create mode 100644 platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Intervals.java create mode 100644 platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Location.java create mode 100644 platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Package.java create mode 100644 platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Recovery.java create mode 100644 platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/State.java create mode 100644 platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Stream.java create mode 100644 platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/recovery/emergencyfile/invalid/emergencyRecovery.yaml create mode 100644 platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/recovery/emergencyfile/valid/emergencyRecovery.yaml diff --git a/platform-sdk/settings.gradle.kts b/platform-sdk/settings.gradle.kts index 277dbcbc728c..35063f629648 100644 --- a/platform-sdk/settings.gradle.kts +++ b/platform-sdk/settings.gradle.kts @@ -85,181 +85,205 @@ include(":swirlds-unit-tests:core:swirlds-platform-test") include(":swirlds-unit-tests:structures:swirlds-merkle-test") dependencyResolutionManagement { - @Suppress("UnstableApiUsage") - versionCatalogs { - // The libs of this catalog are the **ONLY** ones that are authorized to be part of the runtime - // distribution. These libs can be depended on during compilation, or bundled as part of - // runtime. - create("libs") { - // Define the approved version numbers - // Third-party dependency versions - - // Cryptography Libraries - version("lazysodium-version", "5.1.1") - version("bouncycastle-version", "1.70") - - // Apache Commons - version("commons-lang3-version", "3.12.0") - version("commons-io-version", "2.11.0") - version("commons-codec-version", "1.15") - version("commons-math3-version", "3.6.1") - version("commons-collections4-version", "4.4") - - // Eclipse Commons - version("eclipse-collections-version", "10.4.0") - - // Classgraph - version("classgraph-version", "4.8.65") - - // Logging - version("slf4j-version", "2.0.0") - version("log4j-version", "2.17.2") - - // Parsers - version("jackson-version", "2.13.3") - - // Network - version("portmapper-version", "2.0.4") - - // JavaFX - version("javafx-version", "17") - - // JNI - version("resource-loader-version", "2.0.1") - version("jna-version", "5.12.1") - - // Protobuf - version("protobuf-version", "3.21.5") - - // Prometheus Java client - version("prometheus-client", "0.16.0") - - // PicoCLI - version("picocli-version", "4.6.3") - - version("spotbugs-version", "4.7.3") - - // List of bundles provided for us. When applicable, favor using these over individual - // libraries. 
- bundle("eclipse", listOf("eclipse-collections")) - bundle("cryptography-core", listOf("lazysodium", "bc-provider", "bc-pkix")) - bundle("cryptography-runtime", listOf("jna", "resource-loader")) - bundle("logging-api", listOf("log4j-api", "slf4j-api")) - bundle("logging-impl", listOf("log4j-core", "slf4j-nop")) - bundle( - "jackson", - listOf("jackson-databind", "jackson-datatype-jsr310", "jackson-dataformat-yaml")) - bundle("networking", listOf("portmapper")) - bundle("javafx", listOf("javafx-base")) - bundle("picocli", listOf("picocli")) - - // Define the individual libraries - // Commons Bundle - library("commons-lang3", "org.apache.commons", "commons-lang3") - .versionRef("commons-lang3-version") - library("commons-io", "commons-io", "commons-io").versionRef("commons-io-version") - library("commons-codec", "commons-codec", "commons-codec").versionRef("commons-codec-version") - library("commons-math3", "org.apache.commons", "commons-math3") - .versionRef("commons-math3-version") - library("commons-collections4", "org.apache.commons", "commons-collections4") - .versionRef("commons-collections4-version") - // Eclipse Bundle - library("eclipse-collections", "org.eclipse.collections", "eclipse-collections") - .versionRef("eclipse-collections-version") - // Cryptography Bundle - library("bc-provider", "org.bouncycastle", "bcprov-jdk15on") - .versionRef("bouncycastle-version") - library("bc-pkix", "org.bouncycastle", "bcpkix-jdk15on").versionRef("bouncycastle-version") - library("lazysodium", "com.goterl", "lazysodium-java").versionRef("lazysodium-version") - // Log4j Bundle - library("log4j-api", "org.apache.logging.log4j", "log4j-api").versionRef("log4j-version") - library("log4j-core", "org.apache.logging.log4j", "log4j-core").versionRef("log4j-version") - // Slf4j Bundle - library("slf4j-api", "org.slf4j", "slf4j-api").versionRef("slf4j-version") - library("slf4j-nop", "org.slf4j", "slf4j-nop").versionRef("slf4j-version") - // Jackson Bundle - library("jackson-databind", "com.fasterxml.jackson.core", "jackson-databind") - .versionRef("jackson-version") - library("jackson-datatype-joda", "com.fasterxml.jackson.datatype", "jackson-datatype-joda") - .versionRef("jackson-version") - library("jackson-datatype-guava", "com.fasterxml.jackson.datatype", "jackson-datatype-guava") - .versionRef("jackson-version") - library("jackson-datatype-jdk8", "com.fasterxml.jackson.datatype", "jackson-datatype-jdk8") - .versionRef("jackson-version") - library( - "jackson-datatype-jsr310", - "com.fasterxml.jackson.datatype", - "jackson-datatype-jsr310") - .versionRef("jackson-version") - library( - "jackson-dataformat-yaml", - "com.fasterxml.jackson.dataformat", - "jackson-dataformat-yaml") - .versionRef("jackson-version") - // Networking Bundle - library("portmapper", "com.offbynull.portmapper", "portmapper") - .versionRef("portmapper-version") - // JavaFX Bundle - library("javafx-base", "org.openjfx", "javafx-base").versionRef("javafx-version") - // Misc - library("classgraph", "io.github.classgraph", "classgraph").versionRef("classgraph-version") - library("jna", "net.java.dev.jna", "jna").versionRef("jna-version") - library("resource-loader", "com.goterl", "resource-loader") - .versionRef("resource-loader-version") - library("protobuf", "com.google.protobuf", "protobuf-java").versionRef("protobuf-version") - library("prometheus-httpserver", "io.prometheus", "simpleclient_httpserver") - .versionRef("prometheus-client") - // PicoCLI Bundle - library("picocli", "info.picocli", 
"picocli").versionRef("picocli-version") - - library("spotbugs-annotations", "com.github.spotbugs", "spotbugs-annotations") - .versionRef("spotbugs-version") + @Suppress("UnstableApiUsage") + versionCatalogs { + // The libs of this catalog are the **ONLY** ones that are authorized to be part of the + // runtime + // distribution. These libs can be depended on during compilation, or bundled as part of + // runtime. + create("libs") { + // Define the approved version numbers + // Third-party dependency versions + + // Cryptography Libraries + version("lazysodium-version", "5.1.1") + version("bouncycastle-version", "1.70") + + // Apache Commons + version("commons-lang3-version", "3.12.0") + version("commons-io-version", "2.11.0") + version("commons-codec-version", "1.15") + version("commons-math3-version", "3.6.1") + version("commons-collections4-version", "4.4") + + // Eclipse Commons + version("eclipse-collections-version", "10.4.0") + + // Classgraph + version("classgraph-version", "4.8.65") + + // Logging + version("slf4j-version", "2.0.0") + version("log4j-version", "2.17.2") + + // Parsers + version("jackson-version", "2.13.3") + + // Network + version("portmapper-version", "2.0.4") + + // JavaFX + version("javafx-version", "17") + + // JNI + version("resource-loader-version", "2.0.1") + version("jna-version", "5.12.1") + + // Protobuf + version("protobuf-version", "3.21.5") + + // Prometheus Java client + version("prometheus-client", "0.16.0") + + // PicoCLI + version("picocli-version", "4.6.3") + + version("spotbugs-version", "4.7.3") + + // List of bundles provided for us. When applicable, favor using these over individual + // libraries. + bundle("eclipse", listOf("eclipse-collections")) + bundle("cryptography-core", listOf("lazysodium", "bc-provider", "bc-pkix")) + bundle("cryptography-runtime", listOf("jna", "resource-loader")) + bundle("logging-api", listOf("log4j-api", "slf4j-api")) + bundle("logging-impl", listOf("log4j-core", "slf4j-nop")) + bundle( + "jackson", + listOf("jackson-databind", "jackson-datatype-jsr310", "jackson-dataformat-yaml") + ) + bundle("networking", listOf("portmapper")) + bundle("javafx", listOf("javafx-base")) + bundle("picocli", listOf("picocli")) + + // Define the individual libraries + // Commons Bundle + library("commons-lang3", "org.apache.commons", "commons-lang3") + .versionRef("commons-lang3-version") + library("commons-io", "commons-io", "commons-io").versionRef("commons-io-version") + library("commons-codec", "commons-codec", "commons-codec") + .versionRef("commons-codec-version") + library("commons-math3", "org.apache.commons", "commons-math3") + .versionRef("commons-math3-version") + library("commons-collections4", "org.apache.commons", "commons-collections4") + .versionRef("commons-collections4-version") + // Eclipse Bundle + library("eclipse-collections", "org.eclipse.collections", "eclipse-collections") + .versionRef("eclipse-collections-version") + // Cryptography Bundle + library("bc-provider", "org.bouncycastle", "bcprov-jdk15on") + .versionRef("bouncycastle-version") + library("bc-pkix", "org.bouncycastle", "bcpkix-jdk15on") + .versionRef("bouncycastle-version") + library("lazysodium", "com.goterl", "lazysodium-java").versionRef("lazysodium-version") + // Log4j Bundle + library("log4j-api", "org.apache.logging.log4j", "log4j-api") + .versionRef("log4j-version") + library("log4j-core", "org.apache.logging.log4j", "log4j-core") + .versionRef("log4j-version") + // Slf4j Bundle + library("slf4j-api", "org.slf4j", 
"slf4j-api").versionRef("slf4j-version") + library("slf4j-nop", "org.slf4j", "slf4j-nop").versionRef("slf4j-version") + // Jackson Bundle + library("jackson-databind", "com.fasterxml.jackson.core", "jackson-databind") + .versionRef("jackson-version") + library( + "jackson-datatype-joda", + "com.fasterxml.jackson.datatype", + "jackson-datatype-joda" + ) + .versionRef("jackson-version") + library( + "jackson-datatype-guava", + "com.fasterxml.jackson.datatype", + "jackson-datatype-guava" + ) + .versionRef("jackson-version") + library( + "jackson-datatype-jdk8", + "com.fasterxml.jackson.datatype", + "jackson-datatype-jdk8" + ) + .versionRef("jackson-version") + library( + "jackson-datatype-jsr310", + "com.fasterxml.jackson.datatype", + "jackson-datatype-jsr310" + ) + .versionRef("jackson-version") + library( + "jackson-dataformat-yaml", + "com.fasterxml.jackson.dataformat", + "jackson-dataformat-yaml" + ) + .versionRef("jackson-version") + // Networking Bundle + library("portmapper", "com.offbynull.portmapper", "portmapper") + .versionRef("portmapper-version") + // JavaFX Bundle + library("javafx-base", "org.openjfx", "javafx-base").versionRef("javafx-version") + // Misc + library("classgraph", "io.github.classgraph", "classgraph") + .versionRef("classgraph-version") + library("jna", "net.java.dev.jna", "jna").versionRef("jna-version") + library("resource-loader", "com.goterl", "resource-loader") + .versionRef("resource-loader-version") + library("protobuf", "com.google.protobuf", "protobuf-java") + .versionRef("protobuf-version") + library("prometheus-httpserver", "io.prometheus", "simpleclient_httpserver") + .versionRef("prometheus-client") + // PicoCLI Bundle + library("picocli", "info.picocli", "picocli").versionRef("picocli-version") + + library("spotbugs-annotations", "com.github.spotbugs", "spotbugs-annotations") + .versionRef("spotbugs-version") + } + + create("testLibs") { + // Define the approved version numbers + // Third-party dependency versions + + // Test Frameworks + version("junit-version", "5.9.0") + + // Mocking Frameworks + version("mockito-version", "4.11.0") + + // Test Utils + version("awaitility-version", "4.2.0") + version("assertj-version", "3.23.1") + version("truth-version", "1.1.3") + + // List of bundles provided for us. When applicable, favor using these over individual + // libraries. 
+ bundle("junit", listOf("junit-jupiter", "junit-jupiter-api", "junit-jupiter-params")) + bundle("mocking", listOf("mockito-core", "mockito-junit")) + bundle("utils", listOf("awaitility", "assertj-core", "truth")) + + // Define the individual libraries + // JUnit Bundle + library("junit-jupiter", "org.junit.jupiter", "junit-jupiter") + .versionRef("junit-version") + library("junit-jupiter-api", "org.junit.jupiter", "junit-jupiter-api") + .versionRef("junit-version") + library("junit-jupiter-params", "org.junit.jupiter", "junit-jupiter-params") + .versionRef("junit-version") + + // Mocking Bundle + library("mockito-core", "org.mockito", "mockito-core").versionRef("mockito-version") + library("mockito-junit", "org.mockito", "mockito-junit-jupiter") + .versionRef("mockito-version") + + // Utils Bundle + library("awaitility", "org.awaitility", "awaitility").versionRef("awaitility-version") + library("assertj-core", "org.assertj", "assertj-core").versionRef("assertj-version") + library("truth", "com.google.truth", "truth").versionRef("truth-version") + } } - - create("testLibs") { - // Define the approved version numbers - // Third-party dependency versions - - // Test Frameworks - version("junit-version", "5.9.0") - - // Mocking Frameworks - version("mockito-version", "4.7.0") - - // Test Utils - version("awaitility-version", "4.2.0") - version("assertj-version", "3.23.1") - version("truth-version", "1.1.3") - - // List of bundles provided for us. When applicable, favor using these over individual - // libraries. - bundle("junit", listOf("junit-jupiter", "junit-jupiter-api", "junit-jupiter-params")) - bundle("mocking", listOf("mockito-core", "mockito-junit")) - bundle("utils", listOf("awaitility", "assertj-core", "truth")) - - // Define the individual libraries - // JUnit Bundle - library("junit-jupiter", "org.junit.jupiter", "junit-jupiter").versionRef("junit-version") - library("junit-jupiter-api", "org.junit.jupiter", "junit-jupiter-api") - .versionRef("junit-version") - library("junit-jupiter-params", "org.junit.jupiter", "junit-jupiter-params") - .versionRef("junit-version") - - // Mocking Bundle - library("mockito-core", "org.mockito", "mockito-core").versionRef("mockito-version") - library("mockito-junit", "org.mockito", "mockito-junit-jupiter").versionRef("mockito-version") - - // Utils Bundle - library("awaitility", "org.awaitility", "awaitility").versionRef("awaitility-version") - library("assertj-core", "org.assertj", "assertj-core").versionRef("assertj-version") - library("truth", "com.google.truth", "truth").versionRef("truth-version") - } - } } gradleEnterprise { - buildScan { - termsOfServiceUrl = "https://gradle.com/terms-of-service" - termsOfServiceAgree = "yes" - } + buildScan { + termsOfServiceUrl = "https://gradle.com/terms-of-service" + termsOfServiceAgree = "yes" + } } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java index e9fa3efc6589..15bb7cee2acd 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java @@ -98,7 +98,7 @@ import com.swirlds.platform.health.filesystem.OSFileSystemChecker; import com.swirlds.platform.network.Network; import com.swirlds.platform.reconnect.emergency.EmergencySignedStateValidator; -import com.swirlds.platform.state.EmergencyRecoveryManager; +import 
com.swirlds.platform.recovery.EmergencyRecoveryManager; import com.swirlds.platform.state.address.AddressBookInitializer; import com.swirlds.platform.state.signed.ReservedSignedState; import com.swirlds.platform.state.signed.SavedStateInfo; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SavedStateLoader.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SavedStateLoader.java index 33e17fb1dc80..7a3aa2621077 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SavedStateLoader.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SavedStateLoader.java @@ -30,7 +30,7 @@ import com.swirlds.platform.dispatch.triggers.control.ShutdownRequestedTrigger; import com.swirlds.platform.internal.SignedStateLoadingException; import com.swirlds.platform.reconnect.emergency.EmergencySignedStateValidator; -import com.swirlds.platform.state.EmergencyRecoveryManager; +import com.swirlds.platform.recovery.EmergencyRecoveryManager; import com.swirlds.platform.state.signed.DeserializedSignedState; import com.swirlds.platform.state.signed.ReservedSignedState; import com.swirlds.platform.state.signed.SavedStateInfo; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java index 1762b5a30930..a86b450a5734 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java @@ -132,7 +132,7 @@ import com.swirlds.platform.observers.ConsensusRoundObserver; import com.swirlds.platform.observers.EventObserverDispatcher; import com.swirlds.platform.observers.PreConsensusEventObserver; -import com.swirlds.platform.state.EmergencyRecoveryManager; +import com.swirlds.platform.recovery.EmergencyRecoveryManager; import com.swirlds.platform.state.State; import com.swirlds.platform.state.SwirldStateManager; import com.swirlds.platform.state.signed.ReservedSignedState; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/cli/ValidateManifestFileCommand.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/cli/ValidateManifestFileCommand.java new file mode 100644 index 000000000000..3d43647dcc5b --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/cli/ValidateManifestFileCommand.java @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.swirlds.platform.cli; + +import com.swirlds.cli.PlatformCli; +import com.swirlds.cli.utility.AbstractCommand; +import com.swirlds.cli.utility.SubcommandOf; +import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.IOException; +import java.nio.file.Path; +import picocli.CommandLine; + +@CommandLine.Command( + name = "validate-manifest-file", + mixinStandardHelpOptions = true, + description = "Validate whether an emergency recovery file is well formed and has the necessary information") +@SubcommandOf(PlatformCli.class) +public class ValidateManifestFileCommand extends AbstractCommand { + + /** The path to the emergency recovery file. */ + private Path dir; + + @SuppressWarnings("unused") + @CommandLine.Parameters( + description = "the path to dir containing manifest file which should be named emergencyRecovery.yaml") + private void setDir(final Path dir) { + this.pathMustExist(dir); + this.dir = dir; + } + + @Override + public @NonNull Integer call() throws IOException { + EmergencyRecoveryFile.read(dir, true); + System.out.println("The emergency recovery file is well formed and has the necessary information."); + return 0; + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/GossipFactory.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/GossipFactory.java index 92198341f984..8419835092c7 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/GossipFactory.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/GossipFactory.java @@ -44,7 +44,7 @@ import com.swirlds.platform.gossip.sync.config.SyncConfig; import com.swirlds.platform.metrics.EventIntakeMetrics; import com.swirlds.platform.observers.EventObserverDispatcher; -import com.swirlds.platform.state.EmergencyRecoveryManager; +import com.swirlds.platform.recovery.EmergencyRecoveryManager; import com.swirlds.platform.state.SwirldStateManager; import com.swirlds.platform.state.signed.SignedState; import edu.umd.cs.findbugs.annotations.NonNull; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/ChatterGossip.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/ChatterGossip.java index 56bc9551951e..35ca9e48cef4 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/ChatterGossip.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/ChatterGossip.java @@ -82,7 +82,7 @@ import com.swirlds.platform.reconnect.ReconnectController; import com.swirlds.platform.reconnect.ReconnectProtocol; import com.swirlds.platform.reconnect.emergency.EmergencyReconnectProtocol; -import com.swirlds.platform.state.EmergencyRecoveryManager; +import com.swirlds.platform.recovery.EmergencyRecoveryManager; import com.swirlds.platform.state.SwirldStateManager; import com.swirlds.platform.state.signed.SignedState; import com.swirlds.platform.threading.PauseAndClear; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncGossip.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncGossip.java index 8cb6adf8c7d4..642d289f7d2e 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncGossip.java +++ 
b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncGossip.java @@ -65,7 +65,7 @@ import com.swirlds.platform.reconnect.ReconnectController; import com.swirlds.platform.reconnect.ReconnectProtocol; import com.swirlds.platform.reconnect.emergency.EmergencyReconnectProtocol; -import com.swirlds.platform.state.EmergencyRecoveryManager; +import com.swirlds.platform.recovery.EmergencyRecoveryManager; import com.swirlds.platform.state.SwirldStateManager; import com.swirlds.platform.state.signed.SignedState; import edu.umd.cs.findbugs.annotations.NonNull; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectLearner.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectLearner.java index 335effae23f4..c59f2dce169a 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectLearner.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectLearner.java @@ -21,7 +21,7 @@ import com.swirlds.platform.network.Connection; import com.swirlds.platform.reconnect.ReconnectController; import com.swirlds.platform.reconnect.ReconnectException; -import com.swirlds.platform.state.EmergencyRecoveryFile; +import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile; import java.io.IOException; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectProtocol.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectProtocol.java index a6a8fb9d8f8a..835a94e85508 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectProtocol.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectProtocol.java @@ -31,7 +31,7 @@ import com.swirlds.platform.network.protocol.Protocol; import com.swirlds.platform.reconnect.ReconnectController; import com.swirlds.platform.reconnect.ReconnectThrottle; -import com.swirlds.platform.state.EmergencyRecoveryManager; +import com.swirlds.platform.recovery.EmergencyRecoveryManager; import com.swirlds.platform.state.signed.SignedStateFinder; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencySignedStateValidator.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencySignedStateValidator.java index 659e038ab0f3..10d701eb1933 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencySignedStateValidator.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencySignedStateValidator.java @@ -23,7 +23,7 @@ import com.swirlds.common.crypto.Hash; import com.swirlds.common.merkle.utility.MerkleTreeVisualizer; import com.swirlds.common.system.address.AddressBook; -import com.swirlds.platform.state.EmergencyRecoveryFile; +import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile; import com.swirlds.platform.state.signed.SignedState; import 
com.swirlds.platform.state.signed.SignedStateInvalidException; import com.swirlds.platform.state.signed.SignedStateValidationData; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/EmergencyRecoveryManager.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/EmergencyRecoveryManager.java similarity index 96% rename from platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/EmergencyRecoveryManager.java rename to platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/EmergencyRecoveryManager.java index 7820c0ec7bbc..4dcf44c1a632 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/EmergencyRecoveryManager.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/EmergencyRecoveryManager.java @@ -14,12 +14,13 @@ * limitations under the License. */ -package com.swirlds.platform.state; +package com.swirlds.platform.recovery; import static com.swirlds.logging.LogMarker.EXCEPTION; import static com.swirlds.platform.system.SystemExitCode.EMERGENCY_RECOVERY_ERROR; import com.swirlds.platform.dispatch.triggers.control.ShutdownRequestedTrigger; +import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile; import java.io.IOException; import java.nio.file.Path; import org.apache.logging.log4j.LogManager; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/EventRecoveryWorkflow.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/EventRecoveryWorkflow.java index f767e7836005..3469d4698338 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/EventRecoveryWorkflow.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/EventRecoveryWorkflow.java @@ -47,9 +47,9 @@ import com.swirlds.config.api.Configuration; import com.swirlds.config.api.ConfigurationBuilder; import com.swirlds.platform.internal.EventImpl; +import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile; import com.swirlds.platform.recovery.internal.EventStreamRoundIterator; import com.swirlds.platform.recovery.internal.RecoveryPlatform; -import com.swirlds.platform.state.EmergencyRecoveryFile; import com.swirlds.platform.state.MinGenInfo; import com.swirlds.platform.state.State; import com.swirlds.platform.state.signed.ReservedSignedState; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Bootstrap.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Bootstrap.java new file mode 100644 index 000000000000..1458b535113b --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Bootstrap.java @@ -0,0 +1,34 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.swirlds.platform.recovery.emergencyfile; + +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.ToStringSerializer; +import com.swirlds.common.jackson.InstantDeserializer; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.time.Instant; + +/** + * Data about the bootstrap state loaded during event recovery (the starting state) + * + * @param timestamp + * the consensus timestamp of the bootstrap state + */ +public record Bootstrap( + @NonNull @JsonSerialize(using = ToStringSerializer.class) @JsonDeserialize(using = InstantDeserializer.class) + Instant timestamp) {} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/EmergencyRecoveryFile.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/EmergencyRecoveryFile.java similarity index 67% rename from platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/EmergencyRecoveryFile.java rename to platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/EmergencyRecoveryFile.java index 3f6731525fc2..b99c5da5a68d 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/EmergencyRecoveryFile.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/EmergencyRecoveryFile.java @@ -14,20 +14,16 @@ * limitations under the License. */ -package com.swirlds.platform.state; +package com.swirlds.platform.recovery.emergencyfile; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import com.fasterxml.jackson.databind.ser.std.ToStringSerializer; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.fasterxml.jackson.dataformat.yaml.YAMLGenerator; import com.swirlds.common.config.StateConfig; import com.swirlds.common.config.singleton.ConfigurationHolder; import com.swirlds.common.crypto.Hash; -import com.swirlds.common.jackson.HashDeserializer; -import com.swirlds.common.jackson.InstantDeserializer; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -36,7 +32,7 @@ /** * Defines all data related to the emergency recovery file and how it is formatted. 
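As a point of reference, here is a minimal round-trip sketch for the Bootstrap record shown above. It is not part of the patch; it assumes the Jackson YAML module is on the classpath and that Jackson record deserialization is available, and exists only to illustrate that the timestamp is written as an ISO-8601 string by ToStringSerializer and read back by InstantDeserializer.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import com.swirlds.platform.recovery.emergencyfile.Bootstrap;
import java.time.Instant;

class BootstrapRoundTripSketch {
    public static void main(final String[] args) throws Exception {
        final ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
        final Bootstrap original = new Bootstrap(Instant.parse("2023-06-06T10:14:53.993187Z"));
        // The Instant is serialized as a string and deserialized back via InstantDeserializer.
        final String yaml = mapper.writeValueAsString(original);
        final Bootstrap copy = mapper.readValue(yaml, Bootstrap.class);
        System.out.println(yaml.trim() + " -> " + copy.timestamp());
    }
}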
*/ -public record EmergencyRecoveryFile(Recovery recovery) { +public record EmergencyRecoveryFile(@NonNull Recovery recovery) { private static final String OUTPUT_FILENAME = "emergencyRecovery.yaml"; private static final String INPUT_FILENAME = ConfigurationHolder.getConfigData(StateConfig.class).emergencyStateFileName(); @@ -48,8 +44,8 @@ public record EmergencyRecoveryFile(Recovery recovery) { * @param hash the hash of the state this file is for * @param timestamp the consensus timestamp of the state this file is for */ - public EmergencyRecoveryFile(final long round, final Hash hash, final Instant timestamp) { - this(new Recovery(new State(round, hash, timestamp), null)); + public EmergencyRecoveryFile(final long round, final Hash hash, @NonNull final Instant timestamp) { + this(new Recovery(new State(round, hash, timestamp), null, null, null)); } /** @@ -59,8 +55,8 @@ public EmergencyRecoveryFile(final long round, final Hash hash, final Instant ti * @param state emergency recovery data for the state resulting from the event recovery process * @param bootstrapTime the consensus timestamp of the bootstrap state used to start the event recovery process */ - public EmergencyRecoveryFile(final State state, final Instant bootstrapTime) { - this(new Recovery(state, new Boostrap(bootstrapTime))); + public EmergencyRecoveryFile(@NonNull final State state, @NonNull final Instant bootstrapTime) { + this(new Recovery(state, new Bootstrap(bootstrapTime), null, null)); } /** @@ -97,7 +93,7 @@ public Recovery recovery() { * @param directory the directory to write to. Must exist and be writable. * @throws IOException if an exception occurs creating or writing to the file */ - public void write(final Path directory) throws IOException { + public void write(@NonNull final Path directory) throws IOException { final ObjectMapper mapper = new ObjectMapper(new YAMLFactory().disable(YAMLGenerator.Feature.WRITE_DOC_START_MARKER)); mapper.writeValue(directory.resolve(OUTPUT_FILENAME).toFile(), this); @@ -108,11 +104,13 @@ public void write(final Path directory) throws IOException { * the file does not exist. * * @param directory the directory containing the emergency recovery file. Must exist and be readable. + * @param failOnMissingFields if true, throw an exception if the file is missing any fields. 
If false, ignore * @return a new record containing the emergency recovery data in the file, or null if no emergency recovery file * exists * @throws IOException if an exception occurs reading from the file, or the file content is not properly formatted */ - public static EmergencyRecoveryFile read(final Path directory) throws IOException { + public static EmergencyRecoveryFile read(@NonNull final Path directory, final boolean failOnMissingFields) + throws IOException { final Path fileToRead = directory.resolve(INPUT_FILENAME); if (!Files.exists(fileToRead)) { return null; @@ -120,11 +118,22 @@ public static EmergencyRecoveryFile read(final Path directory) throws IOExceptio final ObjectMapper mapper = new ObjectMapper(new YAMLFactory()) .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) .configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, true); + if (failOnMissingFields) { + mapper.configure(DeserializationFeature.FAIL_ON_MISSING_CREATOR_PROPERTIES, true); + mapper.configure(DeserializationFeature.FAIL_ON_NULL_CREATOR_PROPERTIES, true); + } final EmergencyRecoveryFile file = mapper.readValue(fileToRead.toFile(), EmergencyRecoveryFile.class); validate(file); return file; } + /** + * Same as {@link #read(Path, boolean)} but with failOnMissingFields set to false. + */ + public static EmergencyRecoveryFile read(@NonNull final Path directory) throws IOException { + return read(directory, false); + } + private static void validate(final EmergencyRecoveryFile file) throws IOException { if (file == null) { throw new IOException("Failed to read emergency recovery file, object mapper returned null value"); @@ -134,36 +143,4 @@ private static void validate(final EmergencyRecoveryFile file) throws IOExceptio throw new IOException("Required field 'hash' is null."); } } - - /** - * The top level of the emergency recovery YAML structure. - * - * @param state information about the state written to disk - * @param boostrap information about the state used to bootstrap event recovery. Not written during normal - * operation. Only written during event recovery. - */ - public record Recovery(State state, Boostrap boostrap) {} - - /** - * Data about the state written to disk, either during normal operation or at the end of event recovery. - * - * @param round the round of the state. This value is required by the platform when reading a file. - * @param hash the hash of the state. This value is required by the platform when reading a file. - * @param timestamp the consensus timestamp of the state. This value is optional for the platform when reading a - * file, but should always be populated with an accurate value when written by the platform. 
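A hedged usage sketch for the two read overloads above (not from the patch; the directory path is a placeholder). The single-argument overload keeps the previous lenient behavior, while passing true makes the object mapper also fail on missing creator properties.

import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile;
import java.io.IOException;
import java.nio.file.Path;

class EmergencyRecoveryFileReadSketch {
    public static void main(final String[] args) throws IOException {
        final Path dir = Path.of("data", "saved");
        // Lenient read: unknown properties are ignored, missing fields tolerated; returns null if no file exists.
        final EmergencyRecoveryFile lenient = EmergencyRecoveryFile.read(dir);
        // Strict read: a missing required field causes the object mapper to throw.
        final EmergencyRecoveryFile strict = EmergencyRecoveryFile.read(dir, true);
        System.out.println(lenient + " / " + strict);
    }
}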
- */ - public record State( - long round, - @JsonSerialize(using = ToStringSerializer.class) @JsonDeserialize(using = HashDeserializer.class) Hash hash, - @JsonSerialize(using = ToStringSerializer.class) @JsonDeserialize(using = InstantDeserializer.class) - Instant timestamp) {} - - /** - * Data about the bootstrap state loaded during event recovery (the starting state) - * - * @param timestamp the consensus timestamp of the bootstrap state - */ - public record Boostrap( - @JsonSerialize(using = ToStringSerializer.class) @JsonDeserialize(using = InstantDeserializer.class) - Instant timestamp) {} } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Intervals.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Intervals.java new file mode 100644 index 000000000000..44d4ad642691 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Intervals.java @@ -0,0 +1,25 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.recovery.emergencyfile; + +/** + * The intervals at which various stream files are written, in milliseconds. + * @param record record stream files are written at this interval, in milliseconds + * @param event event stream files are written at this interval, in milliseconds + * @param balances balance files are written at this interval, in milliseconds + */ +public record Intervals(long record, long event, long balances) {} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Location.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Location.java new file mode 100644 index 000000000000..cf787f98818e --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Location.java @@ -0,0 +1,37 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.swirlds.platform.recovery.emergencyfile; + +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.ToStringSerializer; +import com.swirlds.common.crypto.Hash; +import com.swirlds.common.jackson.HashDeserializer; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.net.URL; + +/** + * A location where a recovery package can be downloaded from + * @param type the type of package file (e.g. "zip") + * @param url the URL where the package can be downloaded from + * @param hash the hash of the package file + */ +public record Location( + @NonNull String type, + @NonNull URL url, + @NonNull @JsonSerialize(using = ToStringSerializer.class) @JsonDeserialize(using = HashDeserializer.class) + Hash hash) {} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Package.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Package.java new file mode 100644 index 000000000000..6dca6b63214b --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Package.java @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.recovery.emergencyfile; + +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.List; + +/** + * Information about where to find the emergency recovery package + * @param locations the locations of the package + */ +public record Package(@NonNull List locations) {} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Recovery.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Recovery.java new file mode 100644 index 000000000000..688a97eb4370 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Recovery.java @@ -0,0 +1,38 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.recovery.emergencyfile; + +import com.fasterxml.jackson.annotation.JsonProperty; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; + +/** + * The top level of the emergency recovery YAML structure. 
+ * + * @param state + * information about the state written to disk + * @param bootstrap + * information about the state used to bootstrap event recovery. Not written during normal + * operation. Only written during event recovery. + * @param pkg information about where to find the emergency recovery package + * @param stream information about the various file streams + */ +public record Recovery( + @NonNull State state, + @Nullable Bootstrap bootstrap, + @Nullable @JsonProperty("package") Package pkg, + @Nullable Stream stream) {} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/State.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/State.java new file mode 100644 index 000000000000..3ee042fa00a6 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/State.java @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.recovery.emergencyfile; + +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.ToStringSerializer; +import com.swirlds.common.crypto.Hash; +import com.swirlds.common.jackson.HashDeserializer; +import com.swirlds.common.jackson.InstantDeserializer; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.time.Instant; + +/** + * Data about the state written to disk, either during normal operation or at the end of event recovery. + * + * @param round + * the round of the state. This value is required by the platform when reading a file. + * @param hash + * the hash of the state. This value is required by the platform when reading a file. + * @param timestamp + * the consensus timestamp of the state. This value is optional for the platform when reading a + * file, but should always be populated with an accurate value when written by the platform. + */ +public record State( + long round, + @NonNull @JsonSerialize(using = ToStringSerializer.class) @JsonDeserialize(using = HashDeserializer.class) + Hash hash, + @NonNull @JsonSerialize(using = ToStringSerializer.class) @JsonDeserialize(using = InstantDeserializer.class) + Instant timestamp) {} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Stream.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Stream.java new file mode 100644 index 000000000000..5bec4ab6e8ca --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/Stream.java @@ -0,0 +1,25 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
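Putting the new records together, the following is a construction-and-write sketch under stated assumptions: the all-zero SHA-384 hashes, the example URL, and the temporary output directory are placeholders, the Hash constructor used is assumed from swirlds-common, and the optional stream section is left null. It is not part of the patch and only shows how a complete emergencyRecovery.yaml could be produced from these records.

import com.swirlds.common.crypto.DigestType;
import com.swirlds.common.crypto.Hash;
import com.swirlds.platform.recovery.emergencyfile.Bootstrap;
import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile;
import com.swirlds.platform.recovery.emergencyfile.Location;
import com.swirlds.platform.recovery.emergencyfile.Package;
import com.swirlds.platform.recovery.emergencyfile.Recovery;
import com.swirlds.platform.recovery.emergencyfile.State;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Instant;
import java.util.List;

class WriteEmergencyRecoveryFileSketch {
    public static void main(final String[] args) throws Exception {
        // Placeholder all-zero SHA-384 hashes; a real file would carry the hash of the signed state
        // and of the downloadable package (Hash(byte[], DigestType) is assumed here).
        final Hash stateHash = new Hash(new byte[48], DigestType.SHA_384);
        final Hash packageHash = new Hash(new byte[48], DigestType.SHA_384);
        final EmergencyRecoveryFile file = new EmergencyRecoveryFile(new Recovery(
                new State(1234L, stateHash, Instant.now()),
                new Bootstrap(Instant.now()),
                new Package(List.of(new Location("zip", new URL("https://example.com/recovery.zip"), packageHash))),
                null));
        final Path dir = Files.createTempDirectory("emergency-recovery");
        file.write(dir);
        System.out.println(Files.readString(dir.resolve("emergencyRecovery.yaml")));
    }
}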
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.recovery.emergencyfile; + +import edu.umd.cs.findbugs.annotations.NonNull; + +/** + * Information about the various file streams + * @param intervals the output intervals of the streams + */ +public record Stream(@NonNull Intervals intervals) {} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileWriter.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileWriter.java index d4125f183bbf..f49cac258dcc 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileWriter.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileWriter.java @@ -34,7 +34,7 @@ import com.swirlds.common.system.address.AddressBook; import com.swirlds.logging.payloads.StateSavedToDiskPayload; import com.swirlds.platform.Settings; -import com.swirlds.platform.state.EmergencyRecoveryFile; +import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile; import com.swirlds.platform.state.State; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/module-info.java b/platform-sdk/swirlds-platform-core/src/main/java/module-info.java index e3fdbf5380dd..58bbe251a0e6 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/module-info.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/module-info.java @@ -125,6 +125,7 @@ exports com.swirlds.platform.gossip; exports com.swirlds.platform.reconnect; exports com.swirlds.platform.gossip.shadowgraph; + exports com.swirlds.platform.recovery.emergencyfile; /* Swirlds Libraries */ requires com.swirlds.base; diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SavedStateLoaderTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SavedStateLoaderTests.java index a6bebd41611d..32523239c205 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SavedStateLoaderTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SavedStateLoaderTests.java @@ -40,8 +40,8 @@ import com.swirlds.platform.dispatch.triggers.control.ShutdownRequestedTrigger; import com.swirlds.platform.internal.SignedStateLoadingException; import com.swirlds.platform.reconnect.emergency.EmergencySignedStateValidator; -import com.swirlds.platform.state.EmergencyRecoveryFile; -import com.swirlds.platform.state.EmergencyRecoveryManager; +import com.swirlds.platform.recovery.EmergencyRecoveryManager; +import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile; import com.swirlds.platform.state.RandomSignedStateGenerator; import com.swirlds.platform.state.signed.SavedStateInfo; import com.swirlds.platform.state.signed.SignedState; diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectProtocolTests.java 
b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectProtocolTests.java index f8c1407f353b..4aa9b69c98cf 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectProtocolTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectProtocolTests.java @@ -36,8 +36,8 @@ import com.swirlds.platform.reconnect.ReconnectController; import com.swirlds.platform.reconnect.ReconnectHelper; import com.swirlds.platform.reconnect.ReconnectThrottle; -import com.swirlds.platform.state.EmergencyRecoveryFile; -import com.swirlds.platform.state.EmergencyRecoveryManager; +import com.swirlds.platform.recovery.EmergencyRecoveryManager; +import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile; import com.swirlds.platform.state.signed.SignedStateFinder; import com.swirlds.platform.state.signed.SignedStateManager; import com.swirlds.test.framework.config.TestConfigBuilder; diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectTests.java index 81cfd418fccc..e05a93a3dc30 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectTests.java @@ -50,8 +50,8 @@ import com.swirlds.platform.reconnect.ReconnectLearnerFactory; import com.swirlds.platform.reconnect.ReconnectLearnerThrottle; import com.swirlds.platform.reconnect.ReconnectThrottle; -import com.swirlds.platform.state.EmergencyRecoveryFile; -import com.swirlds.platform.state.EmergencyRecoveryManager; +import com.swirlds.platform.recovery.EmergencyRecoveryManager; +import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile; import com.swirlds.platform.state.RandomSignedStateGenerator; import com.swirlds.platform.state.State; import com.swirlds.platform.state.signed.ReservedSignedState; diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencySignedStateValidatorTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencySignedStateValidatorTests.java index e0d4de4585c8..9106fba0a3d1 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencySignedStateValidatorTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/reconnect/emergency/EmergencySignedStateValidatorTests.java @@ -25,7 +25,7 @@ import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.test.RandomAddressBookGenerator; import com.swirlds.common.test.RandomUtils; -import com.swirlds.platform.state.EmergencyRecoveryFile; +import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile; import com.swirlds.platform.state.RandomSignedStateGenerator; import com.swirlds.platform.state.signed.SignedState; import com.swirlds.platform.state.signed.SignedStateInvalidException; diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/EventRecoveryWorkflowTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/EventRecoveryWorkflowTests.java index 
590201620635..b208316b12f8 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/EventRecoveryWorkflowTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/EventRecoveryWorkflowTests.java @@ -41,7 +41,7 @@ import com.swirlds.common.system.events.ConsensusEvent; import com.swirlds.common.test.RandomUtils; import com.swirlds.platform.internal.EventImpl; -import com.swirlds.platform.state.EmergencyRecoveryFile; +import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile; import com.swirlds.platform.state.MinGenInfo; import java.io.IOException; import java.nio.file.Path; @@ -406,10 +406,10 @@ void testUpdateEmergencyRecoveryFile() throws IOException { assertEquals(round, updatedRecoveryFile.round(), "round does not match"); assertEquals(hash, updatedRecoveryFile.hash(), "hash does not match"); assertEquals(stateTimestamp, updatedRecoveryFile.timestamp(), "state timestamp does not match"); - assertNotNull(updatedRecoveryFile.recovery().boostrap(), "bootstrap should not be null"); + assertNotNull(updatedRecoveryFile.recovery().bootstrap(), "bootstrap should not be null"); assertEquals( bootstrapTime, - updatedRecoveryFile.recovery().boostrap().timestamp(), + updatedRecoveryFile.recovery().bootstrap().timestamp(), "bootstrap timestamp does not match"); // Verify the contents of the backup recovery file (copy of the original) @@ -418,7 +418,7 @@ void testUpdateEmergencyRecoveryFile() throws IOException { assertEquals(round, backupFile.round(), "round does not match"); assertEquals(hash, backupFile.hash(), "hash does not match"); assertEquals(stateTimestamp, backupFile.timestamp(), "state timestamp does not match"); - assertNull(backupFile.recovery().boostrap(), "No bootstrap information should exist in the backup"); + assertNull(backupFile.recovery().bootstrap(), "No bootstrap information should exist in the backup"); } // FUTURE WORK reapplyTransactions() test diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/EmergencyRecoveryFileTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/EmergencyRecoveryFileTests.java index 95b4e7fef578..a7e3098b5481 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/EmergencyRecoveryFileTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/state/EmergencyRecoveryFileTests.java @@ -25,17 +25,32 @@ import com.swirlds.common.crypto.DigestType; import com.swirlds.common.crypto.Hash; import com.swirlds.common.test.RandomUtils; +import com.swirlds.platform.recovery.emergencyfile.EmergencyRecoveryFile; +import com.swirlds.platform.recovery.emergencyfile.Intervals; +import com.swirlds.platform.recovery.emergencyfile.Location; +import com.swirlds.platform.recovery.emergencyfile.Package; +import com.swirlds.platform.recovery.emergencyfile.Recovery; +import com.swirlds.platform.recovery.emergencyfile.State; +import com.swirlds.platform.recovery.emergencyfile.Stream; +import com.swirlds.test.framework.ResourceLoader; import java.io.BufferedWriter; import java.io.FileWriter; import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URISyntaxException; +import java.net.URL; import java.nio.file.Path; import java.time.Instant; import java.util.Arrays; +import java.util.List; import java.util.Random; import java.util.stream.Collectors; +import org.apache.commons.lang3.RandomStringUtils; import 
org.apache.commons.lang3.tuple.Pair; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; +import org.mockito.MockMakers; +import org.mockito.Mockito; public class EmergencyRecoveryFileTests { @@ -73,10 +88,10 @@ void testReadWriteWithBootstrap() throws IOException { assertEquals(toWrite.round(), readIn.round(), "round does not match"); assertEquals(toWrite.hash(), readIn.hash(), "hash does not match"); assertEquals(toWrite.timestamp(), readIn.timestamp(), "state timestamp does not match"); - assertNotNull(readIn.recovery().boostrap(), "bootstrap should not be null"); + assertNotNull(readIn.recovery().bootstrap(), "bootstrap should not be null"); assertEquals( - toWrite.recovery().boostrap().timestamp(), - readIn.recovery().boostrap().timestamp(), + toWrite.recovery().bootstrap().timestamp(), + readIn.recovery().bootstrap().timestamp(), "bootstrap timestamp does not match"); } @@ -166,6 +181,59 @@ void testFileDoesNotExist() throws IOException { assertNull(EmergencyRecoveryFile.read(tmpDir), "Reading from a file that does not exist should return null"); } + @Test + void testReadWriteLocations() throws IOException { + final Random r = RandomUtils.getRandomPrintSeed(); + final EmergencyRecoveryFile file = new EmergencyRecoveryFile(new Recovery( + new State(r.nextLong(), randomHash(r), Instant.now()), + null, + new Package(List.of(randomLocation(r), randomLocation(r), randomLocation(r))), + null)); + file.write(tmpDir); + assertDoesNotThrow(() -> EmergencyRecoveryFile.read(tmpDir), "Reading a valid file should not throw"); + } + + @Test + void testBadUrl() throws IOException { + final URL badUrl = Mockito.mock(URL.class, Mockito.withSettings().mockMaker(MockMakers.INLINE)); + Mockito.when(badUrl.toString()).thenReturn("not a url"); + final Random r = RandomUtils.getRandomPrintSeed(); + final EmergencyRecoveryFile file = new EmergencyRecoveryFile(new Recovery( + new State(r.nextLong(), randomHash(r), Instant.now()), + null, + new Package(List.of(randomLocation(r), randomLocation(r), new Location("type", badUrl, randomHash(r)))), + null)); + file.write(tmpDir); + assertThrows( + Exception.class, + () -> EmergencyRecoveryFile.read(tmpDir), + "Reading a file with a bad url should throw"); + } + + @Test + void testReadWriteStream() throws IOException { + final Random r = RandomUtils.getRandomPrintSeed(); + final EmergencyRecoveryFile file = new EmergencyRecoveryFile(new Recovery( + new State(r.nextLong(), randomHash(r), Instant.now()), + null, + null, + new Stream(new Intervals(2000, 5000, 900000)))); + file.write(tmpDir); + assertDoesNotThrow(() -> EmergencyRecoveryFile.read(tmpDir), "Reading a valid file should not throw"); + } + + @Test + void testReadAllFields() throws URISyntaxException { + final Path dir = ResourceLoader.getFile("com/swirlds/platform/recovery/emergencyfile/valid/"); + assertDoesNotThrow(() -> EmergencyRecoveryFile.read(dir, true)); + } + + @Test + void testFieldMissing() throws URISyntaxException { + final Path dir = ResourceLoader.getFile("com/swirlds/platform/recovery/emergencyfile/invalid/"); + assertThrows(Exception.class, () -> EmergencyRecoveryFile.read(dir, true)); + } + private EmergencyRecoveryFile createRecoveryFile(final Random r) { return new EmergencyRecoveryFile(r.nextLong(), randomHash(r), Instant.now()); } @@ -222,4 +290,11 @@ private static String randomLongString(final Random r) { private static String randomInstantString(final Random r) { return Instant.ofEpochMilli(r.nextLong()).toString(); } + + private static Location 
randomLocation(final Random r) throws MalformedURLException { + return new Location( + RandomStringUtils.randomAlphabetic(10), + new URL(String.format("https://%s.com/", RandomStringUtils.randomAlphabetic(10))), + randomHash(r)); + } } diff --git a/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/recovery/emergencyfile/invalid/emergencyRecovery.yaml b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/recovery/emergencyfile/invalid/emergencyRecovery.yaml new file mode 100644 index 000000000000..ed5eea4cb336 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/recovery/emergencyfile/invalid/emergencyRecovery.yaml @@ -0,0 +1,22 @@ +recovery: + package: + locations: + - type: "VjblTCCOjG" + url: "https://CaMlzKSvbK.com/" + hash: "a5085d2e13477f2847b174d51e898ea683430cd42b19288458593b27af4059ee330d1a21bad27a30fceac272afbf0c21" + - type: "brqEpvspux" + url: "https://TGaqyzXhZI.com/" + hash: "154265ebf9a836b186f73890d4a8d5a9a72f513861f55c4402b9a01aa2cba32d4b9e528a1576b4969abb8d3afd1b0471" + - type: "eBQcDiYUEd" + url: "https://nBwKubCcCP.com/" + hash: "4c339d279fa15b753a3995faff0eead7226fb6d7df0180f4770b07f00b71f11bf5cb43563fca3b1aac5729a022099fec" + state: + round: 3388878439625117609 + hash: "1e408e32390aa2b90f32a0abcd8793c2b252c9478bdd9c16a52b8cfbbc5cd798d83b8cab3e6a300f0f6f08f0c0bc789d" + bootstrap: + timestamp: "2023-06-06T10:14:53.993187Z" + stream: + intervals: + record: 10 + event: 20 + balances: 30 diff --git a/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/recovery/emergencyfile/valid/emergencyRecovery.yaml b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/recovery/emergencyfile/valid/emergencyRecovery.yaml new file mode 100644 index 000000000000..ef66d63fa685 --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/test/resources/com/swirlds/platform/recovery/emergencyfile/valid/emergencyRecovery.yaml @@ -0,0 +1,23 @@ +recovery: + package: + locations: + - type: "VjblTCCOjG" + url: "https://CaMlzKSvbK.com/" + hash: "a5085d2e13477f2847b174d51e898ea683430cd42b19288458593b27af4059ee330d1a21bad27a30fceac272afbf0c21" + - type: "brqEpvspux" + url: "https://TGaqyzXhZI.com/" + hash: "154265ebf9a836b186f73890d4a8d5a9a72f513861f55c4402b9a01aa2cba32d4b9e528a1576b4969abb8d3afd1b0471" + - type: "eBQcDiYUEd" + url: "https://nBwKubCcCP.com/" + hash: "4c339d279fa15b753a3995faff0eead7226fb6d7df0180f4770b07f00b71f11bf5cb43563fca3b1aac5729a022099fec" + state: + round: 3388878439625117609 + hash: "1e408e32390aa2b90f32a0abcd8793c2b252c9478bdd9c16a52b8cfbbc5cd798d83b8cab3e6a300f0f6f08f0c0bc789d" + timestamp: "2023-06-06T10:14:53.992985Z" + bootstrap: + timestamp: "2023-06-06T10:14:53.993187Z" + stream: + intervals: + record: 10 + event: 20 + balances: 30 From c9a1b44af630320e4275cb95f121d1210448cb49 Mon Sep 17 00:00:00 2001 From: Cody Littley <56973212+cody-littley@users.noreply.github.com> Date: Tue, 13 Jun 2023 10:01:39 -0500 Subject: [PATCH 16/70] 07001 queue thread idle callback (#7004) Signed-off-by: Cody Littley --- .../AbstractQueueThreadConfiguration.java | 53 ++++++++++++ .../framework/internal/QueueThreadImpl.java | 31 +++++-- .../test/threading/QueueThreadTests.java | 81 +++++++++++++++++++ 3 files changed, 159 insertions(+), 6 deletions(-) diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/AbstractQueueThreadConfiguration.java 
b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/AbstractQueueThreadConfiguration.java index 3be0672deae0..ce16ce23cdeb 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/AbstractQueueThreadConfiguration.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/AbstractQueueThreadConfiguration.java @@ -22,6 +22,10 @@ import com.swirlds.common.threading.interrupt.InterruptableConsumer; import com.swirlds.common.threading.interrupt.InterruptableRunnable; import com.swirlds.common.threading.manager.ThreadManager; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.time.Duration; +import java.util.Objects; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; @@ -58,6 +62,16 @@ public abstract class AbstractQueueThreadConfiguration handler) { return (C) this; } + /** + * Set the idle callback that will be called periodically when the queue is empty. + * + * @return this object + */ + @SuppressWarnings("unchecked") + public C setIdleCallback(@NonNull final InterruptableRunnable idleCallback) { + this.idleCallback = idleCallback; + return (C) this; + } + + /** + * Get the idle callback that will be called periodically when the queue is empty. + */ + @Nullable + public InterruptableRunnable getIdleCallback() { + return idleCallback; + } + + /** + * Get the amount of time that the thread blocks while waiting for work. + */ + @NonNull + public Duration getWaitForWorkDuration() { + return waitForWorkDuration; + } + + /** + * Set the amount of time that the thread blocks while waiting for work. + * + * @param waitForWorkDuration the amount of time to wait + * @return this object + */ + @SuppressWarnings("unchecked") + public C setWaitForWorkDuration(@NonNull final Duration waitForWorkDuration) { + this.waitForWorkDuration = Objects.requireNonNull(waitForWorkDuration); + return (C) this; + } + /** * Gets the queue specified by the user, or null if none has been specified. 
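A configuration sketch for the new idle-callback and wait-for-work settings, mirroring how the test added later in this commit wires them up; the thread name, handler, and duration are placeholder choices, and a ThreadManager instance is assumed to be supplied by the caller.

import com.swirlds.common.threading.framework.QueueThread;
import com.swirlds.common.threading.framework.config.QueueThreadConfiguration;
import com.swirlds.common.threading.manager.ThreadManager;
import java.time.Duration;

class IdleCallbackConfigurationSketch {
    static QueueThread<Runnable> build(final ThreadManager threadManager) {
        return new QueueThreadConfiguration<Runnable>(threadManager)
                .setThreadName("idle-callback-example")
                .setHandler(Runnable::run)
                // Invoked each time the poll for work times out while the queue is empty.
                .setIdleCallback(() -> System.out.println("queue is idle"))
                .setWaitForWorkDuration(Duration.ofMillis(10))
                .build();
    }
}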
*/ diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/QueueThreadImpl.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/QueueThreadImpl.java index 3c131b71dea6..3c3e8db2d742 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/QueueThreadImpl.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/internal/QueueThreadImpl.java @@ -16,7 +16,7 @@ package com.swirlds.common.threading.framework.internal; -import static java.util.concurrent.TimeUnit.MILLISECONDS; +import static java.util.concurrent.TimeUnit.NANOSECONDS; import static org.apache.commons.lang3.builder.ToStringStyle.SHORT_PREFIX_STYLE; import com.swirlds.common.threading.framework.QueueThread; @@ -24,6 +24,7 @@ import com.swirlds.common.threading.framework.ThreadSeed; import com.swirlds.common.threading.interrupt.InterruptableConsumer; import com.swirlds.common.threading.interrupt.InterruptableRunnable; +import java.time.Duration; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicLong; @@ -37,8 +38,6 @@ */ public class QueueThreadImpl extends AbstractBlockingQueue implements QueueThread { - private static final int WAIT_FOR_WORK_DELAY_MS = 10; - private final int bufferSize; private final List buffer; @@ -53,9 +52,22 @@ public class QueueThreadImpl extends AbstractBlockingQueue implements Queu * Incremented each time we timeout while waiting for work from the queue. */ private final AtomicLong noWorkCount = new AtomicLong(); - /** Tracks metrics related to this queue thread */ + + /** + * Tracks metrics related to this queue thread + */ private final QueueThreadMetrics metrics; + /** + * If not null, called periodically when the queue thread is idle. + */ + private final InterruptableRunnable idleCallback; + + /** + * The amount of time to wait for work. + */ + private final Duration waitForWorkDuration; + /** *

* All instances of this class should be created via the appropriate configuration object. @@ -84,6 +96,8 @@ public QueueThreadImpl(final AbstractQueueThreadConfiguration configuratio buffer = new ArrayList<>(bufferSize); handler = configuration.getHandler(); + idleCallback = configuration.getIdleCallback(); + this.waitForWorkDuration = configuration.getWaitForWorkDuration(); metrics = new QueueThreadMetrics(configuration); stoppableThread = configuration @@ -253,9 +267,14 @@ private void doWork() throws InterruptedException { * if this method is interrupted during execution */ private T waitForItem() throws InterruptedException { - final T item = poll(WAIT_FOR_WORK_DELAY_MS, MILLISECONDS); + final T item = poll(waitForWorkDuration.toNanos(), NANOSECONDS); if (item == null) { noWorkCount.incrementAndGet(); + if (idleCallback != null) { + metrics.startingWork(); + idleCallback.run(); + metrics.finishedWork(); + } } return item; } @@ -285,7 +304,7 @@ public void waitUntilNotBusy() throws InterruptedException { final long initialCount = noWorkCount.get(); while (noWorkCount.get() <= initialCount + 1 && getStatus() != Status.DEAD) { - MILLISECONDS.sleep(WAIT_FOR_WORK_DELAY_MS); + NANOSECONDS.sleep(waitForWorkDuration.toNanos()); } } diff --git a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/threading/QueueThreadTests.java b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/threading/QueueThreadTests.java index a5a71dd2aa71..0e855a62f76c 100644 --- a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/threading/QueueThreadTests.java +++ b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/threading/QueueThreadTests.java @@ -52,6 +52,7 @@ import com.swirlds.common.threading.framework.config.ThreadConfiguration; import com.swirlds.common.threading.framework.internal.QueueThreadMetrics; import com.swirlds.common.threading.interrupt.InterruptableConsumer; +import com.swirlds.common.threading.interrupt.InterruptableRunnable; import com.swirlds.config.api.Configuration; import com.swirlds.test.framework.TestComponentTags; import com.swirlds.test.framework.TestQualifierTags; @@ -997,4 +998,84 @@ void waitUntilNotBusyTest() throws InterruptedException { queue.stop(); } + + @Test + @DisplayName("Idle Callback Test") + void idleCallbackTest() throws InterruptedException { + final AtomicBoolean error = new AtomicBoolean(false); + + final AtomicBoolean idleCallbackPermitted = new AtomicBoolean(false); + final AtomicBoolean idleCallbackCalled = new AtomicBoolean(false); + final InterruptableRunnable idleCallback = () -> { + if (idleCallbackPermitted.get()) { + idleCallbackCalled.set(true); + } else { + error.set(true); + } + }; + + final QueueThread queue = new QueueThreadConfiguration(getStaticThreadManager()) + .setThreadName("test") + .setIdleCallback(idleCallback) + .setHandler(Runnable::run) + .setWaitForWorkDuration(Duration.ofMillis(1)) + .build(); + + final CountDownLatch latch1 = new CountDownLatch(1); + final CountDownLatch latch2 = new CountDownLatch(1); + final CountDownLatch latch3 = new CountDownLatch(1); + + queue.add(() -> { + try { + latch1.await(); + } catch (final InterruptedException ignored) { + error.set(true); + Thread.currentThread().interrupt(); + } + }); + queue.add(() -> { + try { + latch2.await(); + } catch (final InterruptedException ignored) { + error.set(true); + 
Thread.currentThread().interrupt(); + } + }); + queue.add(() -> { + try { + latch3.await(); + } catch (final InterruptedException ignored) { + error.set(true); + Thread.currentThread().interrupt(); + } + }); + queue.start(); + + // The queue should call the idle callback during this time, + // but give it some time to do bad things if it's going to do bad things. + MILLISECONDS.sleep(10); + + latch1.countDown(); + + // The queue should call the idle callback during this time, + // but give it some time to do bad things if it's going to do bad things. + MILLISECONDS.sleep(10); + + latch2.countDown(); + + // The queue should call the idle callback during this time, + // but give it some time to do bad things if it's going to do bad things. + MILLISECONDS.sleep(10); + + // Once job 3 is permitted to complete, we expect for the idle callback to be invoked shortly afterwards. + idleCallbackPermitted.set(true); + + latch3.countDown(); + + assertEventuallyTrue(idleCallbackCalled::get, Duration.ofSeconds(1), "Idle callback was not called"); + + queue.stop(); + + assertFalse(error.get()); + } } From 60be7adaa154d06aee14515620b7f466c49e5f8d Mon Sep 17 00:00:00 2001 From: Cody Littley <56973212+cody-littley@users.noreply.github.com> Date: Tue, 13 Jun 2023 10:02:13 -0500 Subject: [PATCH 17/70] Use RecycleBin to handle files after discontinuity in PCES. (#7049) Signed-off-by: Cody Littley --- .../common/io/config/RecycleBinConfig.java | 3 +- .../swirlds/common/io/utility/RecycleBin.java | 70 +++----------- .../common/io/utility/RecycleBinImpl.java | 95 +++++++++++++++++++ .../common/test/fixtures/TestRecycleBin.java | 57 +++++++++++ .../com/swirlds/platform/SwirldsPlatform.java | 4 +- .../preconsensus/PreconsensusEventFile.java | 9 +- .../PreconsensusEventFileManager.java | 18 ++-- .../PreconsensusEventStreamConfig.java | 3 - .../common/test/utility/RecycleBinTests.java | 8 +- .../AsyncPreconsensusEventWriterTests.java | 19 ++-- .../PreconsensusEventFileManagerTests.java | 90 +++++++++++------- .../PreconsensusEventFileTests.java | 20 +++- .../SyncPreconsensusEventWriterTests.java | 25 ++--- 13 files changed, 282 insertions(+), 139 deletions(-) create mode 100644 platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/utility/RecycleBinImpl.java create mode 100644 platform-sdk/swirlds-common/src/testFixtures/java/com/swirlds/common/test/fixtures/TestRecycleBin.java diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/config/RecycleBinConfig.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/config/RecycleBinConfig.java index 231963fea7e5..2acd730d784b 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/config/RecycleBinConfig.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/config/RecycleBinConfig.java @@ -17,7 +17,6 @@ package com.swirlds.common.io.config; import com.swirlds.common.config.StateConfig; -import com.swirlds.common.io.utility.RecycleBin; import com.swirlds.common.system.NodeId; import com.swirlds.config.api.ConfigData; import com.swirlds.config.api.ConfigProperty; @@ -25,7 +24,7 @@ import java.nio.file.Path; /** - * Configuration for the {@link RecycleBin} class. + * Configuration for the {@link com.swirlds.common.io.utility.RecycleBin} class. * * @param recycleBinPath the directory where recycled files are stored, relative to the saved state directory defined by * {@link StateConfig#savedStateDirectory()}. 
diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/utility/RecycleBin.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/utility/RecycleBin.java index 3cfb6ebf28c6..722963ed0c70 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/utility/RecycleBin.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/utility/RecycleBin.java @@ -16,22 +16,11 @@ package com.swirlds.common.io.utility; -import static com.swirlds.logging.LogMarker.EXCEPTION; - -import com.swirlds.common.config.StateConfig; -import com.swirlds.common.io.config.RecycleBinConfig; import com.swirlds.common.system.NodeId; -import com.swirlds.common.threading.locks.AutoClosableLock; -import com.swirlds.common.threading.locks.Locks; -import com.swirlds.common.threading.locks.locked.Locked; import com.swirlds.config.api.Configuration; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; -import java.nio.file.Files; import java.nio.file.Path; -import java.util.Objects; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; /** * This class provides the abstraction of deleting a file, but actually moves the file to a temporary location in case @@ -42,29 +31,7 @@ * code that depends on the existence of files in this temporary location. Files in this temporary location should be * treated as deleted by java code, and only used for debugging purposes. */ -public class RecycleBin { - - private static final Logger logger = LogManager.getLogger(RecycleBin.class); - - private final Path recycleBinPath; - private final AutoClosableLock lock = Locks.createAutoLock(); - - /** - * Create a new recycle bin. - * - * @param configuration the configuration object - * @param selfId the ID of this node - * @throws IOException if the recycle bin directory could not be created - */ - public RecycleBin(@NonNull final Configuration configuration, @NonNull final NodeId selfId) throws IOException { - Objects.requireNonNull(selfId); - - final RecycleBinConfig recycleBinConfig = configuration.getConfigData(RecycleBinConfig.class); - final StateConfig stateConfig = configuration.getConfigData(StateConfig.class); - - recycleBinPath = recycleBinConfig.getRecycleBinPath(stateConfig, selfId); - Files.createDirectories(recycleBinPath); - } +public interface RecycleBin { /** * Remove a file or directory tree from its current location and move it to a temporary location. @@ -76,31 +43,22 @@ public RecycleBin(@NonNull final Configuration configuration, @NonNull final Nod * * @param path the file or directory to recycle */ - public void recycle(@NonNull final Path path) throws IOException { - if (!Files.exists(path)) { - logger.warn(EXCEPTION.getMarker(), "Cannot recycle non-existent file: {}", path); - return; - } - - try (final Locked ignored = lock.lock()) { - final Path fileName = path.getFileName(); - final Path recyclePath = recycleBinPath.resolve(fileName); - - if (Files.exists(recyclePath)) { - Files.delete(recyclePath); - } - - Files.move(path, recyclePath); - } - } + void recycle(@NonNull Path path) throws IOException; /** * Delete all recycled files. */ - public void clear() throws IOException { - try (final Locked ignored = lock.lock()) { - FileUtils.deleteDirectory(recycleBinPath); - Files.createDirectories(recycleBinPath); - } + void clear() throws IOException; + + /** + * Create a new recycle bin. 
+ * + * @param configuration the configuration object + * @param selfId the ID of this node + * @throws IOException if the recycle bin directory could not be created + */ + static RecycleBin create(@NonNull final Configuration configuration, @NonNull final NodeId selfId) + throws IOException { + return new RecycleBinImpl(configuration, selfId); } } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/utility/RecycleBinImpl.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/utility/RecycleBinImpl.java new file mode 100644 index 000000000000..2f6482e8194a --- /dev/null +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/io/utility/RecycleBinImpl.java @@ -0,0 +1,95 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.common.io.utility; + +import static com.swirlds.logging.LogMarker.EXCEPTION; + +import com.swirlds.common.config.StateConfig; +import com.swirlds.common.io.config.RecycleBinConfig; +import com.swirlds.common.system.NodeId; +import com.swirlds.common.threading.locks.AutoClosableLock; +import com.swirlds.common.threading.locks.Locks; +import com.swirlds.common.threading.locks.locked.Locked; +import com.swirlds.config.api.Configuration; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Objects; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * A standard implementation of a {@link RecycleBin}. + */ +class RecycleBinImpl implements RecycleBin { + + private static final Logger logger = LogManager.getLogger(RecycleBinImpl.class); + + private final Path recycleBinPath; + private final AutoClosableLock lock = Locks.createAutoLock(); + + /** + * Create a new recycle bin. 
+ * + * @param configuration the configuration object + * @param selfId the ID of this node + * @throws IOException if the recycle bin directory could not be created + */ + public RecycleBinImpl(@NonNull final Configuration configuration, @NonNull final NodeId selfId) throws IOException { + Objects.requireNonNull(selfId); + + final RecycleBinConfig recycleBinConfig = configuration.getConfigData(RecycleBinConfig.class); + final StateConfig stateConfig = configuration.getConfigData(StateConfig.class); + + recycleBinPath = recycleBinConfig.getRecycleBinPath(stateConfig, selfId); + Files.createDirectories(recycleBinPath); + } + + /** + * {@inheritDoc} + */ + @Override + public void recycle(@NonNull final Path path) throws IOException { + if (!Files.exists(path)) { + logger.warn(EXCEPTION.getMarker(), "Cannot recycle non-existent file: {}", path); + return; + } + + try (final Locked ignored = lock.lock()) { + final Path fileName = path.getFileName(); + final Path recyclePath = recycleBinPath.resolve(fileName); + + if (Files.exists(recyclePath)) { + Files.delete(recyclePath); + } + + Files.move(path, recyclePath); + } + } + + /** + * {@inheritDoc} + */ + @Override + public void clear() throws IOException { + try (final Locked ignored = lock.lock()) { + FileUtils.deleteDirectory(recycleBinPath); + Files.createDirectories(recycleBinPath); + } + } +} diff --git a/platform-sdk/swirlds-common/src/testFixtures/java/com/swirlds/common/test/fixtures/TestRecycleBin.java b/platform-sdk/swirlds-common/src/testFixtures/java/com/swirlds/common/test/fixtures/TestRecycleBin.java new file mode 100644 index 000000000000..c6542c0232ad --- /dev/null +++ b/platform-sdk/swirlds-common/src/testFixtures/java/com/swirlds/common/test/fixtures/TestRecycleBin.java @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.common.test.fixtures; + +import com.swirlds.common.io.utility.FileUtils; +import com.swirlds.common.io.utility.RecycleBin; +import java.io.IOException; +import java.nio.file.Path; + +/** + * An implementation of a {@link RecycleBin} that immediately deletes recycled files. Technically speaking this + * is not a violation of the recycle bin contract. Handy for test scenarios where you don't want to worry about + * cleaning up the recycle bin. + */ +public class TestRecycleBin implements RecycleBin { + + private static final TestRecycleBin INSTANCE = new TestRecycleBin(); + + /** + * Get the singleton instance of this class. 
+ */ + public static TestRecycleBin getInstance() { + return INSTANCE; + } + + private TestRecycleBin() {} + + /** + * {@inheritDoc} + */ + @Override + public void recycle(final Path path) throws IOException { + FileUtils.deleteDirectory(path); + } + + /** + * {@inheritDoc} + */ + @Override + public void clear() throws IOException { + // Nothing to clear, files are deleted immediately + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java index a86b450a5734..5a58fc81254d 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java @@ -351,7 +351,7 @@ public class SwirldsPlatform implements Platform, Startable { registerAddressBookMetrics(metrics, initialAddressBook, selfId); try { - recycleBin = new RecycleBin(platformContext.getConfiguration(), selfId); + recycleBin = RecycleBin.create(platformContext.getConfiguration(), selfId); if (softwareUpgrade) { recycleBin.clear(); } @@ -995,7 +995,7 @@ private EventLinker buildEventLinker(@NonNull final List= indexOfDiscontinuity; index--) { - files.removeLast().deleteFile(databaseDirectory, recycleBinDirectory); + files.removeLast().deleteFile(databaseDirectory, recycleBin); } } catch (final IOException e) { throw new UncheckedIOException("unable to delete file after discontinuity", e); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/PreconsensusEventStreamConfig.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/PreconsensusEventStreamConfig.java index fcbe81a962cf..c03b0cc1aa54 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/PreconsensusEventStreamConfig.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/PreconsensusEventStreamConfig.java @@ -78,8 +78,6 @@ * @param databaseDirectory the directory where preconsensus events will be stored, * relative to * {@link com.swirlds.common.config.StateConfig#savedStateDirectory()}. - * @param recycleBinDirectory the directory where invalid preconsensus event files will be - * moved if a discontinuity is detected * @param enableStorage if true, then stream preconsensus events to files on disk. 
If * this is disabled then a network wide crash (perhaps due to a * bug) can cause transactions that previously reached consensus @@ -105,7 +103,6 @@ public record PreconsensusEventStreamConfig( @ConfigProperty(defaultValue = "5") int minimumGenerationalCapacity, @ConfigProperty(defaultValue = "false") boolean permitGaps, @ConfigProperty(defaultValue = "preconsensus-events") Path databaseDirectory, - @ConfigProperty(defaultValue = "preconsensus-events-debug") Path recycleBinDirectory, // FUTURE WORK: once tested make this default true @ConfigProperty(defaultValue = "false") boolean enableStorage, // FUTURE WORK: once tested make this default true diff --git a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/utility/RecycleBinTests.java b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/utility/RecycleBinTests.java index aa640134d7b2..743ada4153eb 100644 --- a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/utility/RecycleBinTests.java +++ b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/utility/RecycleBinTests.java @@ -83,7 +83,7 @@ private void validateFile(@NonNull final Path path, @NonNull final String expect @Test @DisplayName("Recycle File Test") void recycleFileTest() throws IOException { - final RecycleBin recycleBin = new RecycleBin(configuration, new NodeId(0)); + final RecycleBin recycleBin = RecycleBin.create(configuration, new NodeId(0)); final Path path1 = testDirectory.resolve("file1.txt"); writeFile(path1, "file1"); @@ -123,7 +123,7 @@ void recycleFileTest() throws IOException { @Test @DisplayName("Recycle Directory Test") void recycleDirectoryTest() throws IOException { - final RecycleBin recycleBin = new RecycleBin(configuration, new NodeId(0)); + final RecycleBin recycleBin = RecycleBin.create(configuration, new NodeId(0)); final Path directory = testDirectory.resolve("foo/bar/baz"); Files.createDirectories(directory); @@ -173,7 +173,7 @@ void recycleDirectoryTest() throws IOException { @Test @DisplayName("Recycle Non-Existent File Test") void recycleNonExistentFileTest() throws IOException { - final RecycleBin recycleBin = new RecycleBin(configuration, new NodeId(0)); + final RecycleBin recycleBin = RecycleBin.create(configuration, new NodeId(0)); final Path path = testDirectory.resolve("file.txt"); recycleBin.recycle(path); @@ -186,7 +186,7 @@ void recycleNonExistentFileTest() throws IOException { @Test @DisplayName("Recycle Duplicate File Test") void recycleDuplicateFileTest() throws IOException { - final RecycleBin recycleBin = new RecycleBin(configuration, new NodeId(0)); + final RecycleBin recycleBin = RecycleBin.create(configuration, new NodeId(0)); final Path path = testDirectory.resolve("file.txt"); final Path recycledPath = diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/AsyncPreconsensusEventWriterTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/AsyncPreconsensusEventWriterTests.java index 7bfd0e732f40..846e3ef76448 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/AsyncPreconsensusEventWriterTests.java +++ 
b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/AsyncPreconsensusEventWriterTests.java @@ -42,6 +42,7 @@ import com.swirlds.common.test.RandomUtils; import com.swirlds.common.test.TransactionGenerator; import com.swirlds.common.test.fixtures.FakeTime; +import com.swirlds.common.test.fixtures.TestRecycleBin; import com.swirlds.common.test.io.FileManipulation; import com.swirlds.common.time.OSTime; import com.swirlds.config.api.Configuration; @@ -180,8 +181,8 @@ static void verifyStream( lastGeneration = Math.max(lastGeneration, event.getGeneration()); } - final PreconsensusEventFileManager reader = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager reader = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); // Verify that the events were written correctly final PreconsensusEventMultiFileIterator eventsIterator = reader.getEventIterator(0, fixDiscontinuities); @@ -275,8 +276,8 @@ void overflowTest(final boolean artificialPauses) throws IOException, Interrupte final PlatformContext platformContext = buildContext(); - final PreconsensusEventFileManager fileManager = - new PreconsensusEventFileManager(buildContext(), OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager fileManager = new PreconsensusEventFileManager( + buildContext(), OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer = new AsyncPreconsensusEventWriter( @@ -360,7 +361,7 @@ void advanceNonAncientGenerationTest(final AdvanceNonAncientGenerationParams par final FakeTime time = new FakeTime(Duration.ofMillis(1)); final PreconsensusEventFileManager fileManager = - new PreconsensusEventFileManager(platformContext, time, new NodeId(0)); + new PreconsensusEventFileManager(platformContext, time, TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer = new AsyncPreconsensusEventWriter( @@ -469,8 +470,8 @@ void restartSimulationTest(final boolean truncateLastFile) throws InterruptedExc final PlatformContext platformContext = buildContext(); - final PreconsensusEventFileManager fileManager1 = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager fileManager1 = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer1 = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer1 = new AsyncPreconsensusEventWriter( @@ -517,8 +518,8 @@ void restartSimulationTest(final boolean truncateLastFile) throws InterruptedExc events1.remove(events1.size() - 1); } - final PreconsensusEventFileManager fileManager2 = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager fileManager2 = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer2 = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer2 = new AsyncPreconsensusEventWriter( platformContext, diff 
--git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java index 4b4cd38b245f..f509bf398276 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java @@ -27,18 +27,20 @@ import com.swirlds.common.context.DefaultPlatformContext; import com.swirlds.common.context.PlatformContext; import com.swirlds.common.crypto.CryptographyHolder; +import com.swirlds.common.io.config.RecycleBinConfig; import com.swirlds.common.io.streams.SerializableDataOutputStream; import com.swirlds.common.io.utility.FileUtils; +import com.swirlds.common.io.utility.RecycleBin; import com.swirlds.common.metrics.Metrics; import com.swirlds.common.metrics.noop.NoOpMetrics; import com.swirlds.common.system.NodeId; import com.swirlds.common.test.fixtures.FakeTime; +import com.swirlds.common.test.fixtures.TestRecycleBin; import com.swirlds.common.time.OSTime; import com.swirlds.common.utility.CompareTo; import com.swirlds.config.api.Configuration; import com.swirlds.platform.event.preconsensus.PreconsensusEventFile; import com.swirlds.platform.event.preconsensus.PreconsensusEventFileManager; -import com.swirlds.platform.event.preconsensus.PreconsensusEventStreamConfig; import com.swirlds.test.framework.config.TestConfigBuilder; import java.io.FileOutputStream; import java.io.IOException; @@ -132,7 +134,8 @@ void minimumDecreasesTest() throws IOException { assertThrows( IllegalStateException.class, - () -> new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0))); + () -> new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0))); } @Test @@ -149,7 +152,8 @@ void maximumDecreasesTest() throws IOException { assertThrows( IllegalStateException.class, - () -> new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0))); + () -> new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0))); } @Test @@ -166,7 +170,8 @@ void timestampDecreasesTest() throws IOException { assertThrows( IllegalStateException.class, - () -> new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0))); + () -> new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0))); } @Test @@ -207,8 +212,8 @@ void readFilesInOrderTest() throws IOException { final PlatformContext platformContext = buildContext(); - final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); assertIteratorEquality( files.iterator(), manager.getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false)); @@ -268,8 +273,8 @@ void readFilesInOrderGapTest(final boolean permitGaps) throws IOException { if (permitGaps) { // Gaps are allowed. 
We should see all files except for the one that was skipped. - final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); assertIteratorEquality( files.iterator(), @@ -278,7 +283,8 @@ void readFilesInOrderGapTest(final boolean permitGaps) throws IOException { // Gaps are not allowed. assertThrows( IllegalStateException.class, - () -> new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0))); + () -> new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0))); } } @@ -319,8 +325,8 @@ void readFilesFromMiddleTest() throws IOException { final PlatformContext platformContext = buildContext(); - final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); // For this test, we want to iterate over files so that we are guaranteed to observe every event // with a generation greater than or equal to the target generation. Choose a generation that falls @@ -398,8 +404,8 @@ void readFilesFromMiddleRepeatingGenerationsTest() throws IOException { } final PlatformContext platformContext = buildContext(); - final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); // For this test, we want to iterate over files so that we are guaranteed to observe every event // with a generation greater than or equal to the target generation. 
Choose a generation that falls @@ -470,8 +476,8 @@ void readFilesFromHighGenerationTest() throws IOException { final PlatformContext platformContext = buildContext(); - final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); // Request a generation higher than all files in the data store final long targetGeneration = files.get(fileCount - 1).getMaximumGeneration() + 1; @@ -485,8 +491,8 @@ void readFilesFromHighGenerationTest() throws IOException { void readFilesFromEmptyStreamTest() throws IOException { final PlatformContext platformContext = buildContext(); - final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); final Iterator iterator = manager.getFileIterator(1234, false); assertFalse(iterator.hasNext()); @@ -508,8 +514,8 @@ void generateDescriptorsWithManagerTest() throws IOException { long maximumGeneration = random.nextLong(minimumGeneration, minimumGeneration + maxDelta); Instant timestamp = Instant.now(); - final PreconsensusEventFileManager generatingManager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager generatingManager = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); for (int i = 0; i < fileCount; i++) { final PreconsensusEventFile file = @@ -527,8 +533,8 @@ void generateDescriptorsWithManagerTest() throws IOException { createDummyFile(file); } - final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); assertIteratorEquality( files.iterator(), manager.getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false)); @@ -583,7 +589,7 @@ void incrementalPruningByGenerationTest() throws IOException { final FakeTime time = new FakeTime(lastFile.getTimestamp().plus(Duration.ofHours(1)), Duration.ZERO); final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, time, new NodeId(0)); + new PreconsensusEventFileManager(platformContext, time, TestRecycleBin.getInstance(), new NodeId(0)); assertIteratorEquality( files.iterator(), manager.getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false)); @@ -599,7 +605,8 @@ void incrementalPruningByGenerationTest() throws IOException { // Parse files with a new manager to make sure we aren't "cheating" by just // removing the in-memory descriptor without also removing the file on disk final List parsedFiles = new ArrayList<>(); - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)) + new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)) .getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false) .forEachRemaining(parsedFiles::add); @@ -639,7 +646,8 @@ void 
incrementalPruningByGenerationTest() throws IOException { // Parse files with a new manager to make sure we aren't "cheating" by just // removing the in-memory descriptor without also removing the file on disk final List parsedFiles = new ArrayList<>(); - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)) + new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)) .getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false) .forEachRemaining(parsedFiles::add); @@ -705,7 +713,7 @@ void incrementalPruningByTimestampTest() throws IOException { final FakeTime time = new FakeTime(firstFile.getTimestamp().plus(Duration.ofMinutes(59)), Duration.ZERO); final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, time, new NodeId(0)); + new PreconsensusEventFileManager(platformContext, time, TestRecycleBin.getInstance(), new NodeId(0)); assertIteratorEquality( files.iterator(), manager.getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false)); @@ -720,7 +728,8 @@ void incrementalPruningByTimestampTest() throws IOException { // Parse files with a new manager to make sure we aren't "cheating" by just // removing the in-memory descriptor without also removing the file on disk final List parsedFiles = new ArrayList<>(); - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)) + new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)) .getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false) .forEachRemaining(parsedFiles::add); @@ -765,7 +774,8 @@ void incrementalPruningByTimestampTest() throws IOException { // Parse files with a new manager to make sure we aren't "cheating" by just // removing the in-memory descriptor without also removing the file on disk final List parsedFiles = new ArrayList<>(); - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)) + new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)) .getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false) .forEachRemaining(parsedFiles::add); @@ -793,9 +803,10 @@ private void validateRecycledFiles( throws IOException { final StateConfig stateConfig = platformContext.getConfiguration().getConfigData(StateConfig.class); - final PreconsensusEventStreamConfig streamConfig = - platformContext.getConfiguration().getConfigData(PreconsensusEventStreamConfig.class); - final Path recycleBinDirectory = stateConfig.savedStateDirectory().resolve(streamConfig.recycleBinDirectory()); + final RecycleBinConfig recycleBinConfig = + platformContext.getConfiguration().getConfigData(RecycleBinConfig.class); + + final Path recycleBinDirectory = recycleBinConfig.getRecycleBinPath(stateConfig, new NodeId(0)); final Set recycledFiles = new HashSet<>(); try (final Stream stream = Files.walk(recycleBinDirectory)) { @@ -861,9 +872,10 @@ void startAtFirstFileDiscontinuityInMiddleTest(final boolean startAtSpecificGene } final PlatformContext platformContext = buildContext(); + final RecycleBin recycleBin = RecycleBin.create(platformContext.getConfiguration(), new NodeId(0)); final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), 
recycleBin, new NodeId(0)); // Don't try to fix discontinuities, we should see all files assertIteratorEquality( @@ -939,8 +951,10 @@ void startAtFirstFileDiscontinuityInFirstFileTest(final boolean startAtSpecificG final PlatformContext platformContext = buildContext(); + final RecycleBin recycleBin = RecycleBin.create(platformContext.getConfiguration(), new NodeId(0)); + final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), recycleBin, new NodeId(0)); // Don't try to fix discontinuities, we should see all files assertIteratorEquality( @@ -1032,8 +1046,10 @@ void startAtMiddleFileDiscontinuityInMiddleTest() throws IOException { final PlatformContext platformContext = buildContext(); + final RecycleBin recycleBin = RecycleBin.create(platformContext.getConfiguration(), new NodeId(0)); + final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), recycleBin, new NodeId(0)); // Don't try to fix discontinuities, we should see all files starting with the one we request assertIteratorEquality(files.iterator(), manager.getFileIterator(startGeneration, false)); @@ -1105,8 +1121,10 @@ void startAtDiscontinuityInMiddleTest() throws IOException { final PlatformContext platformContext = buildContext(); + final RecycleBin recycleBin = RecycleBin.create(platformContext.getConfiguration(), new NodeId(0)); + final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), recycleBin, new NodeId(0)); // Don't try to fix discontinuities, we should see all files starting with the one we request assertIteratorEquality(postDiscontinuityFiles.iterator(), manager.getFileIterator(startGeneration, false)); @@ -1173,8 +1191,8 @@ void startAfterDiscontinuityInMiddleTest() throws IOException { final PlatformContext platformContext = buildContext(); - final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); // Iterate without fixing discontinuities. 
assertIteratorEquality(files.iterator(), manager.getFileIterator(startGeneration, false)); diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileTests.java index 199895fabe08..a4f485c468f1 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileTests.java @@ -29,8 +29,12 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import com.swirlds.common.io.utility.FileUtils; +import com.swirlds.common.io.utility.RecycleBin; +import com.swirlds.common.system.NodeId; import com.swirlds.common.test.RandomUtils; +import com.swirlds.config.api.Configuration; import com.swirlds.platform.event.preconsensus.PreconsensusEventFile; +import com.swirlds.test.framework.config.TestConfigBuilder; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -282,7 +286,16 @@ void recycleTest() throws IOException { final Instant now = Instant.now(); final Path streamDirectory = testDirectory.resolve("data"); + final NodeId selfId = new NodeId(0); final Path recycleDirectory = testDirectory.resolve("recycle"); + final Path actualRecycleDirectory = recycleDirectory.resolve(selfId.toString()); + + final Configuration configuration = new TestConfigBuilder() + .withValue("recycleBin.recycleBinPath", recycleDirectory.toString()) + .getOrCreateConfig(); + + final RecycleBin recycleBin = RecycleBin.create(configuration, selfId); + Files.createDirectories(streamDirectory); Files.createDirectories(recycleDirectory); @@ -325,11 +338,11 @@ void recycleTest() throws IOException { } } - file.deleteFile(streamDirectory, recycleDirectory); + file.deleteFile(streamDirectory, recycleBin); if (random.nextBoolean()) { // Deleting twice shouldn't have any ill effects - file.deleteFile(streamDirectory, recycleDirectory); + file.deleteFile(streamDirectory, recycleBin); } deletedFiles.add(file); @@ -340,7 +353,8 @@ void recycleTest() throws IOException { // All files should have been moved to the recycle directory for (final PreconsensusEventFile file : files) { - assertTrue(Files.exists(recycleDirectory.resolve(file.getPath().getFileName()))); + assertTrue( + Files.exists(actualRecycleDirectory.resolve(file.getPath().getFileName()))); } } diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/SyncPreconsensusEventWriterTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/SyncPreconsensusEventWriterTests.java index 50082c5b2d42..a6165507823c 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/SyncPreconsensusEventWriterTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/SyncPreconsensusEventWriterTests.java @@ -33,6 +33,7 @@ import com.swirlds.common.metrics.noop.NoOpMetrics; import com.swirlds.common.system.NodeId; import com.swirlds.common.test.RandomUtils; +import 
com.swirlds.common.test.fixtures.TestRecycleBin; import com.swirlds.common.time.OSTime; import com.swirlds.config.api.Configuration; import com.swirlds.platform.event.preconsensus.PreconsensusEventFile; @@ -117,8 +118,8 @@ void standardOperationTest() throws IOException, InterruptedException { final PlatformContext platformContext = buildContext(); - final PreconsensusEventFileManager fileManager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager fileManager = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer = new SyncPreconsensusEventWriter(platformContext, fileManager); @@ -173,8 +174,8 @@ void stopFlushesEventsTest() throws IOException, InterruptedException { final PlatformContext platformContext = buildContext(); - final PreconsensusEventFileManager fileManager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager fileManager = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer = new SyncPreconsensusEventWriter(platformContext, fileManager); @@ -229,8 +230,8 @@ void ancientEventTest() throws IOException, InterruptedException { final PlatformContext platformContext = buildContext(); - final PreconsensusEventFileManager fileManager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager fileManager = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer = new SyncPreconsensusEventWriter(platformContext, fileManager); @@ -302,8 +303,8 @@ void overflowTest() throws IOException, InterruptedException { final PlatformContext platformContext = buildContext(); - final PreconsensusEventFileManager fileManager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager fileManager = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer = new SyncPreconsensusEventWriter(platformContext, fileManager); @@ -345,8 +346,8 @@ void beginStreamingEventsTest() throws IOException, InterruptedException { final PlatformContext platformContext = buildContext(); - final PreconsensusEventFileManager fileManager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager fileManager = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer = new SyncPreconsensusEventWriter(platformContext, fileManager); @@ -403,8 +404,8 @@ void discontinuityTest() throws IOException, InterruptedException { 
final PlatformContext platformContext = buildContext(); - final PreconsensusEventFileManager fileManager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), new NodeId(0)); + final PreconsensusEventFileManager fileManager = new PreconsensusEventFileManager( + platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer = new SyncPreconsensusEventWriter(platformContext, fileManager); From faa56ee74e02bb1eaefd04978791b4ba0fe14118 Mon Sep 17 00:00:00 2001 From: JeffreyDallas <39912573+JeffreyDallas@users.noreply.github.com> Date: Tue, 13 Jun 2023 10:11:41 -0500 Subject: [PATCH 18/70] Set swirlds app thread as user thread to avoid JVM exit (#7050) Signed-off-by: Jeffrey Tang --- .../main/java/com/swirlds/platform/Browser.java | 4 ++++ .../swirlds/platform/ThreadDumpGenerator.java | 17 +++++++++++++---- 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java index 15bb7cee2acd..c26d60f92b48 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java @@ -558,6 +558,7 @@ private void startThreadDumpGenerator() { if (!Files.exists(dir)) { rethrowIO(() -> Files.createDirectories(dir)); } + logger.info(STARTUP.getMarker(), "Starting thread dump generator and saving to directory {}", dir); ThreadDumpGenerator.generateThreadDumpAtIntervals( dir, Settings.getInstance().getThreadDumpPeriodMs()); } @@ -703,6 +704,9 @@ private Collection createLocalPlatforms( .setThreadName("appMain") .setRunnable(appMain) .build(); + // IMPORTANT: this swirlds app thread must be non-daemon, + // so that the JVM will not exit when the main thread exits + appThread.setDaemon(false); appRunThreads[ownHostIndex] = appThread; ownHostIndex++; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ThreadDumpGenerator.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ThreadDumpGenerator.java index 6a99c30bc662..c5929dc9a205 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ThreadDumpGenerator.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ThreadDumpGenerator.java @@ -77,7 +77,9 @@ public static synchronized void generateThreadDumpAtIntervals(Path dir, long mil } } }; - new Thread(generator).start(); + final Thread t = new Thread(generator); + t.setDaemon(true); + t.start(); } /** @@ -106,7 +108,7 @@ public static void generateThreadDumpFile(final Path dir, final String heading) if (threadInfo == null) { continue; } - writeThreadTitle(writer, threadInfo.getThreadName(), threadInfo.getThreadId()); + writeThreadTitle(writer, threadInfo.getThreadName(), threadInfo.getThreadId(), threadInfo.isDaemon()); final Thread.State state = threadInfo.getThreadState(); writer.append("\n java.lang.Thread.State: "); writer.append(state.toString()); @@ -125,7 +127,7 @@ public static void generateThreadDumpFile(final Path dir, final String heading) writeLock(writer, lockInfo); if (lockOwnerName != null) { writer.append("\n owned by "); - writeThreadTitle(writer, lockOwnerName,
threadInfo.getLockOwnerId(), threadInfo.isDaemon()); } } if (monitorInfos != null) { @@ -360,12 +362,19 @@ private static String lockGraph( /** * Used to write a textual identification of a thread to a writer */ - private static void writeThreadTitle(Writer writer, String threadName, Long threadId) throws IOException { + private static void writeThreadTitle( + final Writer writer, final String threadName, final Long threadId, final boolean isDaemon) + throws IOException { writer.append('"'); writer.append(threadName); writer.append("\" | "); writer.append("threadId = "); writer.append(Long.toString(threadId)); + if (isDaemon) { + writer.append(" | daemon"); + } else { + writer.append(" | user"); + } } /** From 5dfcd0a38cee1dfea06a292f367e7407ddbd3c27 Mon Sep 17 00:00:00 2001 From: Joseph Sinclair <121976561+jsync-swirlds@users.noreply.github.com> Date: Tue, 13 Jun 2023 09:21:54 -0700 Subject: [PATCH 19/70] Finish small refinements for Helidon version of GRPC (#6984) (#7072) Signed-off-by: Joseph Sinclair --- .../service/mono/grpc/GrpcServerManager.java | 6 +- .../mono/grpc/HelidonGrpcServerManager.java | 107 +++++--- .../grpc/HelidonGrpcServerManagerTest.java | 242 ++++++++++++++++++ 3 files changed, 324 insertions(+), 31 deletions(-) create mode 100644 hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/grpc/HelidonGrpcServerManagerTest.java diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/grpc/GrpcServerManager.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/grpc/GrpcServerManager.java index 8ab11848e1ee..9bf5a156f168 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/grpc/GrpcServerManager.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/grpc/GrpcServerManager.java @@ -16,9 +16,13 @@ package com.hedera.node.app.service.mono.grpc; +import edu.umd.cs.findbugs.annotations.NonNull; +import io.helidon.grpc.server.GrpcServer; +import java.util.List; import java.util.function.Consumer; /** Defines a type able to configure and start the gRPC servers. 
*/ public interface GrpcServerManager { - void start(int port, int tlsPort, Consumer println); + @NonNull + List start(int port, int tlsPort, Consumer println); } diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/grpc/HelidonGrpcServerManager.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/grpc/HelidonGrpcServerManager.java index 8045ad92ecf5..b4404a8c4745 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/grpc/HelidonGrpcServerManager.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/grpc/HelidonGrpcServerManager.java @@ -16,11 +16,9 @@ package com.hedera.node.app.service.mono.grpc; -import static com.hedera.node.app.service.mono.utils.SleepingPause.SLEEPING_PAUSE; -import static java.util.Objects.requireNonNull; - import com.hedera.node.app.service.mono.context.properties.NodeLocalProperties; import com.hedera.node.app.service.mono.utils.Pause; +import com.hedera.node.app.service.mono.utils.SleepingPause; import edu.umd.cs.findbugs.annotations.NonNull; import io.grpc.BindableService; import io.helidon.common.configurable.Resource; @@ -31,12 +29,17 @@ import io.helidon.grpc.server.GrpcRouting; import io.helidon.grpc.server.GrpcServer; import io.helidon.grpc.server.GrpcServerConfiguration; +import io.helidon.grpc.server.GrpcServerConfiguration.Builder; import java.nio.file.Path; +import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import java.util.function.Consumer; import javax.inject.Inject; import javax.inject.Singleton; @@ -50,44 +53,74 @@ public class HelidonGrpcServerManager implements GrpcServerManager { private final Set bindableServices; private final Consumer shutdownHook; + private final GrpcServerSource serverSource; private GrpcServer server; private GrpcServer tlsServer; private final NodeLocalProperties nodeProperties; + /** + * Public constructor used by the Dagger injection framework. + * @param bindableServices a Set of BindableService instances to be bound to the GRPC server on startup. + * @param shutdownHook a Thread consumer that will add a shutdown hook to the JVM. Typically a lambda + * for {@link Runtime#addShutdownHook(Thread)}. + * @param nodeProperties The local node properties that control server behavior. These mostly set the port + * and TLS key/certificate locations. + */ @Inject public HelidonGrpcServerManager( @NonNull final Set bindableServices, @NonNull final Consumer shutdownHook, @NonNull final NodeLocalProperties nodeProperties) { - this.bindableServices = requireNonNull(bindableServices); - this.shutdownHook = requireNonNull(shutdownHook); - this.nodeProperties = requireNonNull(nodeProperties); + this(bindableServices, shutdownHook, nodeProperties, new GrpcServerSource()); + } + + /** + * Testing use only, to enable fault injection in the GRPC server instance. + * @param bindableServices a Set of BindableService instances to be bound to the GRPC server on startup. + * @param shutdownHook a Thread consumer that will add a shutdown hook to the JVM. Typically a lambda + * for {@link Runtime#addShutdownHook(Thread)}. + * @param nodeProperties The local node properties that control server behavior. These mostly set the port + * and TLS key/certificate locations. 
+ * @param serverSource an instance of the inner class GrpcServerSource. This is used to allow injecting a mock + * GRPC server and use that for fault injection (e.g. to fail start so retry behavior is tested). + */ + HelidonGrpcServerManager( + @NonNull final Set bindableServices, + @NonNull final Consumer shutdownHook, + @NonNull final NodeLocalProperties nodeProperties, + @NonNull final GrpcServerSource serverSource) { + this.bindableServices = Objects.requireNonNull(bindableServices); + this.shutdownHook = Objects.requireNonNull(shutdownHook); + this.nodeProperties = Objects.requireNonNull(nodeProperties); + this.serverSource = serverSource; } @Override - public void start(int port, int tlsPort, @NonNull Consumer println) { + @NonNull + public List start(int port, int tlsPort, @NonNull Consumer println) { // Add a shutdown hook to the JVM, such that the grpc server is shutdown when the JVM is shutdown - this.shutdownHook.accept(new Thread(() -> { + shutdownHook.accept(new Thread(() -> { terminateOneServer(server, false, port, println); terminateOneServer(tlsServer, true, tlsPort, println); })); - try { - server = startOneServer(false, port, println, SLEEPING_PAUSE); - tlsServer = startOneServer(true, tlsPort, println, SLEEPING_PAUSE); + server = startOneServer(false, port, println, SleepingPause.SLEEPING_PAUSE); + tlsServer = startOneServer(true, tlsPort, println, SleepingPause.SLEEPING_PAUSE); } catch (ResourceException e) { tlsServer = null; String message = logMessage("Could not start", true, tlsPort, false); log.warn("{} ({}).", message, e.getMessage()); println.accept(message); + return server == null ? Collections.emptyList() : List.of(server); } + return List.of(server, tlsServer); } GrpcServer startOneServer(boolean sslEnabled, int port, Consumer println, Pause pause) { println.accept(logMessage("Starting", sslEnabled, port, true)); // Setup the GRPC Routing, such that all grpc services are registered - final var grpcRoutingBuilder = GrpcRouting.builder(); + final GrpcRouting.Builder grpcRoutingBuilder = GrpcRouting.builder(); bindableServices.forEach(grpcRoutingBuilder::register); // Create the GRPC Server @@ -95,7 +128,7 @@ GrpcServer startOneServer(boolean sslEnabled, int port, Consumer println final Config initialConfig = Config.builder(getMapSource(nodeProperties)).build(); - final var configBuilder = + final Builder configBuilder = GrpcServerConfiguration.builder().config(initialConfig).port(port); /* Note: We would like to set all of the following, but Helidon simply doesn't support it. keepAliveTime(nodeProperties.nettyProdKeepAliveTime(), TimeUnit.SECONDS) @@ -118,31 +151,29 @@ GrpcServer startOneServer(boolean sslEnabled, int port, Consumer println .tlsKey(Resource.create(Path.of(nodeProperties.nettyTlsKeyPath()))) .build()); } - - final var grpcServer = GrpcServer.create(configBuilder.build(), grpcRoutingBuilder); + final GrpcServer grpcServer = serverSource.getServer(configBuilder, grpcRoutingBuilder); // Start the grpc server. Note that we have to do some retry logic because our default port is // 50211, 50212, which are both in the ephemeral port range, and may very well be in use right // now. Of course this doesn't fix that, but it does give us a chance. What we really should do, // is stop using ports above 10K. 
- final var startRetries = nodeProperties.nettyStartRetries(); - final var startRetryIntervalMs = nodeProperties.nettyStartRetryIntervalMs(); - var retryNo = 1; - final var n = Math.max(0, startRetries); - for (; retryNo <= n; retryNo++) { + final int startRetries = nodeProperties.nettyStartRetries(); + final long startRetryIntervalMs = nodeProperties.nettyStartRetryIntervalMs(); + final int maxRetries = Math.max(0, startRetries); + int retryNo; + for (retryNo = 1; retryNo <= maxRetries; retryNo++) { try { grpcServer.start(); break; - } catch (Exception e) { - final var summaryMsg = logMessage("Still trying to start", sslEnabled, port, true); + } catch (RuntimeException e) { + final String summaryMsg = logMessage("Still trying to start", sslEnabled, port, true); log.warn("(Attempts={}) {}", retryNo, summaryMsg, e); - pause.forMs(startRetryIntervalMs); + if (!pause.forMs(startRetryIntervalMs)) break; } } - if (retryNo == n + 1) { - grpcServer.start(); + if (retryNo > maxRetries) { + throw new RuntimeException("Unable to start server after %d retries. Giving up.".formatted(retryNo)); } - println.accept(logMessage("...done starting", sslEnabled, port, false)); return grpcServer; @@ -160,15 +191,17 @@ private void terminateOneServer(GrpcServer server, boolean tlsSupport, int port, if (server == null) { return; } - try { println.accept(logMessage("Terminating", tlsSupport, port, true)); server.shutdown().toCompletableFuture().get(TIME_TO_AWAIT_TERMINATION, TimeUnit.SECONDS); println.accept(logMessage("...done terminating", tlsSupport, port, false)); - } catch (InterruptedException ie) { + } catch (InterruptedException e) { Thread.currentThread().interrupt(); - log.warn("Interrupted while waiting for Helidon gRPC to terminate on port {}!", port, ie); - } catch (Exception e) { + log.warn("Interrupted while waiting for Helidon gRPC to terminate on port {}!", port, e); + } catch (TimeoutException e) { + final String message = "Timed out after {} seconds while waiting for Helidon gRPC to terminate on port {}!"; + log.warn(message, Long.toString(TIME_TO_AWAIT_TERMINATION), Integer.toString(port), e); + } catch (RuntimeException | ExecutionException e) { log.warn("Exception while waiting for Helidon gRPC to terminate on port {}!", port, e); } } @@ -178,4 +211,18 @@ private String logMessage(String action, boolean tlsSupport, int port, boolean i "%s Helidon gRPC%s on port %d%s", action, tlsSupport ? " with TLS support" : "", port, isOpening ? "..." : "."); } + + /** + * Basic inner class to get a server. This is necessary to enable testing to inject faults + * in the GRPC server, which is otherwise unavailable. There should not be any need to ever + * modify this class. + * Note, yes, this is ugly, but it's the only way to get decent testing for failure scenarios. 
+ */ + static class GrpcServerSource { + @NonNull + public GrpcServer getServer( + @NonNull final Builder configBuilder, @NonNull final GrpcRouting.Builder grpcRoutingBuilder) { + return GrpcServer.create(configBuilder.build(), grpcRoutingBuilder); + } + } } diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/grpc/HelidonGrpcServerManagerTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/grpc/HelidonGrpcServerManagerTest.java new file mode 100644 index 000000000000..decff274b2bc --- /dev/null +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/grpc/HelidonGrpcServerManagerTest.java @@ -0,0 +1,242 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.mono.grpc; + +import com.hedera.node.app.service.mono.context.properties.NodeLocalProperties; +import com.hedera.node.app.service.mono.grpc.HelidonGrpcServerManager.GrpcServerSource; +import com.hedera.node.app.service.mono.utils.Pause; +import io.grpc.BindableService; +import io.grpc.ServerServiceDefinition; +import io.helidon.common.configurable.ResourceException; +import io.helidon.grpc.server.GrpcServer; +import java.nio.file.Path; +import java.util.List; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.function.Consumer; +import org.assertj.core.api.BDDAssertions; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.ArgumentMatchers; +import org.mockito.BDDMockito; +import org.mockito.Mock; +import org.mockito.Mock.Strictness; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class HelidonGrpcServerManagerTest { + private int startRetries = 3; + private long startRetryIntervalMs = 10L; + private int port = 8080; + private int tlsPort = port + 1; + private Set bindableServices; + + @Mock(strictness = Strictness.LENIENT) + private Pause mockPause; + + @Mock(strictness = Strictness.LENIENT) + private Consumer hookAdder; + + @Mock(strictness = Strictness.LENIENT) + private Consumer println; + + @Mock(strictness = Strictness.LENIENT) + private NodeLocalProperties nodeProperties; + + @Mock(strictness = Strictness.LENIENT) + private BindableService mockServiceOne; + + @Mock(strictness = Strictness.LENIENT) + private BindableService mockServiceTwo; + + @Mock(strictness = Strictness.LENIENT) + private BindableService mockServiceThree; + + @Mock(strictness = Strictness.LENIENT) + private GrpcServerSource mockServerSource; + + @Mock(strictness = Strictness.LENIENT) + private GrpcServer mockServer; + + @Mock(strictness = Strictness.LENIENT) + private GrpcServer mockTlsServer; + + private HelidonGrpcServerManager subject; + + @SuppressWarnings("AutoBoxing") + 
@BeforeEach + void setup() throws Exception { + Path fakeCertPath = + Path.of(ClassLoader.getSystemResource("test-hedera.crt").toURI()); + Path fakeKeyPath = + Path.of(ClassLoader.getSystemResource("test-hedera.key").toURI()); + BDDMockito.given(mockServiceOne.bindService()) + .willReturn(ServerServiceDefinition.builder("MachOne").build()); + BDDMockito.given(mockServiceTwo.bindService()) + .willReturn(ServerServiceDefinition.builder("MachTwo").build()); + BDDMockito.given(mockServiceThree.bindService()) + .willReturn(ServerServiceDefinition.builder("MachThree").build()); + bindableServices = Set.of(mockServiceOne, mockServiceTwo, mockServiceThree); + + BDDMockito.given(mockPause.forMs(ArgumentMatchers.anyLong())).willReturn(true); + + BDDMockito.given(nodeProperties.nettyTlsCrtPath()).willReturn(fakeCertPath.toString()); + BDDMockito.given(nodeProperties.nettyTlsKeyPath()).willReturn(fakeKeyPath.toString()); + BDDMockito.given(nodeProperties.nettyStartRetries()).willReturn(startRetries); + BDDMockito.given(nodeProperties.nettyStartRetryIntervalMs()).willReturn(startRetryIntervalMs); + + subject = new HelidonGrpcServerManager(bindableServices, hookAdder, nodeProperties, mockServerSource); + } + + @Test + void retriesStartingTilSuccess() throws Exception { + final String ExpectedExceptionMessage = "Expected : retriesStartingTilSuccess"; + // setup: + RuntimeException exceptionToThrow = new RuntimeException(ExpectedExceptionMessage); + BDDMockito.given(mockTlsServer.start()) + .willThrow(exceptionToThrow, exceptionToThrow) + .willReturn(null); + BDDMockito.given(mockServerSource.getServer(ArgumentMatchers.any(), ArgumentMatchers.any())) + .willReturn(mockTlsServer); + // when: + GrpcServer server = subject.startOneServer(true, port, ignore -> {}, mockPause); + // then: + BDDAssertions.assertThat(server).isNotNull(); + Mockito.verify(mockPause, Mockito.times(2)).forMs(ArgumentMatchers.anyLong()); + Mockito.verify(server, Mockito.times(3)).start(); + } + + @Test + void givesUpIfMaxRetriesExhaustedAndPropagatesException() throws Exception { + final String ExpectedExceptionMessage = "Expected : givesUpIfMaxRetriesExhaustedAndPropagatesException"; + BDDMockito.given(mockServer.start()).willThrow(new RuntimeException(ExpectedExceptionMessage)); + BDDMockito.given(mockServerSource.getServer(ArgumentMatchers.any(), ArgumentMatchers.any())) + .willReturn(mockServer); + + Assertions.assertThrows( + RuntimeException.class, () -> subject.startOneServer(false, port, ignore -> {}, mockPause)); + Mockito.verify(mockPause, Mockito.times(startRetries)).forMs(startRetryIntervalMs); + } + + @Test + void neverRetriesIfZeroRetriesSet() throws Exception { + final String ExpectedExceptionMessage = "Expected : neverRetriesIfZeroRetriesSet"; + // setup: + BDDMockito.given(nodeProperties.nettyStartRetries()).willReturn(0); + BDDMockito.given(mockServer.start()).willThrow(new RuntimeException(ExpectedExceptionMessage)); + BDDMockito.given(mockServerSource.getServer(ArgumentMatchers.any(), ArgumentMatchers.any())) + .willReturn(mockServer); + // expect: + Assertions.assertThrows( + RuntimeException.class, () -> subject.startOneServer(false, port, ignore -> {}, mockPause)); + // then: + Mockito.verify(mockPause, Mockito.never()).forMs(startRetryIntervalMs); + } + + @Test + void buildsAndAddsHookNonTlsOnNonExistingCert() throws Exception { + final String ExpectedExceptionMessage = "Expected : buildsAndAddsHookNonTlsOnNonExistingCert"; + ArgumentCaptor captor = ArgumentCaptor.forClass(Thread.class); + 
BDDMockito.willDoNothing().given(hookAdder).accept(captor.capture()); + BDDMockito.given(nodeProperties.nettyTlsCrtPath()).willThrow(new ResourceException(ExpectedExceptionMessage)); + BDDMockito.given(mockServerSource.getServer(ArgumentMatchers.any(), ArgumentMatchers.any())) + .willCallRealMethod(); + // when: + List servers = subject.start(port, tlsPort, println); + // and: + BDDAssertions.assertThat(servers).isNotNull().hasSize(1); // no tlsServer for this test + BDDAssertions.assertThat(servers.get(0)).isNotNull(); + // and: + Assertions.assertDoesNotThrow(() -> captor.getValue().run()); + } + + @Test + void buildsAndAddsHookNonTlsOnNonExistingKey() throws Exception { + final String ExpectedExceptionMessage = "Expected : buildsAndAddsHookNonTlsOnNonExistingKey"; + ArgumentCaptor captor = ArgumentCaptor.forClass(Thread.class); + BDDMockito.willDoNothing().given(hookAdder).accept(captor.capture()); + BDDMockito.given(nodeProperties.nettyTlsKeyPath()).willThrow(new ResourceException(ExpectedExceptionMessage)); + BDDMockito.given(mockServerSource.getServer(ArgumentMatchers.any(), ArgumentMatchers.any())) + .willCallRealMethod(); + // when: + List servers = subject.start(port, tlsPort, println); + // and: + BDDAssertions.assertThat(servers).isNotNull().hasSize(1); // no tlsServer for this test + BDDAssertions.assertThat(servers.get(0)).isNotNull(); + // and: + Assertions.assertDoesNotThrow(() -> captor.getValue().run()); + } + + @SuppressWarnings("CallToThreadRun") + @Test + void buildsAndAddsHookAsExpected() throws Exception { + // setup: + ArgumentCaptor captor = ArgumentCaptor.forClass(Thread.class); + BDDMockito.willDoNothing().given(hookAdder).accept(captor.capture()); + BDDMockito.given(mockServerSource.getServer(ArgumentMatchers.any(), ArgumentMatchers.any())) + .willCallRealMethod(); + // when: + List servers = subject.start(port, tlsPort, println); + // and: + BDDAssertions.assertThat(servers).isNotNull().hasSize(2); + BDDAssertions.assertThat(servers.get(0)).isNotNull(); + BDDAssertions.assertThat(servers.get(1)).isNotNull(); + // and: + Assertions.assertDoesNotThrow(() -> captor.getValue().run()); + } + + @Test + void throwsRteOnProblem() { + BDDMockito.willThrow(RuntimeException.class).given(hookAdder).accept(ArgumentMatchers.any()); + BDDMockito.given(mockServerSource.getServer(ArgumentMatchers.any(), ArgumentMatchers.any())) + .willCallRealMethod(); + // expect: + Assertions.assertThrows(RuntimeException.class, () -> subject.start(port, tlsPort, println)); + } + + @SuppressWarnings({"CallToThreadRun", "unchecked"}) + @Test + void catchesInterruptedException() throws Exception { + // setup: + // a Mock throwing InterruptedException is hard to setup + CompletableFuture mockFuture = Mockito.mock(CompletableFuture.class); + BDDMockito.given(mockFuture.toCompletableFuture()).willReturn(mockFuture); + BDDMockito.given(mockFuture.get()).willThrow(new InterruptedException("Test Interrupt")); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Thread.class); + BDDMockito.willDoNothing().given(hookAdder).accept(captor.capture()); + BDDMockito.given(mockServer.shutdown()).willReturn(mockFuture); + BDDMockito.given(mockServer.start()).willReturn(mockFuture); + BDDMockito.given(mockServerSource.getServer(ArgumentMatchers.any(), ArgumentMatchers.any())) + .willReturn(mockServer); + // Cannot have two servers (nigh impossible to set name correctly, and GRPC requires unique names). 
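+        // Failing the TLS cert lookup stubbed on the next line keeps the TLS server from being built,
+        // so start() yields only the plain-text server backed by mockServer.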
+ BDDMockito.given(nodeProperties.nettyTlsCrtPath()).willThrow(new ResourceException("No TLS for this Test")); + // when: + List servers = subject.start(port, tlsPort, println); + BDDAssertions.assertThat(servers).isNotNull().hasSize(1); + // then: + BDDAssertions.assertThat(servers.get(0)).isEqualTo(mockServer); + Assertions.assertThrows( + InterruptedException.class, + () -> servers.get(0).shutdown().toCompletableFuture().get()); + Assertions.assertDoesNotThrow(() -> captor.getValue().run()); + } +} From ffdbe7f15f34a06608b02f15dcb3be9cae741031 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20Brandst=C3=A4tter?= Date: Tue, 13 Jun 2023 20:26:42 +0200 Subject: [PATCH 20/70] Remove False warnings from Settings engine for valid parameters (#7075) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Timo Brandstätter --- .../swirlds/platform/SettingConstants.java | 94 +++++++++++++++++++ .../java/com/swirlds/platform/Settings.java | 44 +++++---- .../com/swirlds/platform/SettingsTest.java | 14 +++ 3 files changed, 132 insertions(+), 20 deletions(-) diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java index 2aa2bff246f9..a4f59c8f2e12 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java @@ -16,6 +16,8 @@ package com.swirlds.platform; +import java.util.Set; + /** * @deprecated will be replaced by the {@link com.swirlds.config.api.Configuration} API in near future. If you need * to use this class please try to do as less static access as possible. 
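For context on the hunks that follow: the patch introduces a REMOVED_SETTINGS set naming every setting that has already migrated to the new com.swirlds.config.api.Configuration system, and Settings.handleSetting now consults it before its reflective field lookup, so a settings.txt entry for a migrated name no longer logs the false "not a valid setting name" warning. Below is a minimal sketch of that intended control flow, assuming the name is matched exactly as it appears in settings.txt; handleSettingSketch and trySetReflectively are illustrative stand-ins, not code from the patch:

    // Sketch only: migrated settings are accepted silently; genuinely unknown
    // names still fall through to the warning path.
    private boolean handleSettingSketch(final String settingName, final String value) {
        if (SettingConstants.REMOVED_SETTINGS.contains(settingName)) {
            // Handled by the new config system elsewhere: nothing to set, nothing to warn about
            return true;
        }
        // Hypothetical stand-in for the reflective field lookup shown in the hunk below
        final boolean good = trySetReflectively(settingName, value);
        if (!good) {
            logger.warn(STARTUP.getMarker(), "WARNING: {} is not a valid setting name.", settingName);
        }
        return good;
    }
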
@@ -101,5 +103,97 @@ public final class SettingConstants { static final String PLAYBACK_END_TIME_STAMP_DEFAULT_VALUE = ""; static final boolean GOSSIP_WITH_DIFFERENT_VERSIONS_DEFAULT_VALUE = false; + static final Set REMOVED_SETTINGS = Set.of( + "reconnectg.active", + "reconnectg.reconnectWindowSeconds", + "reconnectg.fallenBehindThreshold", + "reconnectg.asyncStreamTimeoutMilliseconds", + "reconnectg.asyncOutputStreamFlushMilliseconds", + "reconnectg.asyncStreamBufferSize", + "reconnectg.asyncStreams", + "reconnectg.maxAckDelayMilliseconds", + "reconnectg.maximumReconnectFailuresBeforeShutdown", + "reconnectg.minimumTimeBetweenReconnects", + "chatter.useChatter", + "chatter.attemptedChatterEventPerSecond", + "chatter.chatteringCreationThreshold", + "chatter.chatterIntakeThrottle", + "chatter.otherEventDelay", + "chatter.selfEventQueueCapacity", + "chatter.otherEventQueueCapacity", + "chatter.descriptorQueueCapacity", + "chatter.processingTimeInterval", + "chatter.heartbeatInterval", + "chatter.futureGenerationLimit", + "chatter.criticalQuorumSoftening", + "chatter.sleepAfterFailedNegotiation", + "fcHashMap.maximumGCQueueSize", + "fcHashMap.gCQueueThresholdPeriod", + "fcHashMap.archiveEnabled", + "fcHashMap.rebuildSplitFactor", + "fcHashMap.rebuildThreadCount", + "jasperDb.maxNumOfKeys", + "jasperDb.hashesRamToDiskThreshold", + "jasperDb.mediumMergeCutoffMb", + "jasperDb.smallMergeCutoffMb", + "jasperDb.mergePeriodUnit", + "jasperDb.maxNumberOfFilesInMerge", + "jasperDb.minNumberOfFilesInMerge", + "jasperDb.mergeActivatePeriod", + "jasperDb.mediumMergePeriod", + "jasperDb.fullMergePeriod", + "jasperDb.maxDataFileBytes", + "jasperDb.moveListChunkSize", + "jasperDb.maxRamUsedForMergingGb", + "jasperDb.iteratorInputBufferBytes", + "jasperDb.writerOutputBufferBytes", + "jasperDb.reconnectKeyLeakMitigationEnabled", + "jasperDb.keySetBloomFilterHashCount", + "jasperDb.keySetBloomFilterSizeInBytes", + "jasperDb.keySetHalfDiskHashMapSize", + "jasperDb.keySetHalfDiskHashMapBuffer", + "jasperDb.indexRebuildingEnforced", + "jasperDb.leafRecordCacheSize", + "virtualMap.percentHashThreads", + "virtualMap.numHashThreads", + "virtualMap.percentCleanerThreads", + "virtualMap.numCleanerThreads", + "virtualMap.maximumVirtualMapSize", + "virtualMap.virtualMapWarningThreshold", + "virtualMap.virtualMapWarningInterval", + "virtualMap.flushInterval", + "virtualMap.copyFlushThreshold", + "virtualMap.familyThrottleThreshold", + "virtualMap.preferredFlushQueueSize", + "virtualMap.flushThrottleStepSize", + "virtualMap.maximumFlushThrottlePeriod", + "state.savedStateDirectory", + "state.mainClassNameOverride", + "state.cleanSavedStateDirectory", + "state.stateSavingQueueSize", + "state.saveStatePeriod", + "state.saveReconnectStateToDisk", + "state.signedStateDisk", + "state.dumpStateOnAnyISS", + "state.dumpStateOnFatal", + "state.haltOnAnyIss", + "state.automatedSelfIssRecovery", + "state.haltOnCatastrophicIss", + "state.secondsBetweenISSDumps", + "state.secondsBetweenIssLogs", + "state.stateDeletionErrorLogFrequencySeconds", + "state.enableHashStreamLogging", + "state.debugHashDepth", + "state.maxAgeOfFutureStateSignatures", + "state.roundsToKeepForSigning", + "state.roundsToKeepAfterSigning", + "state.suspiciousSignedStateAge", + "state.stateHistoryEnabled", + "state.debugStackTracesEnabled", + "state.requireStateLoad", + "state.emergencyStateFileName", + "state.checkSignedStateFromDisk", + "signedStateFreq"); + private SettingConstants() {} } diff --git 
a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java index 08d57094032b..d3464937aad0 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java @@ -61,6 +61,7 @@ import static com.swirlds.platform.SettingConstants.PROMETHEUS_ENDPOINT_MAX_BACKLOG_ALLOWED_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.PROMETHEUS_ENDPOINT_PORT_NUMBER_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.RANDOM_EVENT_PROBABILITY_DEFAULT_VALUE; +import static com.swirlds.platform.SettingConstants.REMOVED_SETTINGS; import static com.swirlds.platform.SettingConstants.RESCUE_CHILDLESS_INVERSE_PROBABILITY_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.SAVED_STRING; import static com.swirlds.platform.SettingConstants.SETTINGS_TXT; @@ -546,30 +547,33 @@ private boolean handleSetting(final String[] pars) { name = split[0]; subName = split[1]; } - final String val = pars.length > 1 ? pars[1].trim() : ""; // the first parameter passed in, or "" if none - boolean good = false; // is name a valid name of a non-final static field in Settings? - final Field field = getFieldByName(Settings.class.getDeclaredFields(), name); - if (field != null && !Modifier.isFinal(field.getModifiers())) { - try { - if (subName == null) { - good = setValue(field, this, val); - } else { - final Field subField = getFieldByName(field.getType().getDeclaredFields(), subName); - if (subField != null) { - good = setValue(subField, field.get(this), val); + if (!REMOVED_SETTINGS.contains(name)) { + final String val = pars.length > 1 ? pars[1].trim() : ""; // the first parameter passed in, or "" if none + boolean good = false; // is name a valid name of a non-final static field in Settings? 
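+                // Only names that did not migrate to the new config system reach this reflective
+                // lookup; the field (or nested sub-field) is resolved and the parsed value applied.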
+ final Field field = getFieldByName(Settings.class.getDeclaredFields(), name); + if (field != null && !Modifier.isFinal(field.getModifiers())) { + try { + if (subName == null) { + good = setValue(field, this, val); + } else { + final Field subField = getFieldByName(field.getType().getDeclaredFields(), subName); + if (subField != null) { + good = setValue(subField, field.get(this), val); + } } + } catch (final IllegalArgumentException | IllegalAccessException | SettingsException e) { + logger.error( + EXCEPTION.getMarker(), "illegal line in settings.txt: {}, {} {}", pars[0], pars[1], e); } - } catch (final IllegalArgumentException | IllegalAccessException | SettingsException e) { - logger.error(EXCEPTION.getMarker(), "illegal line in settings.txt: {}, {} {}", pars[0], pars[1], e); } - } - if (!good) { - final String err = "WARNING: " + pars[0] + " is not a valid setting name."; - // this only happens if settings.txt exist, so it's internal, not users, so print it - CommonUtils.tellUserConsole(err); - logger.warn(STARTUP.getMarker(), err); - return false; + if (!good) { + final String err = "WARNING: " + pars[0] + " is not a valid setting name."; + // this only happens if settings.txt exist, so it's internal, not users, so print it + CommonUtils.tellUserConsole(err); + logger.warn(STARTUP.getMarker(), err); + return false; + } } return true; } diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java index fa3ca86162bb..1fd9babb92c3 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java @@ -128,6 +128,20 @@ public void checkEmptyFile() { Assertions.assertDoesNotThrow(() -> settings.loadSettings(emptyFile)); } + @Test + @Tag(TestTypeTags.FUNCTIONAL) + @DisplayName("Checks that loading settings with migrated settings does not throw an exception") + public void checkOnlyConfigSettingsFile() { + // given + final Settings settings = Settings.getInstance(); + final File emptyFile = + new File(SettingsTest.class.getResource("settings13.txt").getFile()); + + // then + Assertions.assertTrue(emptyFile.exists()); + Assertions.assertDoesNotThrow(() -> settings.loadSettings(emptyFile)); + } + @Test @Tag(TestTypeTags.FUNCTIONAL) @DisplayName("Checks that null value for file not allowed") From 1906142ea8d3b3c246e60fb3b24493daaaf76090 Mon Sep 17 00:00:00 2001 From: Matt Hess Date: Tue, 13 Jun 2023 13:55:15 -0600 Subject: [PATCH 21/70] Implement token burn handling (#7058) Signed-off-by: Matt Hess --- .../dispatcher/MonoTransactionDispatcher.java | 6 + .../MonoTransactionDispatcherTest.java | 12 + .../service/token/impl/WritableNftStore.java | 22 + .../token/impl/handlers/BaseTokenHandler.java | 4 + .../token/impl/handlers/TokenBurnHandler.java | 131 ++- .../token/impl/handlers/TokenMintHandler.java | 2 +- .../token/impl/util/TokenHandlerHelper.java | 26 + .../TokenSupplyChangeOpsValidator.java | 62 +- .../token/impl/test/WritableNftStoreTest.java | 43 + .../handlers/TokenBurnHandlerParityTest.java | 73 -- .../test/handlers/TokenBurnHandlerTest.java | 888 ++++++++++++++++++ .../test/handlers/TokenDeleteHandlerTest.java | 18 - ...TokenDissociateFromAccountHandlerTest.java | 33 - .../test/handlers/util/ParityTestBase.java | 46 + .../test/util/TokenHandlerHelperTest.java | 46 +- 15 files changed, 1262 insertions(+), 150 deletions(-) delete 
mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerParityTest.java create mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcher.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcher.java index 50e3305b3e6b..2b5c3be9d605 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcher.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcher.java @@ -94,6 +94,7 @@ public void dispatchHandle(@NonNull final HandleContext context) { case TOKEN_UNPAUSE -> dispatchTokenUnpause(context); case TOKEN_FEE_SCHEDULE_UPDATE -> dispatchTokenFeeScheduleUpdate(context); case TOKEN_DELETION -> dispatchTokenDeletion(context); + case TOKEN_BURN -> dispatchTokenBurn(context); case UTIL_PRNG -> dispatchPrng(context); default -> throw new IllegalArgumentException(TYPE_NOT_SUPPORTED); } @@ -243,4 +244,9 @@ private void dispatchTokenDeletion(@NonNull final HandleContext handleContext) { final var handler = handlers.tokenDeleteHandler(); handler.handle(handleContext); } + + private void dispatchTokenBurn(@NonNull final HandleContext handleContext) { + final var handler = handlers.tokenBurnHandler(); + handler.handle(handleContext); + } } diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java index e12f57481291..29ca0b7b4678 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java @@ -646,6 +646,18 @@ void dispatchesTokenDeleteAsExpected() { verify(handleContext).body(); } + @Test + void dispatchesTokenBurnAsExpected() { + final var txnBody = TransactionBody.newBuilder() + .tokenBurn(TokenBurnTransactionBody.DEFAULT) + .build(); + given(handleContext.body()).willReturn(txnBody); + + dispatcher.dispatchHandle(handleContext); + + verify(handleContext).body(); + } + @Test void dispatchesCryptoCreateAsExpected() { final var txnBody = TransactionBody.newBuilder() diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableNftStore.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableNftStore.java index 9a5905d2d1bd..9c6b9f440178 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableNftStore.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/WritableNftStore.java @@ -101,4 +101,26 @@ public long sizeOfState() { public Set modifiedNfts() { return nftState.modifiedKeys(); } + + /** + * Removes the {@link Nft} with the given serial number + * + * @param serialNum - the combined unique ID of the NFT to remove + */ + public void remove(final @NonNull UniqueTokenId serialNum) { + nftState.remove(requireNonNull(serialNum)); + } + + /** + * Removes the {@link Nft} with the given serial number + * + * @param 
tokenId - the token id of the NFT to remove + * @param serialNum - the serial number of the NFT to remove + */ + public void remove(final @NonNull TokenID tokenId, final long serialNum) { + remove(UniqueTokenId.newBuilder() + .tokenTypeNumber(tokenId.tokenNum()) + .serialNumber(serialNum) + .build()); + } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java index 4e110da0169a..ff627da810c2 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java @@ -67,6 +67,10 @@ protected void mintFungible( /** * Since token mint and token burn change the supply on the token and treasury account, * this method is used to change the supply. + * + *

+ * Note: This method assumes the given token has a non-null supply key! + * * @param token the token that is minted or burned * @param treasuryRel the treasury relation for the token * @param amount the amount to mint or burn diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java index a33087ae85bb..8068380e294d 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java @@ -16,18 +16,39 @@ package com.hedera.node.app.service.token.impl.handlers; +import static com.hedera.hapi.node.base.ResponseCodeEnum.*; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_NFT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TRANSACTION_BODY; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateFalsePreCheck; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateTruePreCheck; import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.HederaFunctionality; import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TokenType; +import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.state.token.TokenRelation; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.service.token.ReadableTokenRelationStore; import com.hedera.node.app.service.token.ReadableTokenStore; +import com.hedera.node.app.service.token.impl.WritableAccountStore; +import com.hedera.node.app.service.token.impl.WritableNftStore; +import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; +import com.hedera.node.app.service.token.impl.WritableTokenStore; +import com.hedera.node.app.service.token.impl.util.TokenHandlerHelper; +import com.hedera.node.app.service.token.impl.validators.TokenSupplyChangeOpsValidator; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.PreHandleContext; import com.hedera.node.app.spi.workflows.TransactionHandler; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.LinkedHashSet; +import java.util.List; import javax.inject.Inject; import javax.inject.Singleton; @@ -36,10 +57,14 @@ * HederaFunctionality#TOKEN_BURN}. 
*/ @Singleton -public class TokenBurnHandler implements TransactionHandler { +public final class TokenBurnHandler extends BaseTokenHandler implements TransactionHandler { + @NonNull + private final TokenSupplyChangeOpsValidator validator; + @Inject - public TokenBurnHandler() { + public TokenBurnHandler(@NonNull final TokenSupplyChangeOpsValidator validator) { // Exists for injection + this.validator = requireNonNull(validator); } @Override @@ -56,8 +81,108 @@ public void preHandle(@NonNull final PreHandleContext context) throws PreCheckEx } } + @Override + public void pureChecks(@NonNull final TransactionBody txn) throws PreCheckException { + final var op = txn.tokenBurnOrThrow(); + final var fungibleCount = op.amount(); + final var serialNums = op.serialNumbers(); + + validateTruePreCheck(op.hasToken(), INVALID_TOKEN_ID); + + // If a positive fungible amount is present, the NFT serial numbers must be empty + validateFalsePreCheck(fungibleCount > 0 && !serialNums.isEmpty(), INVALID_TRANSACTION_BODY); + + validateFalsePreCheck(fungibleCount < 0, INVALID_TOKEN_BURN_AMOUNT); + + // Validate the NFT serial numbers + if (fungibleCount < 1 && !serialNums.isEmpty()) { + for (final var serialNumber : op.serialNumbers()) { + validateTruePreCheck(serialNumber > 0, INVALID_NFT_ID); + } + } + } + @Override public void handle(@NonNull final HandleContext context) throws HandleException { - throw new UnsupportedOperationException("Not implemented"); + requireNonNull(context); + final var accountStore = context.writableStore(WritableAccountStore.class); + final var tokenStore = context.writableStore(WritableTokenStore.class); + final var tokenRelStore = context.writableStore(WritableTokenRelationStore.class); + final var nftStore = context.writableStore(WritableNftStore.class); + final var txn = context.body(); + final var op = txn.tokenBurnOrThrow(); + final var tokenId = op.token(); + final var fungibleBurnCount = op.amount(); + // Wrapping the serial nums this way de-duplicates the serial nums: + final var nftSerialNums = new ArrayList<>(new LinkedHashSet<>(op.serialNumbers())); + final var validated = validateSemantics(tokenId, fungibleBurnCount, nftSerialNums, tokenStore, tokenRelStore); + final var token = validated.token(); + + if (token.tokenType() == TokenType.FUNGIBLE_COMMON) { + changeSupply( + validated.token(), + validated.tokenTreasuryRel(), + -fungibleBurnCount, + INVALID_TOKEN_BURN_AMOUNT, + accountStore, + tokenStore, + tokenRelStore); + } else { + validateTrue(!nftSerialNums.isEmpty(), INVALID_TOKEN_BURN_METADATA); + + // Load and validate the nfts + for (final Long nftSerial : nftSerialNums) { + final var nft = nftStore.get(tokenId, nftSerial); + validateTrue(nft != null, INVALID_NFT_ID); + + final var nftOwner = nft.ownerNumber(); + validateTrue(treasuryOwnsNft(nftOwner), TREASURY_MUST_OWN_BURNED_NFT); + } + + // Update counts for accounts and token rels + changeSupply( + token, + validated.tokenTreasuryRel(), + -nftSerialNums.size(), + FAIL_INVALID, + accountStore, + tokenStore, + tokenRelStore); + + // Update treasury's NFT count + final var treasuryAcct = accountStore.get(asAccount(token.treasuryAccountNumber())); + final var updatedTreasuryAcct = treasuryAcct + .copyBuilder() + .numberOwnedNfts(treasuryAcct.numberOwnedNfts() - nftSerialNums.size()) + .build(); + accountStore.put(updatedTreasuryAcct); + + // Remove the nft objects + nftSerialNums.forEach(serialNum -> nftStore.remove(tokenId, serialNum)); + } + } + + private ValidationResult validateSemantics( + @NonNull final 
TokenID tokenId, + final long fungibleBurnCount, + @NonNull final List nftSerialNums, + final ReadableTokenStore tokenStore, + final ReadableTokenRelationStore tokenRelStore) { + validateTrue(fungibleBurnCount >= 0, INVALID_TOKEN_BURN_AMOUNT); + + validator.validateBurn(fungibleBurnCount, nftSerialNums); + + final var token = TokenHandlerHelper.getIfUsable(tokenId, tokenStore); + validateTrue(token.supplyKey() != null, TOKEN_HAS_NO_SUPPLY_KEY); + + final var treasuryAcctId = asAccount(token.treasuryAccountNumber()); + final var treasuryRel = TokenHandlerHelper.getIfUsable(treasuryAcctId, tokenId, tokenRelStore); + return new ValidationResult(token, treasuryRel); } + + private boolean treasuryOwnsNft(final long ownerNum) { + return ownerNum == 0; + } + + private record ValidationResult(@NonNull Token token, @NonNull TokenRelation tokenTreasuryRel) {} } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java index 73dcb06d90e0..6f4082cc77db 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java @@ -142,7 +142,7 @@ public void handle(@NonNull final HandleContext context) throws HandleException private void validateSemantics(final HandleContext context) { requireNonNull(context); final var op = context.body().tokenMintOrThrow(); - validator.validate(op.amount(), op.metadata()); + validator.validateMint(op.amount(), op.metadata()); } /** diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java index aa898d2f6eaf..f2a686e58b4f 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java @@ -39,6 +39,7 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_IS_PAUSED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_NOT_ASSOCIATED_TO_ACCOUNT; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_WAS_DELETED; import static com.hedera.node.app.spi.workflows.HandleException.validateFalse; import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; @@ -48,7 +49,9 @@ import com.hedera.hapi.node.base.TokenID; import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.state.token.TokenRelation; import com.hedera.node.app.service.token.ReadableAccountStore; +import com.hedera.node.app.service.token.ReadableTokenRelationStore; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.spi.validation.EntityType; import com.hedera.node.app.spi.validation.ExpiryValidator; @@ -117,4 +120,27 @@ public static Token getIfUsable(@NonNull final TokenID tokenId, @NonNull final R validateFalse(token.paused(), TOKEN_IS_PAUSED); return token; } + + 
/** + * Returns the token relation if it exists and is usable + * + * @param accountId the ID of the account + * @param tokenId the ID of the token + * @param tokenRelStore the {@link ReadableTokenRelationStore} to use for token relation retrieval + * @throws HandleException if any of the token relation conditions are not met + */ + @NonNull + public static TokenRelation getIfUsable( + @NonNull final AccountID accountId, + @NonNull final TokenID tokenId, + @NonNull final ReadableTokenRelationStore tokenRelStore) { + requireNonNull(accountId); + requireNonNull(tokenId); + requireNonNull(tokenRelStore); + + final var tokenRel = tokenRelStore.get(accountId, tokenId); + validateTrue(tokenRel != null, TOKEN_NOT_ASSOCIATED_TO_ACCOUNT); + + return tokenRel; + } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java index 89019eda52a9..b9d436a57d41 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java @@ -20,11 +20,13 @@ import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; import static java.util.Objects.requireNonNull; +import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.config.ConfigProvider; import com.hedera.node.config.data.TokensConfig; import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.List; +import java.util.function.ToIntFunction; import javax.inject.Inject; /** @@ -39,34 +41,64 @@ public TokenSupplyChangeOpsValidator(@NonNull final ConfigProvider configProvide } /** - * Validate the token operations mint/wipe/burn given the attributes of the transaction. 
+ * Validate the transaction data for a token mint operation + * + * @param fungibleCount the number of fungible tokens to mint + * @param metaDataList the list of metadata for the NFTs to mint + * @throws HandleException if the transaction data is invalid + */ + public void validateMint(final long fungibleCount, final List metaDataList) { + final var numNfts = metaDataList.size(); + validateCommon(fungibleCount, numNfts, TokensConfig::nftsMaxBatchSizeMint); + + final var tokensConfig = configProvider.getConfiguration().getConfigData(TokensConfig.class); + final var maxNftMetadataBytes = tokensConfig.nftsMaxMetadataBytes(); + if (fungibleCount <= 0 && numNfts > 0) { + validateMetaData(metaDataList, maxNftMetadataBytes); + } + } + + /** + * Validate the transaction data for a token mint operation + * + * @param fungibleCount the number of fungible tokens to burn + * @param nftSerialNums the list of NFT serial numbers to burn + * @throws HandleException if the transaction data is invalid + */ + public void validateBurn(final long fungibleCount, final List nftSerialNums) { + validateCommon(fungibleCount, nftSerialNums.size(), TokensConfig::nftsMaxBatchSizeBurn); + } + + @SuppressWarnings("unused") + // @future('6389'): This method will be used when token wipe is implemented + public void validateWipe(final long fungibleCount, final List nftSerialNums) { + validateCommon(fungibleCount, nftSerialNums.size(), TokensConfig::nftsMaxBatchSizeWipe); + } + + /** + * Perform common validation checks for token operations mint, wipe, and burn given the attributes of the transaction. * * @param fungibleCount The number of fungible common token to mint/wipe/burn. - * @param metaDataList either metadata of the nfts being minted or serialNumber list of the - * burn/wipe operations. + * @param nftCount the number of NFTs the operation will be performed on. + * @param batchSizeGetter The function to get the corresponding batch size for the token operation. */ - public void validate(final long fungibleCount, final List metaDataList) { - final var nftCount = metaDataList.size(); + private void validateCommon( + final long fungibleCount, final int nftCount, @NonNull final ToIntFunction batchSizeGetter) { final var tokensConfig = configProvider.getConfiguration().getConfigData(TokensConfig.class); // Get needed configurations - final var maxNftMintBatchSize = tokensConfig.nftsMaxBatchSizeMint(); final var nftsAreEnabled = tokensConfig.nftsAreEnabled(); - final var maxNftMetadataBytes = tokensConfig.nftsMaxMetadataBytes(); + final var maxNftBatchOpSize = batchSizeGetter.applyAsInt(tokensConfig); // validate nft count and fungible count are valid - validateCounts(nftCount, fungibleCount, nftsAreEnabled, maxNftMintBatchSize); - // validate metadata length if only nft count is set - if (fungibleCount <= 0 && nftCount > 0) { - validateMetaData(metaDataList, maxNftMetadataBytes); - } + validateCounts(nftCount, fungibleCount, nftsAreEnabled, maxNftBatchOpSize); } /** - * Validate the fungible amount and metadata size for token operations mint/burn. + * Validate the fungible amount and metadata size for a token mint or burn operation. * @param nftCount The number of nfts to mint/burn. * @param fungibleCount The amount of fungible common token to mint/burn. - * @param nftsAreEnabled Whether nfts are enabled based on config. - * @param maxBatchSize The max batch size for nft mint based on config. + * @param nftsAreEnabled Whether nfts are enabled (based on config). 
+ * @param maxBatchSize The max batch size for the nft operation (based on config). */ private void validateCounts( final int nftCount, final long fungibleCount, final boolean nftsAreEnabled, final long maxBatchSize) { diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableNftStoreTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableNftStoreTest.java index 87b37674ec60..7efcaa0ba553 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableNftStoreTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableNftStoreTest.java @@ -23,6 +23,7 @@ import com.hedera.hapi.node.state.common.UniqueTokenId; import com.hedera.hapi.node.state.token.Nft; import com.hedera.node.app.service.token.impl.WritableNftStore; +import com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler; import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoTokenHandlerTestBase; import java.util.Collections; import java.util.Set; @@ -109,4 +110,46 @@ void getsSizeOfState() { assertEquals(1, writableNftStore.sizeOfState()); assertEquals(Set.of(id), writableNftStore.modifiedNfts()); } + + @Test + void removesByUniqueTokenId() { + // Set up the NFT state with an existing NFT + final var nftToRemove = + UniqueTokenId.newBuilder().tokenTypeNumber(1).serialNumber(1).build(); + writableNftState = emptyWritableNftStateBuilder() + .value( + nftToRemove, + Nft.newBuilder().id(nftToRemove).ownerNumber(12345).build()) + .build(); + assertTrue(writableNftState.contains(nftToRemove)); + given(writableStates.get(NFTS)).willReturn(writableNftState); + writableNftStore = new WritableNftStore(writableStates); + assertNotNull(writableNftStore.get(nftToRemove)); + + writableNftStore.remove(nftToRemove); + + // Assert the NFT is removed + assertNull(writableNftStore.get(nftToRemove)); + } + + @Test + void removesByTokenIdAndSerialNum() { + // Set up the NFT state with an existing NFT + final var nftToRemove = + UniqueTokenId.newBuilder().tokenTypeNumber(1).serialNumber(1).build(); + writableNftState = emptyWritableNftStateBuilder() + .value( + nftToRemove, + Nft.newBuilder().id(nftToRemove).ownerNumber(12345).build()) + .build(); + assertTrue(writableNftState.contains(nftToRemove)); + given(writableStates.get(NFTS)).willReturn(writableNftState); + writableNftStore = new WritableNftStore(writableStates); + assertNotNull(writableNftStore.get(nftToRemove)); + + writableNftStore.remove(BaseTokenHandler.asToken(nftToRemove.tokenTypeNumber()), nftToRemove.serialNumber()); + + // Assert the NFT is removed + assertNull(writableNftStore.get(nftToRemove)); + } } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerParityTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerParityTest.java deleted file mode 100644 index 0b53f2a33fbe..000000000000 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerParityTest.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.node.app.service.token.impl.test.handlers; - -import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; -import static com.hedera.node.app.spi.fixtures.Assertions.assertThrowsPreCheck; -import static com.hedera.test.factories.scenarios.TokenBurnScenarios.BURN_FOR_TOKEN_WITHOUT_SUPPLY; -import static com.hedera.test.factories.scenarios.TokenBurnScenarios.BURN_WITH_MISSING_TOKEN; -import static com.hedera.test.factories.scenarios.TokenBurnScenarios.BURN_WITH_SUPPLY_KEYED_TOKEN; -import static com.hedera.test.factories.scenarios.TxnHandlingScenario.TOKEN_SUPPLY_KT; -import static com.hedera.test.factories.txns.SignedTxnFactory.DEFAULT_PAYER_KT; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.contains; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.hedera.node.app.service.token.ReadableTokenStore; -import com.hedera.node.app.service.token.impl.handlers.TokenBurnHandler; -import com.hedera.node.app.service.token.impl.test.handlers.util.ParityTestBase; -import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; -import com.hedera.node.app.spi.workflows.PreCheckException; -import org.junit.jupiter.api.Test; - -class TokenBurnHandlerParityTest extends ParityTestBase { - private final TokenBurnHandler subject = new TokenBurnHandler(); - - @Test - void getsTokenBurnWithValidId() throws PreCheckException { - final var theTxn = txnFrom(BURN_WITH_SUPPLY_KEYED_TOKEN); - - final var context = new FakePreHandleContext(readableAccountStore, theTxn); - context.registerStore(ReadableTokenStore.class, readableTokenStore); - subject.preHandle(context); - - assertEquals(context.payerKey(), DEFAULT_PAYER_KT.asPbjKey()); - assertEquals(1, context.requiredNonPayerKeys().size()); - assertThat(context.requiredNonPayerKeys(), contains(TOKEN_SUPPLY_KT.asPbjKey())); - } - - @Test - void getsTokenBurnWithMissingToken() throws PreCheckException { - final var theTxn = txnFrom(BURN_WITH_MISSING_TOKEN); - - final var context = new FakePreHandleContext(readableAccountStore, theTxn); - context.registerStore(ReadableTokenStore.class, readableTokenStore); - assertThrowsPreCheck(() -> subject.preHandle(context), INVALID_TOKEN_ID); - } - - @Test - void getsTokenBurnWithoutSupplyKey() throws PreCheckException { - final var theTxn = txnFrom(BURN_FOR_TOKEN_WITHOUT_SUPPLY); - - final var context = new FakePreHandleContext(readableAccountStore, theTxn); - context.registerStore(ReadableTokenStore.class, readableTokenStore); - subject.preHandle(context); - - assertEquals(context.payerKey(), DEFAULT_PAYER_KT.asPbjKey()); - assertEquals(0, context.requiredNonPayerKeys().size()); - } -} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java new file mode 100644 index 000000000000..0fe7344b7b3b --- /dev/null +++ 
b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java @@ -0,0 +1,888 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.BATCH_SIZE_LIMIT_EXCEEDED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.FAIL_INVALID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INSUFFICIENT_TOKEN_BALANCE; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_NFT_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_BURN_AMOUNT; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_BURN_METADATA; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TRANSACTION_BODY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TREASURY_ACCOUNT_FOR_TOKEN; +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_SUPPLY_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_IS_PAUSED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_NOT_ASSOCIATED_TO_ACCOUNT; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_WAS_DELETED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TREASURY_MUST_OWN_BURNED_NFT; +import static com.hedera.node.app.service.token.impl.TokenServiceImpl.ACCOUNTS_KEY; +import static com.hedera.node.app.spi.fixtures.Assertions.assertThrowsPreCheck; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static com.hedera.test.factories.scenarios.TokenBurnScenarios.BURN_FOR_TOKEN_WITHOUT_SUPPLY; +import static com.hedera.test.factories.scenarios.TokenBurnScenarios.BURN_WITH_MISSING_TOKEN; +import static com.hedera.test.factories.scenarios.TokenBurnScenarios.BURN_WITH_SUPPLY_KEYED_TOKEN; +import static com.hedera.test.factories.scenarios.TxnHandlingScenario.TOKEN_SUPPLY_KT; +import static com.hedera.test.factories.txns.SignedTxnFactory.DEFAULT_PAYER_KT; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.lenient; +import static org.mockito.Mockito.mock; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TokenType; +import com.hedera.hapi.node.base.TransactionID; +import com.hedera.hapi.node.state.common.UniqueTokenId; +import com.hedera.hapi.node.state.token.Account; +import com.hedera.hapi.node.state.token.Nft; +import 
com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.state.token.TokenRelation; +import com.hedera.hapi.node.token.TokenBurnTransactionBody; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.service.mono.utils.EntityNumPair; +import com.hedera.node.app.service.token.ReadableTokenStore; +import com.hedera.node.app.service.token.impl.TokenServiceImpl; +import com.hedera.node.app.service.token.impl.WritableAccountStore; +import com.hedera.node.app.service.token.impl.WritableNftStore; +import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; +import com.hedera.node.app.service.token.impl.WritableTokenStore; +import com.hedera.node.app.service.token.impl.handlers.TokenBurnHandler; +import com.hedera.node.app.service.token.impl.test.handlers.util.ParityTestBase; +import com.hedera.node.app.service.token.impl.util.IdConvenienceUtils; +import com.hedera.node.app.service.token.impl.validators.TokenSupplyChangeOpsValidator; +import com.hedera.node.app.spi.fixtures.state.MapWritableKVState; +import com.hedera.node.app.spi.fixtures.state.MapWritableStates; +import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.VersionedConfiguration; +import com.hedera.node.config.data.TokensConfig; +import java.util.HashMap; +import java.util.Map; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class TokenBurnHandlerTest extends ParityTestBase { + private static final AccountID ACCOUNT_1339 = IdConvenienceUtils.fromAccountNum(1339); + private static final TokenID TOKEN_123 = IdConvenienceUtils.fromTokenNum(123); + + private ConfigProvider configProvider = mock(ConfigProvider.class); + private TokenSupplyChangeOpsValidator validator = new TokenSupplyChangeOpsValidator(configProvider); + private final TokenBurnHandler subject = new TokenBurnHandler(validator); + + @Nested + class PureChecks { + @Test + void noTokenPresent() { + final var txn = newBurnTxn(null, 1); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_TOKEN_ID)); + } + + @Test + void fungibleAndNonFungibleGiven() { + final var txn = newBurnTxn(TOKEN_123, 1, 1L); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_TRANSACTION_BODY)); + } + + @Test + void nonPositiveFungibleAmountGiven() { + final var txn = newBurnTxn(TOKEN_123, -1); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_TOKEN_BURN_AMOUNT)); + } + + @Test + void invalidNftSerialNumber() { + final var txn = newBurnTxn(TOKEN_123, 0, 1L, 2L, 0L); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_NFT_ID)); + } + } + + @Nested + // Tests that check prehandle parity with old prehandle code + class PreHandle { + @SuppressWarnings("DataFlowIssue") + @Test + void nullArgsThrows() { + assertThatThrownBy(() -> 
subject.preHandle(null)).isInstanceOf(NullPointerException.class); + } + + @Test + void parity_getsTokenBurnWithValidId() throws PreCheckException { + final var theTxn = txnFrom(BURN_WITH_SUPPLY_KEYED_TOKEN); + + final var context = new FakePreHandleContext(readableAccountStore, theTxn); + context.registerStore(ReadableTokenStore.class, readableTokenStore); + subject.preHandle(context); + + assertEquals(context.payerKey(), DEFAULT_PAYER_KT.asPbjKey()); + assertEquals(1, context.requiredNonPayerKeys().size()); + assertThat(context.requiredNonPayerKeys(), contains(TOKEN_SUPPLY_KT.asPbjKey())); + } + + @Test + void parity_getsTokenBurnWithMissingToken() throws PreCheckException { + final var theTxn = txnFrom(BURN_WITH_MISSING_TOKEN); + + final var context = new FakePreHandleContext(readableAccountStore, theTxn); + context.registerStore(ReadableTokenStore.class, readableTokenStore); + assertThrowsPreCheck(() -> subject.preHandle(context), INVALID_TOKEN_ID); + } + + @Test + void parity_getsTokenBurnWithoutSupplyKey() throws PreCheckException { + final var theTxn = txnFrom(BURN_FOR_TOKEN_WITHOUT_SUPPLY); + + final var context = new FakePreHandleContext(readableAccountStore, theTxn); + context.registerStore(ReadableTokenStore.class, readableTokenStore); + subject.preHandle(context); + + assertEquals(context.payerKey(), DEFAULT_PAYER_KT.asPbjKey()); + assertEquals(0, context.requiredNonPayerKeys().size()); + } + } + + @Nested + class Handle { + private WritableTokenStore writableTokenStore; + private WritableNftStore writableNftStore; + + @SuppressWarnings("DataFlowIssue") + @Test + void nullArg() { + assertThatThrownBy(() -> subject.handle(null)).isInstanceOf(NullPointerException.class); + } + + @Test + void invalidFungibleAmount() { + mockConfig(); + final var txn = newBurnTxn(TOKEN_123, -1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_TOKEN_BURN_AMOUNT)); + } + + @Test + void tokenIdNotFound() { + mockConfig(); + writableTokenStore = newWritableStoreWithTokens(); + final var txn = newBurnTxn(IdConvenienceUtils.fromTokenNum(999), 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_TOKEN_ID)); + } + + @Test + void tokenIsDeleted() { + mockConfig(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.FUNGIBLE_COMMON) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .deleted(true) // Intentionally deleted + .build()); + + final var txn = newBurnTxn(TOKEN_123, 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_WAS_DELETED)); + } + + @Test + void tokenIsPaused() { + mockConfig(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .paused(true) // Intentionally paused + .build()); + final var txn = newBurnTxn(TOKEN_123, 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_IS_PAUSED)); + } + + @Test + void tokenDoesntHaveSupplyKey() { 
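+            // With the supply key intentionally left null, the burn cannot change supply and
+            // must fail with TOKEN_HAS_NO_SUPPLY_KEY.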
+ mockConfig(); + final var totalFungibleSupply = 5; + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.FUNGIBLE_COMMON) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey((Key) null) // Intentionally missing supply key + .totalSupply(totalFungibleSupply) + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenNumber(TOKEN_123.tokenNum()) + .balance(totalFungibleSupply) + .build()); + final var txn = newBurnTxn(TOKEN_123, totalFungibleSupply + 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_HAS_NO_SUPPLY_KEY)); + } + + @Test + void tokenTreasuryRelDoesntExist() { + mockConfig(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .build()); + // Intentionally has no token rels: + writableTokenRelStore = newWritableStoreWithTokenRels(); + final var txn = newBurnTxn(TOKEN_123, 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_NOT_ASSOCIATED_TO_ACCOUNT)); + } + + @Test + void fungibleTokenTreasuryAccountDoesntExist() { + mockConfig(); + // Intentionally has no treasury account: + writableAccountStore = newWritableStoreWithAccounts(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.FUNGIBLE_COMMON) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .totalSupply(10) + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenNumber(TOKEN_123.tokenNum()) + .balance(10) + .build()); + final var txn = newBurnTxn(TOKEN_123, 10); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_TREASURY_ACCOUNT_FOR_TOKEN)); + } + + @Test + void fungibleAmountExceedsSupply() { + mockConfig(); + final var totalFungibleSupply = 5; + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.FUNGIBLE_COMMON) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .totalSupply(totalFungibleSupply) + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenNumber(TOKEN_123.tokenNum()) + .balance(totalFungibleSupply) + .build()); + final var txn = newBurnTxn(TOKEN_123, totalFungibleSupply + 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_TOKEN_BURN_AMOUNT)); + } + + @Test + void fungibleAmountExceedsBalance() { + mockConfig(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.FUNGIBLE_COMMON) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + 
.supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .totalSupply(10) + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenNumber(TOKEN_123.tokenNum()) + .balance(8) + .build()); + // The token treasury has a balance of 8. The token supply is 10, so a fungible amount of 9 exceed the total + // supply of available tokens, but 9 is one more than the current treasury balance, so this scenario should + // throw an exception + final var txn = newBurnTxn(TOKEN_123, 9); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INSUFFICIENT_TOKEN_BALANCE)); + } + + @Test + void fungibleAmountBurnedWithLeftoverTreasuryBalance() { + mockConfig(); + writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .numberTreasuryTitles(1) + .numberPositiveBalances(1) + .build()); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.FUNGIBLE_COMMON) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .totalSupply(10) + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenNumber(TOKEN_123.tokenNum()) + .balance(9) + .build()); + final var txn = newBurnTxn(TOKEN_123, 8); + final var context = mockContext(txn); + + subject.handle(context); + + final var updatedToken = writableTokenStore.get(TOKEN_123); + // Total supply of 10 is reduced by the burn of 8 units, so the new total supply should be 2 + Assertions.assertThat(updatedToken.totalSupply()).isEqualTo(2); + final var updatedTreasuryRel = writableTokenRelStore.get(ACCOUNT_1339, TOKEN_123); + Assertions.assertThat(updatedTreasuryRel.balance()).isEqualTo(1); + final var updatedTreasuryAcct = writableAccountStore.get(ACCOUNT_1339); + Assertions.assertThat(updatedTreasuryAcct.numberTreasuryTitles()).isEqualTo(1); + // There is still a positive balance in the treasury account, so its positive balances shouldn't change + Assertions.assertThat(updatedTreasuryAcct.numberPositiveBalances()).isEqualTo(1); + } + + @Test + void fungibleAmountBurnedWithZeroTreasuryBalance() { + mockConfig(); + writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .numberTreasuryTitles(1) + .numberPositiveBalances(1) + .build()); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.FUNGIBLE_COMMON) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .totalSupply(10) + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenNumber(TOKEN_123.tokenNum()) + .balance(8) + .build()); + final var txn = newBurnTxn(TOKEN_123, 8); + final var context = mockContext(txn); + + subject.handle(context); + + final var updatedToken = writableTokenStore.get(TOKEN_123); + // Total supply of 10 is reduced by the burn of 8 units, so the new total supply should be 2 + Assertions.assertThat(updatedToken.totalSupply()).isEqualTo(2); + final var updatedTreasuryRel = writableTokenRelStore.get(ACCOUNT_1339, TOKEN_123); + 
Assertions.assertThat(updatedTreasuryRel.balance()).isZero(); + final var updatedTreasuryAcct = writableAccountStore.get(ACCOUNT_1339); + // The treasury account is still listed as the treasury for the token, so its number of treasury titles + // shouldn't decrease + Assertions.assertThat(updatedTreasuryAcct.numberTreasuryTitles()).isEqualTo(1); + // There is no balance left in the treasury account, so its positive balances should be reduced + Assertions.assertThat(updatedTreasuryAcct.numberPositiveBalances()).isZero(); + } + + @Test + void nftsGivenButNotEnabled() { + mockConfig(100, false, 100); + validator = new TokenSupplyChangeOpsValidator(configProvider); + + final var txn = newBurnTxn(TOKEN_123, 0, 1L); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(NOT_SUPPORTED)); + } + + @Test + void nftSerialCountExceedsBatchSize() { + mockConfig(1, true, 100); + validator = new TokenSupplyChangeOpsValidator(configProvider); + + final var txn = newBurnTxn(TOKEN_123, 0, 1L, 2L); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(BATCH_SIZE_LIMIT_EXCEEDED)); + } + + @Test + void invalidNftSerial() { + mockConfig(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .totalSupply(10) + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenNumber(TOKEN_123.tokenNum()) + .build()); + writableNftStore = newWritableStoreWithNfts(); + final var txn = newBurnTxn(TOKEN_123, 0, -1L); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_NFT_ID)); + } + + @Test + void nftSerialNotFound() { + mockConfig(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .totalSupply(10) + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenNumber(TOKEN_123.tokenNum()) + .balance(10) + .build()); + writableNftStore = new WritableNftStore(new MapWritableStates( + Map.of("NFTS", MapWritableKVState.builder("NFTS").build()))); + + final var txn = newBurnTxn(TOKEN_123, 0, 1L); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_NFT_ID)); + } + + @Test + void nftSerialNumsEmpty() { + mockConfig(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .totalSupply(10) + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenNumber(TOKEN_123.tokenNum()) + .balance(10) + .build()); + final var 
txn = newBurnTxn(TOKEN_123, 0); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_TOKEN_BURN_METADATA)); + } + + @Test + void nftNotOwnedByTreasury() { + mockConfig(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .totalSupply(10) + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenNumber(TOKEN_123.tokenNum()) + .balance(10) + .build()); + // this owner number isn't the treasury + writableNftStore = newWritableStoreWithNfts(Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_123.tokenNum()) + .serialNumber(1L) + .build()) + .ownerNumber(999) + .build()); + + final var txn = newBurnTxn(TOKEN_123, 0, 1L); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(TREASURY_MUST_OWN_BURNED_NFT)); + } + + @Test + void nftTreasuryAccountDoesntExist() { + mockConfig(); + // Intentionally has no treasury account: + writableAccountStore = newWritableStoreWithAccounts(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .totalSupply(10) + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenNumber(TOKEN_123.tokenNum()) + .balance(10) + .build()); + writableNftStore = newWritableStoreWithNfts(Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_123.tokenNum()) + .serialNumber(1L) + .build()) + .ownerNumber(0) + .build()); + final var txn = newBurnTxn(TOKEN_123, 0, 1L); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_TREASURY_ACCOUNT_FOR_TOKEN)); + } + + @Test + void numNftSerialsExceedsNftSupply() { + mockConfig(); + writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .numberTreasuryTitles(1) + .numberPositiveBalances(1) + .build()); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .totalSupply(1) + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenNumber(TOKEN_123.tokenNum()) + .balance(1) + .build()); + writableNftStore = newWritableStoreWithNfts( + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_123.tokenNum()) + .serialNumber(1L) + .build()) + .ownerNumber(0) + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_123.tokenNum()) + .serialNumber(2L) + .build()) + .ownerNumber(0) + .build()); + final var txn = newBurnTxn(TOKEN_123, 0, 1L, 2L); + final var context = mockContext(txn); + + 
Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(FAIL_INVALID)); + } + + @Test + void nftSerialsBurnedWithLeftoverTreasuryBalance() { + mockConfig(); + writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .numberTreasuryTitles(1) + .numberPositiveBalances(1) + .numberOwnedNfts(3) + .build()); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .totalSupply(3) + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenNumber(TOKEN_123.tokenNum()) + .balance(3) + .build()); + writableNftStore = newWritableStoreWithNfts( + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_123.tokenNum()) + .serialNumber(1L) + .build()) + .ownerNumber(0) + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_123.tokenNum()) + .serialNumber(2L) + .build()) + .ownerNumber(0) + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_123.tokenNum()) + .serialNumber(3L) + .build()) + .ownerNumber(0) + .build()); + final var txn = newBurnTxn(TOKEN_123, 0, 1L, 2L); + final var context = mockContext(txn); + + subject.handle(context); + final var treasuryAcct = writableAccountStore.get(ACCOUNT_1339); + Assertions.assertThat(treasuryAcct.numberTreasuryTitles()).isEqualTo(1); + // The treasury still owns at least one of the NFTs, so its positive balances shouldn't change + Assertions.assertThat(treasuryAcct.numberPositiveBalances()).isEqualTo(1); + Assertions.assertThat(treasuryAcct.numberOwnedNfts()).isEqualTo(1); + final var treasuryRel = writableTokenRelStore.get(ACCOUNT_1339, TOKEN_123); + Assertions.assertThat(treasuryRel.balance()).isEqualTo(1); + Assertions.assertThat(writableNftStore.get(TOKEN_123, 1L)).isNull(); + Assertions.assertThat(writableNftStore.get(TOKEN_123, 2L)).isNull(); + // Only serials 1 and 2 were removed, not serial 3 + Assertions.assertThat(writableNftStore.get(TOKEN_123, 3L)).isNotNull(); + } + + @Test + void nftSerialsBurnedWithNoLeftoverTreasuryBalance() { + mockConfig(); + writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .numberTreasuryTitles(1) + .numberPositiveBalances(1) + .numberOwnedNfts(3) + .build()); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .totalSupply(10) + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenNumber(TOKEN_123.tokenNum()) + .balance(3) + .build()); + writableNftStore = newWritableStoreWithNfts( + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_123.tokenNum()) + .serialNumber(1L) + .build()) + .ownerNumber(0) + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_123.tokenNum()) + .serialNumber(2L) + .build()) + .ownerNumber(0) + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + 
.tokenTypeNumber(TOKEN_123.tokenNum()) + .serialNumber(3L) + .build()) + .ownerNumber(0) + .build()); + final var txn = newBurnTxn(TOKEN_123, 0, 1L, 2L, 3L); + final var context = mockContext(txn); + + subject.handle(context); + final var treasuryAcct = writableAccountStore.get(ACCOUNT_1339); + Assertions.assertThat(treasuryAcct).isNotNull(); + Assertions.assertThat(treasuryAcct.numberTreasuryTitles()).isEqualTo(1); + // The treasury no longer owns any NFTs, so its positive balances should decrease by 1 + Assertions.assertThat(treasuryAcct.numberPositiveBalances()).isZero(); + Assertions.assertThat(treasuryAcct.numberOwnedNfts()).isZero(); + final var treasuryRel = writableTokenRelStore.get(ACCOUNT_1339, TOKEN_123); + Assertions.assertThat(treasuryRel).isNotNull(); + Assertions.assertThat(treasuryRel.balance()).isZero(); + Assertions.assertThat(writableNftStore.get(TOKEN_123, 1L)).isNull(); + Assertions.assertThat(writableNftStore.get(TOKEN_123, 2L)).isNull(); + Assertions.assertThat(writableNftStore.get(TOKEN_123, 3L)).isNull(); + } + + @Test + void duplicateNftSerials() { + // This is a success case, and should be identical to the case with no duplicates above + mockConfig(); + writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .numberTreasuryTitles(1) + .numberPositiveBalances(1) + .build()); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() + .tokenNumber(TOKEN_123.tokenNum()) + .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .treasuryAccountNumber(ACCOUNT_1339.accountNumOrThrow()) + .supplyKey(TOKEN_SUPPLY_KT.asPbjKey()) + .totalSupply(10) + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(TokenRelation.newBuilder() + .accountNumber(ACCOUNT_1339.accountNumOrThrow()) + .tokenNumber(TOKEN_123.tokenNum()) + .balance(3) + .build()); + writableNftStore = newWritableStoreWithNfts( + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_123.tokenNum()) + .serialNumber(1L) + .build()) + .ownerNumber(0) + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_123.tokenNum()) + .serialNumber(2L) + .build()) + .ownerNumber(0) + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_123.tokenNum()) + .serialNumber(3L) + .build()) + .ownerNumber(0) + .build()); + final var txn = newBurnTxn(TOKEN_123, 0, 1L, 2L, 3L, 1L, 2L, 3L, 3L, 1L, 1L, 2L); + final var context = mockContext(txn); + + subject.handle(context); + final var treasuryAcct = writableAccountStore.get(ACCOUNT_1339); + Assertions.assertThat(treasuryAcct).isNotNull(); + Assertions.assertThat(treasuryAcct.numberTreasuryTitles()).isEqualTo(1); + // The treasury no longer owns any NFTs, so its positive balances should decrease by 1 + Assertions.assertThat(treasuryAcct.numberPositiveBalances()).isZero(); + final var treasuryRel = writableTokenRelStore.get(ACCOUNT_1339, TOKEN_123); + Assertions.assertThat(treasuryRel).isNotNull(); + Assertions.assertThat(treasuryRel.balance()).isZero(); + Assertions.assertThat(writableNftStore.get(TOKEN_123, 1L)).isNull(); + Assertions.assertThat(writableNftStore.get(TOKEN_123, 2L)).isNull(); + Assertions.assertThat(writableNftStore.get(TOKEN_123, 3L)).isNull(); + } + + private HandleContext mockContext(TransactionBody txn) { + final var context = mock(HandleContext.class); + + given(context.body()).willReturn(txn); + + given(context.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); +
given(context.writableStore(WritableTokenStore.class)).willReturn(writableTokenStore); + given(context.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); + given(context.writableStore(WritableNftStore.class)).willReturn(writableNftStore); + + return context; + } + + private void mockConfig() { + mockConfig(100, true, 100); + } + + private void mockConfig(final int maxBatchSize, final boolean nftsEnabled, final int maxMetadataBytes) { + final var mockTokensConfig = mock(TokensConfig.class); + lenient().when(mockTokensConfig.nftsMaxBatchSizeBurn()).thenReturn(maxBatchSize); + lenient().when(mockTokensConfig.nftsAreEnabled()).thenReturn(nftsEnabled); + lenient().when(mockTokensConfig.nftsMaxMetadataBytes()).thenReturn(maxMetadataBytes); + + final var mockConfig = mock(VersionedConfiguration.class); + lenient().when(mockConfig.getConfigData(TokensConfig.class)).thenReturn(mockTokensConfig); + + given(configProvider.getConfiguration()).willReturn(mockConfig); + } + + protected WritableTokenRelationStore newWritableStoreWithTokenRels(final TokenRelation... tokenRels) { + final var backingMap = new HashMap(); + for (final TokenRelation tokenRel : tokenRels) { + backingMap.put(EntityNumPair.fromLongs(tokenRel.accountNumber(), tokenRel.tokenNumber()), tokenRel); + } + + final var wrappingState = new MapWritableKVState<>(ACCOUNTS_KEY, backingMap); + return new WritableTokenRelationStore( + new MapWritableStates(Map.of(TokenServiceImpl.TOKEN_RELS_KEY, wrappingState))); + } + + private WritableNftStore newWritableStoreWithNfts(Nft... nfts) { + final var nftStateBuilder = MapWritableKVState.builder(TokenServiceImpl.NFTS_KEY); + for (final Nft nft : nfts) { + nftStateBuilder.value(nft.id(), nft); + } + return new WritableNftStore( + new MapWritableStates(Map.of(TokenServiceImpl.NFTS_KEY, nftStateBuilder.build()))); + } + } + + private TransactionBody newBurnTxn(TokenID token, long fungibleAmount, Long... 
nftSerialNums) { + TokenBurnTransactionBody.Builder burnTxnBodyBuilder = TokenBurnTransactionBody.newBuilder(); + if (token != null) burnTxnBodyBuilder.token(token); + burnTxnBodyBuilder.amount(fungibleAmount); + burnTxnBodyBuilder.serialNumbers(nftSerialNums); + return TransactionBody.newBuilder() + .transactionID( + TransactionID.newBuilder().accountID(ACCOUNT_1339).build()) + .tokenBurn(burnTxnBodyBuilder) + .build(); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDeleteHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDeleteHandlerTest.java index b930e286b3ac..b58509fb5593 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDeleteHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDeleteHandlerTest.java @@ -20,9 +20,6 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_IS_IMMUTABLE; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_IS_PAUSED; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_WAS_DELETED; -import static com.hedera.node.app.service.mono.pbj.PbjConverter.fromPbj; -import static com.hedera.node.app.service.token.impl.TokenServiceImpl.TOKENS_KEY; -import static com.hedera.node.app.service.token.impl.test.handlers.util.AdapterUtils.mockWritableStates; import static com.hedera.node.app.spi.fixtures.Assertions.assertThrowsPreCheck; import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; import static com.hedera.test.factories.scenarios.TokenDeleteScenarios.DELETE_WITH_KNOWN_TOKEN; @@ -41,7 +38,6 @@ import com.hedera.hapi.node.state.token.Token; import com.hedera.hapi.node.token.TokenDeleteTransactionBody; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.service.token.impl.WritableTokenStore; @@ -49,13 +45,10 @@ import com.hedera.node.app.service.token.impl.test.handlers.util.ParityTestBase; import com.hedera.node.app.service.token.impl.test.util.SigReqAdapterUtils; import com.hedera.node.app.service.token.impl.util.IdConvenienceUtils; -import com.hedera.node.app.spi.fixtures.state.MapWritableKVState; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; -import java.util.HashMap; -import java.util.Map; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; @@ -222,17 +215,6 @@ private HandleContext mockContext() { return context; } - - private WritableTokenStore newWritableStoreWithTokens(Token... 
tokens) { - final var backingMap = new HashMap(); - for (final Token token : tokens) { - backingMap.put( - EntityNum.fromTokenId(fromPbj(IdConvenienceUtils.fromTokenNum(token.tokenNumber()))), token); - } - - final var wrappingState = new MapWritableKVState<>(TOKENS_KEY, backingMap); - return new WritableTokenStore(mockWritableStates(Map.of(TOKENS_KEY, wrappingState))); - } } private TransactionBody newDissociateTxn(TokenID token) { diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java index bbd58d77a4d0..2cb2436e36cc 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java @@ -25,12 +25,6 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_ID_REPEATED_IN_TOKEN_LIST; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_IS_PAUSED; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_NOT_ASSOCIATED_TO_ACCOUNT; -import static com.hedera.node.app.service.mono.pbj.PbjConverter.fromPbj; -import static com.hedera.node.app.service.token.impl.TokenServiceImpl.ACCOUNTS_KEY; -import static com.hedera.node.app.service.token.impl.TokenServiceImpl.ALIASES_KEY; -import static com.hedera.node.app.service.token.impl.TokenServiceImpl.TOKENS_KEY; -import static com.hedera.node.app.service.token.impl.test.handlers.util.AdapterUtils.mockStates; -import static com.hedera.node.app.service.token.impl.test.handlers.util.AdapterUtils.mockWritableStates; import static com.hedera.node.app.spi.fixtures.Assertions.assertThrowsPreCheck; import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; import static com.hedera.test.factories.scenarios.TokenDissociateScenarios.TOKEN_DISSOCIATE_WITH_CUSTOM_PAYER_PAID_KNOWN_TARGET; @@ -60,24 +54,19 @@ import com.hedera.hapi.node.state.token.TokenRelation; import com.hedera.hapi.node.token.TokenDissociateTransactionBody; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.service.token.ReadableTokenStore; -import com.hedera.node.app.service.token.impl.ReadableTokenStoreImpl; import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; import com.hedera.node.app.service.token.impl.handlers.TokenDissociateFromAccountHandler; import com.hedera.node.app.service.token.impl.test.handlers.util.ParityTestBase; import com.hedera.node.app.service.token.impl.util.IdConvenienceUtils; -import com.hedera.node.app.spi.fixtures.state.MapWritableKVState; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; import com.hedera.node.app.spi.validation.ExpiryValidator; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; import java.time.Instant; -import java.util.HashMap; import java.util.List; -import java.util.Map; import org.assertj.core.api.Assertions; import org.hamcrest.Matchers; import org.junit.jupiter.api.Nested; @@ 
-586,28 +575,6 @@ void multipleTokenRelsAreRemoved() { final var token666Rel = writableTokenRelStore.get(ACCOUNT_1339, TOKEN_666_ID); Assertions.assertThat(token666Rel).isNull(); } - - private ReadableTokenStore newReadableStoreWithTokens(Token... tokens) { - final var backingMap = new HashMap(); - for (final Token token : tokens) { - backingMap.put( - EntityNum.fromTokenId(fromPbj(IdConvenienceUtils.fromTokenNum(token.tokenNumber()))), token); - } - - final var wrappingState = new MapWritableKVState<>(TOKENS_KEY, backingMap); - return new ReadableTokenStoreImpl(mockStates(Map.of(TOKENS_KEY, wrappingState))); - } - - private WritableAccountStore newWritableStoreWithAccounts(Account... accounts) { - final var backingMap = new HashMap(); - for (final Account account : accounts) { - backingMap.put(IdConvenienceUtils.fromAccountNum(account.accountNumber()), account); - } - - final var wrappingState = new MapWritableKVState<>(ACCOUNTS_KEY, backingMap); - return new WritableAccountStore(mockWritableStates(Map.of( - ACCOUNTS_KEY, wrappingState, ALIASES_KEY, new MapWritableKVState<>(ALIASES_KEY, new HashMap<>())))); - } } private HandleContext mockContext() { diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/ParityTestBase.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/ParityTestBase.java index 0bd73469bcc1..9e460d223304 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/ParityTestBase.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/ParityTestBase.java @@ -16,16 +16,32 @@ package com.hedera.node.app.service.token.impl.test.handlers.util; +import static com.hedera.node.app.service.mono.pbj.PbjConverter.fromPbj; import static com.hedera.node.app.service.mono.pbj.PbjConverter.toPbj; +import static com.hedera.node.app.service.token.impl.TokenServiceImpl.ACCOUNTS_KEY; +import static com.hedera.node.app.service.token.impl.TokenServiceImpl.ALIASES_KEY; +import static com.hedera.node.app.service.token.impl.TokenServiceImpl.TOKENS_KEY; +import static com.hedera.node.app.service.token.impl.test.handlers.util.AdapterUtils.mockStates; +import static com.hedera.node.app.service.token.impl.test.handlers.util.AdapterUtils.mockWritableStates; +import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.token.Account; +import com.hedera.hapi.node.state.token.Token; import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.ReadableTokenStore; +import com.hedera.node.app.service.token.impl.ReadableTokenStoreImpl; import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; +import com.hedera.node.app.service.token.impl.WritableTokenStore; import com.hedera.node.app.service.token.impl.test.util.SigReqAdapterUtils; +import com.hedera.node.app.service.token.impl.util.IdConvenienceUtils; +import com.hedera.node.app.spi.fixtures.state.MapWritableKVState; import com.hedera.test.factories.scenarios.TxnHandlingScenario; +import java.util.HashMap; +import java.util.Map; import org.junit.jupiter.api.BeforeEach; public class ParityTestBase { @@ 
-50,4 +66,34 @@ protected TransactionBody txnFrom(final TxnHandlingScenario scenario) { throw new RuntimeException(e); } } + + private MapWritableKVState newTokenStateFromTokens(Token... tokens) { + final var backingMap = new HashMap(); + for (final Token token : tokens) { + backingMap.put(EntityNum.fromTokenId(fromPbj(IdConvenienceUtils.fromTokenNum(token.tokenNumber()))), token); + } + + return new MapWritableKVState<>(TOKENS_KEY, backingMap); + } + + protected ReadableTokenStore newReadableStoreWithTokens(Token... tokens) { + final var wrappedState = newTokenStateFromTokens(tokens); + return new ReadableTokenStoreImpl(mockStates(Map.of(TOKENS_KEY, wrappedState))); + } + + protected WritableTokenStore newWritableStoreWithTokens(Token... tokens) { + final var wrappedState = newTokenStateFromTokens(tokens); + return new WritableTokenStore(mockWritableStates(Map.of(TOKENS_KEY, wrappedState))); + } + + protected WritableAccountStore newWritableStoreWithAccounts(Account... accounts) { + final var backingMap = new HashMap(); + for (final Account account : accounts) { + backingMap.put(IdConvenienceUtils.fromAccountNum(account.accountNumber()), account); + } + + final var wrappingState = new MapWritableKVState<>(ACCOUNTS_KEY, backingMap); + return new WritableAccountStore(mockWritableStates(Map.of( + ACCOUNTS_KEY, wrappingState, ALIASES_KEY, new MapWritableKVState<>(ALIASES_KEY, new HashMap<>())))); + } } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenHandlerHelperTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenHandlerHelperTest.java index 1ad84972a5fe..e5700f307a4e 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenHandlerHelperTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenHandlerHelperTest.java @@ -27,13 +27,13 @@ import com.hedera.hapi.node.base.TokenID; import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.state.token.TokenRelation; import com.hedera.node.app.service.token.ReadableAccountStore; +import com.hedera.node.app.service.token.ReadableTokenRelationStore; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.util.TokenHandlerHelper; import com.hedera.node.app.spi.validation.ExpiryValidator; import com.hedera.node.app.spi.workflows.HandleException; -import com.hedera.node.config.data.AutoRenewConfig; -import java.util.Set; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -47,17 +47,15 @@ class TokenHandlerHelperTest { AccountID.newBuilder().accountNum(2300L).build(); private static final TokenID TOKEN_ID_45 = TokenID.newBuilder().tokenNum(45).build(); - private static final AutoRenewConfig ALL_EXPIRY_DISABLED = new AutoRenewConfig(Set.of()); - private static final AutoRenewConfig CONTRACT_EXPIRY_ENABLED = new AutoRenewConfig(Set.of("CONTRACT")); - - private static final AutoRenewConfig ACCOUNT_EXPIRY_ENABLED = new AutoRenewConfig(Set.of("ACCOUNT")); - @Mock private ReadableAccountStore accountStore; @Mock private ReadableTokenStore tokenStore; + @Mock + private ReadableTokenRelationStore tokenRelStore; + @Mock private ExpiryValidator expiryValidator; @@ -260,4 +258,38 @@ void 
token_getIfUsable_usableToken() { final var result = getIfUsable(TOKEN_ID_45, tokenStore); Assertions.assertThat(result).isNotNull(); } + + @SuppressWarnings("DataFlowIssue") + @Test + void tokenRel_getIfUsable_nullArg() { + Assertions.assertThatThrownBy(() -> TokenHandlerHelper.getIfUsable(null, TOKEN_ID_45, tokenRelStore)) + .isInstanceOf(NullPointerException.class); + + Assertions.assertThatThrownBy(() -> getIfUsable(ACCT_2300, null, tokenRelStore)) + .isInstanceOf(NullPointerException.class); + + Assertions.assertThatThrownBy(() -> getIfUsable(ACCT_2300, TOKEN_ID_45, null)) + .isInstanceOf(NullPointerException.class); + } + + @Test + void tokenRel_getIfUsable_notFound() { + Assertions.assertThatThrownBy(() -> getIfUsable(ACCT_2300, TOKEN_ID_45, tokenRelStore)) + .isInstanceOf(HandleException.class) + .has(responseCode(ResponseCodeEnum.TOKEN_NOT_ASSOCIATED_TO_ACCOUNT)); + } + + @Test + void tokenRel_getIfUsable_usableTokenRel() { + BDDMockito.given(tokenRelStore.get(notNull(), notNull())) + .willReturn(TokenRelation.newBuilder() + .accountNumber(ACCT_2300.accountNumOrThrow()) + .tokenNumber(TOKEN_ID_45.tokenNum()) + .deleted(false) + .balance(0) + .build()); + + final var result = getIfUsable(ACCT_2300, TOKEN_ID_45, tokenRelStore); + Assertions.assertThat(result).isNotNull(); + } } From 7558ba5ef784ff51204a2b07b71ec32c3e25517f Mon Sep 17 00:00:00 2001 From: Iris Simon <122310714+iwsimon@users.noreply.github.com> Date: Tue, 13 Jun 2023 17:26:24 -0400 Subject: [PATCH 22/70] implement TokenGetInfoHandler (#7068) Signed-off-by: Iris Simon --- .../handlers/CryptoGetAccountInfoHandler.java | 16 +- .../impl/handlers/TokenGetInfoHandler.java | 131 ++++++- .../handlers/TokenGetInfoHandlerTest.java | 352 ++++++++++++++++++ 3 files changed, 488 insertions(+), 11 deletions(-) create mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetInfoHandlerTest.java diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountInfoHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountInfoHandler.java index 67e2dd7cb522..8bac82c58739 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountInfoHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetAccountInfoHandler.java @@ -20,7 +20,6 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.FAIL_INVALID; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; -import static com.hedera.hapi.node.base.ResponseType.ANSWER_ONLY; import static com.hedera.hapi.node.base.ResponseType.COST_ANSWER; import static com.hedera.hapi.node.base.TokenFreezeStatus.FREEZE_NOT_APPLICABLE; import static com.hedera.hapi.node.base.TokenFreezeStatus.FROZEN; @@ -123,12 +122,6 @@ public Response findResponse(@NonNull final QueryContext context, @NonNull final requireNonNull(context); requireNonNull(header); final var query = context.query(); - final var tokensConfig = context.configuration().getConfigData(TokensConfig.class); - final var ledgerConfig = context.configuration().getConfigData(LedgerConfig.class); - final var accountStore = context.createStore(ReadableAccountStore.class); - final var tokenRelationStore = 
context.createStore(ReadableTokenRelationStore.class); - final var tokenStore = context.createStore(ReadableTokenStore.class); - final var stakingInfoStore = context.createStore(ReadableStakingInfoStore.class); final var op = query.cryptoGetInfoOrThrow(); final var response = CryptoGetInfoResponse.newBuilder(); final var accountId = op.accountIDOrElse(AccountID.DEFAULT); @@ -136,6 +129,12 @@ public Response findResponse(@NonNull final QueryContext context, @NonNull final response.header(header); final var responseType = op.headerOrElse(QueryHeader.DEFAULT).responseType(); if (header.nodeTransactionPrecheckCode() == OK && responseType != COST_ANSWER) { + final var tokensConfig = context.configuration().getConfigData(TokensConfig.class); + final var ledgerConfig = context.configuration().getConfigData(LedgerConfig.class); + final var accountStore = context.createStore(ReadableAccountStore.class); + final var tokenRelationStore = context.createStore(ReadableTokenRelationStore.class); + final var tokenStore = context.createStore(ReadableTokenStore.class); + final var stakingInfoStore = context.createStore(ReadableStakingInfoStore.class); final var optionalInfo = infoForAccount( accountId, accountStore, @@ -160,8 +159,7 @@ public long estimatePendingRewards(Account account, @Nullable StakingNodeInfo st } else { response.header(ResponseHeader.newBuilder() .nodeTransactionPrecheckCode(FAIL_INVALID) - .responseType(ANSWER_ONLY) - .cost(0)); + .cost(0)); // FUTURE: from mono service, check in EET } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetInfoHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetInfoHandler.java index f8cae71675ac..dfe9e033736d 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetInfoHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetInfoHandler.java @@ -16,18 +16,41 @@ package com.hedera.node.app.service.token.impl.handlers; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; +import static com.hedera.hapi.node.base.ResponseType.COST_ANSWER; +import static com.hedera.hapi.node.base.TokenFreezeStatus.FREEZE_NOT_APPLICABLE; +import static com.hedera.hapi.node.base.TokenFreezeStatus.FROZEN; +import static com.hedera.hapi.node.base.TokenFreezeStatus.UNFROZEN; +import static com.hedera.hapi.node.base.TokenKycStatus.GRANTED; +import static com.hedera.hapi.node.base.TokenKycStatus.KYC_NOT_APPLICABLE; +import static com.hedera.hapi.node.base.TokenKycStatus.REVOKED; +import static com.hedera.hapi.node.base.TokenPauseStatus.PAUSED; +import static com.hedera.hapi.node.base.TokenPauseStatus.PAUSE_NOT_APPLICABLE; +import static com.hedera.hapi.node.base.TokenPauseStatus.UNPAUSED; +import static com.hedera.node.app.spi.key.KeyUtils.isEmpty; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateFalsePreCheck; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateTruePreCheck; import static java.util.Objects.requireNonNull; +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.Duration; import com.hedera.hapi.node.base.HederaFunctionality; import com.hedera.hapi.node.base.QueryHeader; import com.hedera.hapi.node.base.ResponseHeader; +import 
com.hedera.hapi.node.base.Timestamp; +import com.hedera.hapi.node.base.TokenID; import com.hedera.hapi.node.token.TokenGetInfoResponse; +import com.hedera.hapi.node.token.TokenInfo; import com.hedera.hapi.node.transaction.Query; import com.hedera.hapi.node.transaction.Response; +import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.spi.workflows.PaidQueryHandler; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.QueryContext; +import com.hedera.node.config.data.LedgerConfig; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Optional; import javax.inject.Inject; import javax.inject.Singleton; @@ -58,13 +81,117 @@ public Response createEmptyResponse(@NonNull final ResponseHeader header) { @Override public void validate(@NonNull final QueryContext context) throws PreCheckException { requireNonNull(context); - throw new UnsupportedOperationException("Not implemented"); + final var query = context.query(); + final var tokenStore = context.createStore(ReadableTokenStore.class); + final var op = query.tokenGetInfoOrThrow(); + validateTruePreCheck(op.hasToken(), INVALID_TOKEN_ID); + + final var token = tokenStore.get(requireNonNull(op.token())); + validateFalsePreCheck(token == null, INVALID_TOKEN_ID); } @Override public Response findResponse(@NonNull final QueryContext context, @NonNull final ResponseHeader header) { requireNonNull(context); requireNonNull(header); - throw new UnsupportedOperationException("Not implemented"); + final var query = context.query(); + final var config = context.configuration().getConfigData(LedgerConfig.class); + final var tokenStore = context.createStore(ReadableTokenStore.class); + final var op = query.tokenGetInfoOrThrow(); + final var response = TokenGetInfoResponse.newBuilder(); + final var tokenID = op.tokenOrElse(TokenID.DEFAULT); + + final var responseType = op.headerOrElse(QueryHeader.DEFAULT).responseType(); + response.header(header); + if (header.nodeTransactionPrecheckCode() == OK && responseType != COST_ANSWER) { + final var optionalInfo = infoForToken(tokenID, tokenStore, config); + if (optionalInfo.isPresent()) { + response.tokenInfo(optionalInfo.get()); + } else { + response.header(ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(INVALID_TOKEN_ID) + .cost(0)); // FUTURE: from mono service, check in EET + } + } + + return Response.newBuilder().tokenGetInfo(response).build(); + } + + /** + * Returns the {@link TokenInfo} for the given {@link TokenID}, if it exists. 
+ * + * @param tokenID + * the {@link TokenID} for which to return the {@link TokenInfo} + * @param readableTokenStore + * the {@link ReadableTokenStore} from which to retrieve the {@link TokenInfo} + * @param config + * the {@link LedgerConfig} containing the ledger ID + * @return the {@link TokenInfo} for the given {@link TokenID}, if it exists + */ + private Optional infoForToken( + @NonNull final TokenID tokenID, + @NonNull final ReadableTokenStore readableTokenStore, + @NonNull final LedgerConfig config) { + requireNonNull(tokenID); + requireNonNull(readableTokenStore); + requireNonNull(config); + + final var token = readableTokenStore.get(tokenID); + if (token == null) { + return Optional.empty(); + } else { + final var info = TokenInfo.newBuilder(); + info.ledgerId(config.id()); + info.tokenType(token.tokenType()); + info.supplyType(token.supplyType()); + info.tokenId(tokenID); + info.deleted(token.deleted()); + info.symbol(token.symbol()); + info.name(token.name()); + info.memo(token.memo()); + info.treasury(AccountID.newBuilder().accountNum(token.treasuryAccountNumber())); + info.totalSupply(token.totalSupply()); + info.maxSupply(token.maxSupply()); + info.decimals(token.decimals()); + info.expiry(Timestamp.newBuilder().seconds(token.expiry())); + if (!isEmpty(token.adminKey())) info.adminKey(token.adminKey()); + if (!isEmpty(token.supplyKey())) { + info.supplyKey(token.supplyKey()); + } + if (!isEmpty(token.wipeKey())) { + info.wipeKey(token.wipeKey()); + } + if (!isEmpty(token.feeScheduleKey())) { + info.feeScheduleKey(token.feeScheduleKey()); + } + + if (token.autoRenewAccountNumber() != 0) { + info.autoRenewAccount(AccountID.newBuilder().accountNum(token.autoRenewAccountNumber())); + info.autoRenewPeriod(Duration.newBuilder().seconds(token.autoRenewSecs())); + } + + if (!isEmpty(token.freezeKey())) { + info.freezeKey(token.freezeKey()); + info.defaultFreezeStatus(token.accountsFrozenByDefault() ? FROZEN : UNFROZEN); + } else { + info.defaultFreezeStatus(FREEZE_NOT_APPLICABLE); + } + if (!isEmpty(token.kycKey())) { + info.kycKey(token.kycKey()); + info.defaultKycStatus(token.accountsKycGrantedByDefault() ? GRANTED : REVOKED); + } else { + info.defaultKycStatus(KYC_NOT_APPLICABLE); + } + + if (!isEmpty(token.pauseKey())) { + info.pauseKey(token.pauseKey()); + info.pauseStatus(token.paused() ? PAUSED : UNPAUSED); + } else { + info.pauseStatus(PAUSE_NOT_APPLICABLE); + } + info.customFees(token.customFees()); + + return Optional.of(info.build()); + } } } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetInfoHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetInfoHandlerTest.java new file mode 100644 index 000000000000..125ed7ea0d3d --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetInfoHandlerTest.java @@ -0,0 +1,352 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseType.ANSWER_ONLY; +import static com.hedera.hapi.node.base.TokenFreezeStatus.FREEZE_NOT_APPLICABLE; +import static com.hedera.hapi.node.base.TokenFreezeStatus.FROZEN; +import static com.hedera.hapi.node.base.TokenFreezeStatus.UNFROZEN; +import static com.hedera.hapi.node.base.TokenKycStatus.GRANTED; +import static com.hedera.hapi.node.base.TokenKycStatus.KYC_NOT_APPLICABLE; +import static com.hedera.hapi.node.base.TokenKycStatus.REVOKED; +import static com.hedera.hapi.node.base.TokenPauseStatus.PAUSED; +import static com.hedera.hapi.node.base.TokenPauseStatus.PAUSE_NOT_APPLICABLE; +import static com.hedera.hapi.node.base.TokenPauseStatus.UNPAUSED; +import static com.hedera.node.app.service.token.impl.test.handlers.util.StateBuilderUtil.TOKENS; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static org.assertj.core.api.Assertions.assertThatCode; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mock.Strictness.LENIENT; +import static org.mockito.Mockito.when; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.Duration; +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.QueryHeader; +import com.hedera.hapi.node.base.ResponseCodeEnum; +import com.hedera.hapi.node.base.ResponseHeader; +import com.hedera.hapi.node.base.Timestamp; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.token.TokenGetInfoQuery; +import com.hedera.hapi.node.token.TokenGetInfoResponse; +import com.hedera.hapi.node.token.TokenInfo; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.node.app.service.mono.utils.EntityNum; +import com.hedera.node.app.service.token.ReadableTokenStore; +import com.hedera.node.app.service.token.impl.ReadableTokenStoreImpl; +import com.hedera.node.app.service.token.impl.handlers.TokenGetInfoHandler; +import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoTokenHandlerTestBase; +import com.hedera.node.app.spi.fixtures.state.MapReadableKVState; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.app.spi.workflows.QueryContext; +import com.hedera.node.config.converter.BytesConverter; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class TokenGetInfoHandlerTest extends CryptoTokenHandlerTestBase { + + @Mock(strictness = LENIENT) + private QueryContext context; + + private TokenGetInfoHandler subject; + + @BeforeEach + public void setUp() { + super.setUp(); + subject = new TokenGetInfoHandler(); + } + + @Test + void extractsHeader() { + final var query = createTokenGetInfoQuery(fungibleTokenId); + final var header = subject.extractHeader(query); + final var op = query.tokenGetInfoOrThrow(); + assertEquals(op.header(), 
header); + } + + @Test + void createsEmptyResponse() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.FAIL_FEE) + .build(); + final var response = subject.createEmptyResponse(responseHeader); + final var expectedResponse = Response.newBuilder() + .tokenGetInfo(TokenGetInfoResponse.newBuilder().header(responseHeader)) + .build(); + assertEquals(expectedResponse, response); + } + + @Test + void validatesQueryWhenValidToken() { + final var query = createTokenGetInfoQuery(fungibleTokenId); + given(context.query()).willReturn(query); + given(context.createStore(ReadableTokenStore.class)).willReturn(readableTokenStore); + + assertThatCode(() -> subject.validate(context)).doesNotThrowAnyException(); + } + + @Test + void validatesQueryIfInvalidToken() { + final var state = MapReadableKVState.builder(TOKENS).build(); + given(readableStates.get(TOKENS)).willReturn(state); + final var store = new ReadableTokenStoreImpl(readableStates); + + final var query = createTokenGetInfoQuery(fungibleTokenId); + when(context.query()).thenReturn(query); + when(context.createStore(ReadableTokenStore.class)).thenReturn(store); + + assertThatThrownBy(() -> subject.validate(context)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_TOKEN_ID)); + } + + @Test + void validatesQueryIfInvalidTokenInTrans() { + final var state = MapReadableKVState.builder(TOKENS).build(); + given(readableStates.get(TOKENS)).willReturn(state); + final var store = new ReadableTokenStoreImpl(readableStates); + + final var query = createEmptyTokenGetInfoQuery(); + when(context.query()).thenReturn(query); + when(context.createStore(ReadableTokenStore.class)).thenReturn(store); + + assertThatThrownBy(() -> subject.validate(context)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_TOKEN_ID)); + } + + @Test + void getsResponseIfFailedResponse() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.FAIL_FEE) + .build(); + + final var query = createTokenGetInfoQuery(fungibleTokenId); + when(context.query()).thenReturn(query); + when(context.createStore(ReadableTokenStore.class)).thenReturn(readableTokenStore); + + final var config = new HederaTestConfigBuilder() + .withValue("tokens.maxRelsPerInfoQuery", 1000) + .getOrCreateConfig(); + given(context.configuration()).willReturn(config); + + final var response = subject.findResponse(context, responseHeader); + final var op = response.tokenGetInfoOrThrow(); + assertEquals(ResponseCodeEnum.FAIL_FEE, op.header().nodeTransactionPrecheckCode()); + } + + @Test + void getsResponseIfInvalidToken() { + final var state = MapReadableKVState.builder(TOKENS).build(); + given(readableStates.get(TOKENS)).willReturn(state); + final var store = new ReadableTokenStoreImpl(readableStates); + + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.OK) + .build(); + + final var query = createTokenGetInfoQuery(fungibleTokenId); + when(context.query()).thenReturn(query); + when(context.createStore(ReadableTokenStore.class)).thenReturn(store); + + final var config = new HederaTestConfigBuilder() + .withValue("tokens.maxRelsPerInfoQuery", 1000) + .getOrCreateConfig(); + given(context.configuration()).willReturn(config); + + final var response = subject.findResponse(context, responseHeader); + final var op = response.tokenGetInfoOrThrow(); + assertNull(op.tokenInfo()); + 
assertEquals(ResponseCodeEnum.INVALID_TOKEN_ID, op.header().nodeTransactionPrecheckCode()); + } + + @Test + void getsResponseIfOkResponse() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.OK) + .build(); + final var expectedInfo = getExpectedInfo(); + + checkResponse(responseHeader, expectedInfo, readableTokenStore); + } + + @Test + void getsResponseIfOkWithAnswerOnlyHead() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.OK) + .responseType(ANSWER_ONLY) + .build(); + final var expectedInfo = getExpectedInfo(); + + checkResponse(responseHeader, expectedInfo, readableTokenStore); + } + + @Test + void getsResponseIfOkWithDefaultKey() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.OK) + .build(); + final var expectedInfo = getExpectInfoDefaultKeys(); + + fungibleToken = setFungibleTokenKeys(); + final var state = MapReadableKVState.builder(TOKENS) + .value(fungibleTokenNum, fungibleToken) + .build(); + given(readableStates.get(TOKENS)).willReturn(state); + final var store = new ReadableTokenStoreImpl(readableStates); + + checkResponse(responseHeader, expectedInfo, store); + } + + @Test + void getsResponseIfOkWithDefaultStatus() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.OK) + .build(); + final var expectedInfo = getExpectInfoDefaultStatus(); + + fungibleToken = setFungibleTokenDefaultStatus(); + final var state = MapReadableKVState.builder(TOKENS) + .value(fungibleTokenNum, fungibleToken) + .build(); + given(readableStates.get(TOKENS)).willReturn(state); + final var store = new ReadableTokenStoreImpl(readableStates); + + checkResponse(responseHeader, expectedInfo, store); + } + + private void checkResponse( + final ResponseHeader responseHeader, final TokenInfo expectedInfo, ReadableTokenStore readableTokenStore) { + final var query = createTokenGetInfoQuery(fungibleTokenId); + when(context.query()).thenReturn(query); + when(context.createStore(ReadableTokenStore.class)).thenReturn(readableTokenStore); + + final var config = + new HederaTestConfigBuilder().withValue("ledger.id", "0x03").getOrCreateConfig(); + given(context.configuration()).willReturn(config); + + final var response = subject.findResponse(context, responseHeader); + final var tokenInfoResponse = response.tokenGetInfoOrThrow(); + assertEquals(ResponseCodeEnum.OK, tokenInfoResponse.header().nodeTransactionPrecheckCode()); + assertEquals(expectedInfo, tokenInfoResponse.tokenInfo()); + } + + private TokenInfo getExpectedInfo() { + return TokenInfo.newBuilder() + .ledgerId(new BytesConverter().convert("0x03")) + .tokenType(fungibleToken.tokenType()) + .supplyType(fungibleToken.supplyType()) + .tokenId(fungibleTokenId) + .deleted(fungibleToken.deleted()) + .symbol(fungibleToken.symbol()) + .name(fungibleToken.name()) + .memo(fungibleToken.memo()) + .treasury(AccountID.newBuilder().accountNum(fungibleToken.treasuryAccountNumber())) + .totalSupply(fungibleToken.totalSupply()) + .maxSupply(fungibleToken.maxSupply()) + .decimals(fungibleToken.decimals()) + .expiry(Timestamp.newBuilder().seconds(fungibleToken.expiry())) + .adminKey(fungibleToken.adminKey()) + .kycKey(fungibleToken.kycKey()) + .freezeKey(fungibleToken.freezeKey()) + .wipeKey(fungibleToken.wipeKey()) + .supplyKey(fungibleToken.supplyKey()) + .feeScheduleKey(fungibleToken.feeScheduleKey()) + .pauseKey(fungibleToken.pauseKey()) + 
.autoRenewPeriod(Duration.newBuilder().seconds(fungibleToken.autoRenewSecs())) + .autoRenewAccount(AccountID.newBuilder().accountNum(fungibleToken.autoRenewAccountNumber())) + .defaultFreezeStatus(fungibleToken.accountsFrozenByDefault() ? FROZEN : UNFROZEN) + .defaultKycStatus(fungibleToken.accountsKycGrantedByDefault() ? GRANTED : REVOKED) + .pauseStatus(fungibleToken.paused() ? PAUSED : UNPAUSED) + .customFees(fungibleToken.customFees()) + .build(); + } + + private TokenInfo getExpectInfoDefaultKeys() { + final var info = getExpectedInfo(); + return info.copyBuilder() + .supplyKey((Key) null) + .wipeKey((Key) null) + .freezeKey((Key) null) + .kycKey((Key) null) + .adminKey((Key) null) + .feeScheduleKey((Key) null) + .pauseKey((Key) null) + .defaultFreezeStatus(FREEZE_NOT_APPLICABLE) + .defaultKycStatus(KYC_NOT_APPLICABLE) + .pauseStatus(PAUSE_NOT_APPLICABLE) + .build(); + } + + private TokenInfo getExpectInfoDefaultStatus() { + final var info = getExpectedInfo(); + return info.copyBuilder() + .defaultFreezeStatus(FROZEN) + .defaultKycStatus(GRANTED) + .pauseStatus(PAUSED) + .build(); + } + + private Token setFungibleTokenKeys() { + return fungibleToken + .copyBuilder() + .supplyKey(Key.DEFAULT) + .wipeKey(Key.DEFAULT) + .freezeKey(Key.DEFAULT) + .kycKey(Key.DEFAULT) + .adminKey(Key.DEFAULT) + .feeScheduleKey(Key.DEFAULT) + .pauseKey(Key.DEFAULT) + .build(); + } + + private Token setFungibleTokenDefaultStatus() { + return fungibleToken + .copyBuilder() + .accountsFrozenByDefault(true) + .accountsKycGrantedByDefault(true) + .paused(true) + .build(); + } + + private Query createTokenGetInfoQuery(final TokenID tokenId) { + final var data = TokenGetInfoQuery.newBuilder() + .token(tokenId) + .header(QueryHeader.newBuilder().build()) + .build(); + + return Query.newBuilder().tokenGetInfo(data).build(); + } + + private Query createEmptyTokenGetInfoQuery() { + final var data = TokenGetInfoQuery.newBuilder() + .header(QueryHeader.newBuilder().build()) + .build(); + + return Query.newBuilder().tokenGetInfo(data).build(); + } +} From c177c537bdad06b82430922d88d6490781070813 Mon Sep 17 00:00:00 2001 From: artemananiev <33361937+artemananiev@users.noreply.github.com> Date: Tue, 13 Jun 2023 15:36:41 -0700 Subject: [PATCH 23/70] 7006: Old virtual map copies aren't released during to disk migration (#7007) Fixes: https://github.com/hashgraph/hedera-services/issues/7006 Reviewed-by: Ivan Malygin , Michael Tinker , Oleg Mazurov Signed-off-by: Artem Ananev --- .../state/migration/MapMigrationToDisk.java | 2 ++ .../state/migration/UniqueTokensMigrator.java | 18 +++++++++++++----- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/migration/MapMigrationToDisk.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/migration/MapMigrationToDisk.java index b51e439aeacd..bdda0bb6887d 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/migration/MapMigrationToDisk.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/migration/MapMigrationToDisk.java @@ -90,6 +90,7 @@ private static void migrateAccountsToDisk( onDiskAccounts.get().put(new EntityNumVirtualKey(num.longValue()), onDiskAccount); if (insertionsSoFar.incrementAndGet() % insertionsPerCopy == 0) { final var onDiskAccountsCopy = onDiskAccounts.get().copy(); + onDiskAccounts.get().release(); onDiskAccounts.set(onDiskAccountsCopy); } }), 
@@ -119,6 +120,7 @@ private static void migrateRelsToDisk( onDiskRels.get().put(EntityNumVirtualKey.fromPair(numPair), onDiskRel); if (insertionsSoFar.incrementAndGet() % insertionsPerCopy == 0) { final var onDiskRelCopy = onDiskRels.get().copy(); + onDiskRels.get().release(); onDiskRels.set(onDiskRelCopy); } }), diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/migration/UniqueTokensMigrator.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/migration/UniqueTokensMigrator.java index 82941d37ac1d..077019e47302 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/migration/UniqueTokensMigrator.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/migration/UniqueTokensMigrator.java @@ -28,6 +28,7 @@ import com.swirlds.merkle.map.MerkleMap; import com.swirlds.virtualmap.VirtualMap; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -49,19 +50,26 @@ public static void migrateFromUniqueTokenMerkleMap(final ServicesState initializ } final MerkleMap legacyUniqueTokens = currentData.merkleMap(); - final VirtualMap vmUniqueTokens = - virtualMapFactory.newVirtualizedUniqueTokenStorage(); + final AtomicReference> virtualMapRef = + new AtomicReference<>(virtualMapFactory.newVirtualizedUniqueTokenStorage()); final AtomicInteger count = new AtomicInteger(); forEach(MerkleMapLike.from(legacyUniqueTokens), (entityNumPair, legacyToken) -> { final var numSerialPair = entityNumPair.asTokenNumAndSerialPair(); final var newTokenKey = new UniqueTokenKey(numSerialPair.getLeft(), numSerialPair.getRight()); final var newTokenValue = UniqueTokenValue.from(legacyToken); - vmUniqueTokens.put(newTokenKey, newTokenValue); - count.incrementAndGet(); + virtualMapRef.get().put(newTokenKey, newTokenValue); + final int currentCount = count.incrementAndGet(); + // Create a new virtual map copy every few tokens to make sure they can be flushed to disk + if (currentCount % 10000 == 0) { + final VirtualMap currentCopy = virtualMapRef.get(); + virtualMapRef.set(currentCopy.copy()); + currentCopy.release(); + // Future work: may need to wait until currentCopy is actually flushed to disk + } }); - initializingState.setChild(StateChildIndices.UNIQUE_TOKENS, vmUniqueTokens); + initializingState.setChild(StateChildIndices.UNIQUE_TOKENS, virtualMapRef.get()); LOG.info("Migrated {} unique tokens", count.get()); } From 7522a7b28642d14f50c331bc926e658fc50c1d3a Mon Sep 17 00:00:00 2001 From: Neeharika Sompalli <52669918+Neeharika-Sompalli@users.noreply.github.com> Date: Tue, 13 Jun 2023 22:52:12 -0500 Subject: [PATCH 24/70] TokenCreate `handle()` implementation (#7041) Signed-off-by: Neeharika-Sompalli --- .../SingleTransactionRecordBuilder.java | 13 +- .../dispatcher/MonoTransactionDispatcher.java | 33 +- .../MonoTransactionDispatcherTest.java | 30 + .../token/impl/handlers/BaseTokenHandler.java | 142 +++ .../TokenAssociateToAccountHandler.java | 106 +- .../impl/handlers/TokenCreateHandler.java | 223 +++- .../TokenDissociateFromAccountHandler.java | 2 +- .../records/TokenCreateRecordBuilder.java | 38 + .../token/impl/util/TokenHandlerHelper.java | 17 +- .../impl/validators/CustomFeesValidator.java | 90 +- .../validators/TokenAttributesValidator.java | 166 +++ .../impl/validators/TokenCreateValidator.java | 197 ++++ 
.../TokenSupplyChangeOpsValidator.java | 3 +- .../CryptoDeleteAllowanceHandlerTest.java | 3 +- .../handlers/TokenCreateHandleParityTest.java | 19 +- .../test/handlers/TokenCreateHandlerTest.java | 972 ++++++++++++++++++ ...TokenDissociateFromAccountHandlerTest.java | 19 +- .../util/CryptoTokenHandlerTestBase.java | 4 + .../test/util/TokenHandlerHelperTest.java | 86 +- .../validators/CustomFeesValidatorTest.java | 328 +++++- .../TokenAttributesValidatorTest.java | 397 +++++++ 21 files changed, 2655 insertions(+), 233 deletions(-) create mode 100644 hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/records/TokenCreateRecordBuilder.java create mode 100644 hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenAttributesValidator.java create mode 100644 hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java create mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandlerTest.java create mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/TokenAttributesValidatorTest.java diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/SingleTransactionRecordBuilder.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/SingleTransactionRecordBuilder.java index df78e68254b9..1ad00671adc0 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/SingleTransactionRecordBuilder.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/records/SingleTransactionRecordBuilder.java @@ -47,6 +47,7 @@ import com.hedera.node.app.service.consensus.impl.records.ConsensusSubmitMessageRecordBuilder; import com.hedera.node.app.service.file.impl.records.CreateFileRecordBuilder; import com.hedera.node.app.service.token.impl.records.CryptoCreateRecordBuilder; +import com.hedera.node.app.service.token.impl.records.TokenCreateRecordBuilder; import com.hedera.node.app.service.token.impl.records.TokenMintRecordBuilder; import com.hedera.node.app.service.util.impl.records.PrngRecordBuilder; import com.hedera.node.app.spi.HapiUtils; @@ -71,7 +72,8 @@ public class SingleTransactionRecordBuilder CreateFileRecordBuilder, CryptoCreateRecordBuilder, PrngRecordBuilder, - TokenMintRecordBuilder { + TokenMintRecordBuilder, + TokenCreateRecordBuilder { // base transaction data private Transaction transaction; private Bytes transactionBytes; @@ -319,6 +321,15 @@ public AccountID accountID() { return accountID; } + /** + * @deprecated this method is only used temporarily during the migration + */ + @Deprecated(forRemoval = true) + @Nullable + public TokenID tokenID() { + return tokenID; + } + public SingleTransactionRecordBuilder fileID(FileID fileID) { this.fileID = fileID; return this; diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcher.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcher.java index 2b5c3be9d605..66367c6e0f20 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcher.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcher.java @@ -19,7 +19,7 @@ import static 
com.hedera.hapi.node.base.ResponseCodeEnum.MAX_ENTITIES_IN_PRICE_REGIME_HAVE_BEEN_CREATED; import static java.util.Objects.requireNonNull; -import com.hedera.hapi.node.transaction.TransactionRecord.EntropyOneOfType; +import com.hedera.hapi.node.transaction.TransactionRecord; import com.hedera.node.app.records.SingleTransactionRecordBuilder; import com.hedera.node.app.service.mono.context.SideEffectsTracker; import com.hedera.node.app.service.mono.context.TransactionContext; @@ -76,14 +76,18 @@ public MonoTransactionDispatcher( public void dispatchHandle(@NonNull final HandleContext context) { final var txBody = context.body(); switch (txBody.data().kind()) { + // ------------------ topic -------------------------- case CONSENSUS_CREATE_TOPIC -> dispatchConsensusCreateTopic(context); case CONSENSUS_UPDATE_TOPIC -> dispatchConsensusUpdateTopic(context); case CONSENSUS_DELETE_TOPIC -> dispatchConsensusDeleteTopic(context); case CONSENSUS_SUBMIT_MESSAGE -> dispatchConsensusSubmitMessage(context); + // ------------------ crypto -------------------------- case CRYPTO_CREATE_ACCOUNT -> dispatchCryptoCreate(context); case CRYPTO_DELETE -> dispatchCryptoDelete(context); case CRYPTO_UPDATE_ACCOUNT -> dispatchCryptoUpdate(context); - case FREEZE -> dispatchFreeze(context); + case CRYPTO_APPROVE_ALLOWANCE -> dispatchCryptoApproveAllowance(context); + case CRYPTO_DELETE_ALLOWANCE -> dispatchCryptoDeleteAllowance(context); + // ------------------ token -------------------------- case TOKEN_ASSOCIATE -> dispatchTokenAssociate(context); case TOKEN_DISSOCIATE -> dispatchTokenDissociate(context); case TOKEN_FREEZE -> dispatchTokenFreeze(context); @@ -92,13 +96,18 @@ public void dispatchHandle(@NonNull final HandleContext context) { case TOKEN_REVOKE_KYC -> dispatchTokenRevokeKycFromAccount(context); case TOKEN_PAUSE -> dispatchTokenPause(context); case TOKEN_UNPAUSE -> dispatchTokenUnpause(context); + case TOKEN_CREATION -> dispatchTokenCreate(context); case TOKEN_FEE_SCHEDULE_UPDATE -> dispatchTokenFeeScheduleUpdate(context); case TOKEN_DELETION -> dispatchTokenDeletion(context); case TOKEN_BURN -> dispatchTokenBurn(context); + // ------------------ admin -------------------------- + case FREEZE -> dispatchFreeze(context); + // ------------------ util -------------------------- case UTIL_PRNG -> dispatchPrng(context); default -> throw new IllegalArgumentException(TYPE_NOT_SUPPORTED); } } + // For all the below methods, commit is not called from stores, as it is responsibility of // handle workflow to call commit on WritableKVState. 
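The comment added above, that commit is not called from the stores because the handle workflow commits the WritableKVState, describes the staging model all of these dispatch targets rely on: a handler only puts modified entities into its writable store. A rough sketch from a handler's point of view (the store call mirrors the token handlers later in this patch; the commit step is paraphrased in a comment rather than shown as an API call from this patch):

    // Inside a handler: stage the change; do not commit.
    final var tokenStore = context.writableStore(WritableTokenStore.class);
    tokenStore.put(updatedToken); // recorded in the store's modifications only

    // Later, in the handle workflow (outside any handler), each WritableKVState backing
    // these stores is committed once the whole transaction has been handled successfully.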
private void dispatchConsensusCreateTopic(@NonNull final HandleContext handleContext) { @@ -227,9 +236,9 @@ private void dispatchPrng(@NonNull final HandleContext handleContext) { private void finishUtilPrng(@NonNull final HandleContext handleContext) { final var recordBuilder = handleContext.recordBuilder(SingleTransactionRecordBuilder.class); final var entropy = recordBuilder.entropy(); - if (entropy.kind() == EntropyOneOfType.PRNG_NUMBER) { + if (entropy.kind() == TransactionRecord.EntropyOneOfType.PRNG_NUMBER) { sideEffectsTracker.trackRandomNumber((Integer) entropy.value()); - } else if (entropy.kind() == EntropyOneOfType.PRNG_BYTES) { + } else if (entropy.kind() == TransactionRecord.EntropyOneOfType.PRNG_BYTES) { sideEffectsTracker.trackRandomBytes(PbjConverter.asBytes((Bytes) entropy.value())); } } @@ -240,6 +249,12 @@ private void dispatchTokenFeeScheduleUpdate(@NonNull final HandleContext handleC handler.handle(handleContext); } + private void dispatchTokenCreate(@NonNull final HandleContext handleContext) { + requireNonNull(handleContext); + final var handler = handlers.tokenCreateHandler(); + handler.handle(handleContext); + } + private void dispatchTokenDeletion(@NonNull final HandleContext handleContext) { final var handler = handlers.tokenDeleteHandler(); handler.handle(handleContext); @@ -249,4 +264,14 @@ private void dispatchTokenBurn(@NonNull final HandleContext handleContext) { final var handler = handlers.tokenBurnHandler(); handler.handle(handleContext); } + + private void dispatchCryptoApproveAllowance(final HandleContext handleContext) { + final var handler = handlers.cryptoApproveAllowanceHandler(); + handler.handle(handleContext); + } + + private void dispatchCryptoDeleteAllowance(final HandleContext handleContext) { + final var handler = handlers.cryptoDeleteAllowanceHandler(); + handler.handle(handleContext); + } } diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java index 29ca0b7b4678..720d2b5e717e 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java @@ -658,6 +658,18 @@ void dispatchesTokenBurnAsExpected() { verify(handleContext).body(); } + @Test + void dispatchesTokenCreateAsExpected() { + final var txnBody = TransactionBody.newBuilder() + .tokenCreation(TokenCreateTransactionBody.DEFAULT) + .build(); + given(handleContext.body()).willReturn(txnBody); + + dispatcher.dispatchHandle(handleContext); + + verify(handleContext).body(); + } + @Test void dispatchesCryptoCreateAsExpected() { final var txnBody = TransactionBody.newBuilder() @@ -697,6 +709,24 @@ void dispatchesCryptoDeleteAsExpected() { dispatcher.dispatchHandle(handleContext); } + @Test + void dispatchesCryptoApproveAllowanceAsExpected() { + final var txnBody = TransactionBody.newBuilder() + .cryptoApproveAllowance(CryptoApproveAllowanceTransactionBody.DEFAULT) + .build(); + given(handleContext.body()).willReturn(txnBody); + dispatcher.dispatchHandle(handleContext); + } + + @Test + void dispatchesCryptoDeleteAllowanceAsExpected() { + final var txnBody = TransactionBody.newBuilder() + .cryptoDeleteAllowance(CryptoDeleteAllowanceTransactionBody.DEFAULT) + .build(); + given(handleContext.body()).willReturn(txnBody); + 
dispatcher.dispatchHandle(handleContext); + } + @Test void dispatchesFreezeAsExpected() { final var txnBody = TransactionBody.newBuilder() diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java index ff627da810c2..50bd9c948c58 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java @@ -18,20 +18,29 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.*; import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.service.token.impl.util.IdConvenienceUtils.isValidTokenNum; import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; import static java.util.Objects.requireNonNull; +import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.hapi.node.base.TokenID; import com.hedera.hapi.node.base.TokenSupplyType; +import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.Token; import com.hedera.hapi.node.state.token.TokenRelation; import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; import com.hedera.node.app.service.token.impl.WritableTokenStore; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class BaseTokenHandler { + private static final Logger log = LoggerFactory.getLogger(BaseTokenHandler.class); /** * Mints fungible tokens. This method is called in both token create and mint. * @param token the new or existing token to mint @@ -139,6 +148,139 @@ protected void changeSupply( tokenRelationStore.put(copyTreasuryRel.build()); } + /** + * Creates {@link TokenRelation} object for each token association to account and links tokens to the account. + * This is used in both token associate logic and also token create logic + * @param account the account to link the tokens to + * @param tokens the tokens to link to the account + * @param accountStore the account store + * @param tokenRelStore the token relation store + */ + protected void createAndLinkTokenRels( + @NonNull final Account account, + @NonNull final List tokens, + @NonNull final WritableAccountStore accountStore, + @NonNull final WritableTokenRelationStore tokenRelStore) { + // create list of token relations to be added + final var newTokenRels = createTokenRelsToAccount(account, tokens); + // Link the new token relations to the account + linkTokenRels(account, newTokenRels, tokenRelStore); + + // Now replace the account's old head token number with the new head token number. 
This is + // how we link the new tokenRels to the account + final var firstOfNewTokenRels = newTokenRels.get(0); + final var updatedAcct = account.copyBuilder() + // replace the head token number with the first token number of the new tokenRels + .headTokenNumber(firstOfNewTokenRels.tokenNumber()) + // and also update the account's total number of token associations + .numberAssociations(account.numberAssociations() + newTokenRels.size()) + .build(); + + // Save the results + accountStore.put(updatedAcct); + newTokenRels.forEach(tokenRelStore::put); + } + + /** + * Link all the new token relations created for the account together, and then link them to the account. + * @param account the account to link the tokens to + * @param newTokenRels the new token relations to link to the account + * @param tokenRelStore the token relation store + */ + private void linkTokenRels( + @NonNull final Account account, + @NonNull final List newTokenRels, + @NonNull final WritableTokenRelationStore tokenRelStore) { + // Now all the NEW token relations are linked together, but they are not yet linked to the account. First, + // compute where the account's current head token number should go in the linked list of tokens + final var currentHeadTokenNum = account.headTokenNumber(); + // NOTE: if currentHeadTokenNum is less than 1, it means the account isn't associated with any tokens yet, so + // we'll just set the head to the first token, i.e. the first token ID list from the transaction (since the new + // tokenRels are all linked, and in the order of the token IDs as they appeared in the original list) + if (isValidTokenNum(currentHeadTokenNum)) { + // The account is already associated with some tokens, so we need to insert the new + // tokenRels at the beginning of the list of existing token numbers first. We start by + // retrieving the token rel object with the currentHeadTokenNum at the head of the + // account + final var headTokenRel = tokenRelStore.get( + AccountID.newBuilder().accountNum(account.accountNumber()).build(), + TokenID.newBuilder().tokenNum(currentHeadTokenNum).build()); + if (headTokenRel != null) { + // Recreate the current head token's tokenRel, but with its previous pointer set to + // the last of the new tokenRels. This links the new token rels to the rest of the + // token rels connected via the old head token rel + final var lastOfNewTokenRels = newTokenRels.remove(newTokenRels.size() - 1); + final var headTokenAsNonHeadTokenRel = headTokenRel + .copyBuilder() + .previousToken(lastOfNewTokenRels.tokenNumber()) + .build(); // the old head token rel is no longer the head + + // Also connect the last of the new tokenRels to the old head token rel + newTokenRels.add(lastOfNewTokenRels + .copyBuilder() + .nextToken(headTokenAsNonHeadTokenRel.tokenNumber()) + .build()); + tokenRelStore.put(headTokenAsNonHeadTokenRel); + } else { + // This shouldn't happen, but if it does we'll log the error and continue with creating the token + // associations + log.error( + "Unable to get head tokenRel for account {}, token {}! Linked-list relations are likely in a bad state", + account.accountNumber(), + currentHeadTokenNum); + } + } + } + + /** + * Creates list of {@link TokenRelation}s for each token association to account. 
+ * @param account the account to link the tokens to + * @param tokens the tokens to link to the account + * @return the list of token relations to be added + */ + private List createTokenRelsToAccount( + @NonNull final Account account, @NonNull final List tokens) { + final var newTokenRels = new ArrayList(); + for (int i = 0; i < tokens.size(); i++) { + final var token = tokens.get(i); + // Link each of the new token IDs together in a doubly-linked list way by setting each + // token relation's previous and next token IDs. + + // Compute the previous and next token IDs. Unfortunately `TokenRelation` doesn't + // allow for null values, so a value of '0' will have to indicate a null pointer to + // the previous or next token (since no token number 0 can exist) + long prevTokenId = 0; + long nextTokenId = 0; + if (i - 1 >= 0) { // if there is a previous token + prevTokenId = Optional.ofNullable(tokens.get(i - 1)) + .map(Token::tokenNumber) + .orElse(0L); + } + if (i + 1 < tokens.size()) { // if there is a next token + nextTokenId = Optional.ofNullable(tokens.get(i + 1)) + .map(Token::tokenNumber) + .orElse(0L); + } + + // Create the new token relation + final var isFrozen = token.hasFreezeKey() && token.accountsFrozenByDefault(); + final var kycGranted = !token.hasKycKey(); + final var newTokenRel = new TokenRelation( + token.tokenNumber(), + account.accountNumber(), + 0, + isFrozen, + kycGranted, + false, + false, + prevTokenId, + nextTokenId); + newTokenRels.add(newTokenRel); + } + return newTokenRels; + } + + /* ------------------------- Helper functions ------------------------- */ @NonNull public static TokenID asToken(final long num) { return TokenID.newBuilder().tokenNum(num).build(); diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAssociateToAccountHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAssociateToAccountHandler.java index 567846bcb3a4..fa830a562503 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAssociateToAccountHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAssociateToAccountHandler.java @@ -21,7 +21,6 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKENS_PER_ACCOUNT_LIMIT_EXCEEDED; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_ID_REPEATED_IN_TOKEN_LIST; -import static com.hedera.node.app.service.token.impl.util.IdConvenienceUtils.isValidTokenNum; import static com.hedera.node.app.service.token.impl.util.TokenHandlerHelper.getIfUsable; import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; import static java.util.Objects.requireNonNull; @@ -32,7 +31,6 @@ import com.hedera.hapi.node.base.TokenID; import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.Token; -import com.hedera.hapi.node.state.token.TokenRelation; import com.hedera.hapi.node.token.TokenAssociateTransactionBody; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.WritableAccountStore; @@ -49,19 +47,15 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.Optional; import javax.inject.Inject; import javax.inject.Singleton; -import org.slf4j.Logger; 
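The methods hoisted into BaseTokenHandler above (so that token create can reuse the token-associate logic) keep an account's token relations in a doubly-linked list: the account stores a headTokenNumber, each TokenRelation stores previousToken and nextToken numbers, and 0 stands in for a null link. New associations are linked to one another in the order the tokens were given and then spliced in ahead of the old head. A simplified, self-contained illustration (a stand-in record, not the real TokenRelation; the token numbers are made up):

    // The account's list currently heads with token 30; we now associate tokens 10 and 20.
    record Rel(long tokenNum, long prevToken, long nextToken) {} // 0 means "no link"

    Rel oldHead  = new Rel(30, 0, 0);
    Rel relFor10 = new Rel(10, 0, 20);                  // first new rel: no previous, next is 20
    Rel relFor20 = new Rel(20, 10, oldHead.tokenNum()); // last new rel points forward to the old head
    oldHead      = new Rel(30, relFor20.tokenNum(), 0); // old head now points back to token 20
    long newHeadTokenNumber = relFor10.tokenNum();      // account.headTokenNumber becomes 10
    // account.numberAssociations is also increased by the number of new relations (2 here).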
-import org.slf4j.LoggerFactory; /** * This class contains all workflow-related functionality regarding {@link * HederaFunctionality#TOKEN_ASSOCIATE_TO_ACCOUNT}. */ @Singleton -public class TokenAssociateToAccountHandler implements TransactionHandler { - private static final Logger log = LoggerFactory.getLogger(TokenAssociateToAccountHandler.class); +public class TokenAssociateToAccountHandler extends BaseTokenHandler implements TransactionHandler { @Inject public TokenAssociateToAccountHandler() { @@ -96,104 +90,6 @@ public void handle(@NonNull final HandleContext context) throws HandleException createAndLinkTokenRels(validated.account(), validated.tokens(), accountStore, tokenRelStore); } - private void createAndLinkTokenRels( - @NonNull final Account account, - @NonNull final List tokens, - @NonNull final WritableAccountStore accountStore, - @NonNull final WritableTokenRelationStore tokenRelStore) { - final var newTokenRels = new ArrayList(); - for (int i = 0; i < tokens.size(); i++) { - final var token = tokens.get(i); - // Link each of the new token IDs together in a doubly-linked list way by setting each - // token relation's previous and next token IDs. - - // Compute the previous and next token IDs. Unfortunately `TokenRelation` doesn't - // allow for null values, so a value of '0' will have to indicate a null pointer to - // the previous or next token (since no token number 0 can exist) - long prevTokenId = 0; - long nextTokenId = 0; - if (i - 1 >= 0) { // if there is a previous token - prevTokenId = Optional.ofNullable(tokens.get(i - 1)) - .map(Token::tokenNumber) - .orElse(0L); - } - if (i + 1 < tokens.size()) { // if there is a next token - nextTokenId = Optional.ofNullable(tokens.get(i + 1)) - .map(Token::tokenNumber) - .orElse(0L); - } - - // Create the new token relation - final var isFrozen = token.hasFreezeKey() && token.accountsFrozenByDefault(); - final var kycGranted = !token.hasKycKey(); - final var newTokenRel = new TokenRelation( - token.tokenNumber(), - account.accountNumber(), - 0, - isFrozen, - kycGranted, - false, - false, - prevTokenId, - nextTokenId); - newTokenRels.add(newTokenRel); - } - - // Now all the NEW token relations are linked together, but they are not yet linked to the account. First, - // compute where the account's current head token number should go in the linked list of tokens - final var currentHeadTokenNum = account.headTokenNumber(); - // NOTE: if currentHeadTokenNum is less than 1, it means the account isn't associated with any tokens yet, so - // we'll just set the head to the first token, i.e. the first token ID list from the transaction (since the new - // tokenRels are all linked, and in the order of the token IDs as they appeared in the original list) - if (isValidTokenNum(currentHeadTokenNum)) { - // The account is already associated with some tokens, so we need to insert the new - // tokenRels at the beginning of the list of existing token numbers first. We start by - // retrieving the token rel object with the currentHeadTokenNum at the head of the - // account - final var headTokenRel = tokenRelStore.get( - AccountID.newBuilder().accountNum(account.accountNumber()).build(), - TokenID.newBuilder().tokenNum(currentHeadTokenNum).build()); - if (headTokenRel != null) { - // Recreate the current head token's tokenRel, but with its previous pointer set to - // the last of the new tokenRels. 
This links the new token rels to the rest of the - // token rels connected via the old head token rel - final var lastOfNewTokenRels = newTokenRels.remove(newTokenRels.size() - 1); - final var headTokenAsNonHeadTokenRel = headTokenRel - .copyBuilder() - .previousToken(lastOfNewTokenRels.tokenNumber()) - .build(); // the old head token rel is no longer the head - - // Also connect the last of the new tokenRels to the old head token rel - newTokenRels.add(lastOfNewTokenRels - .copyBuilder() - .nextToken(headTokenAsNonHeadTokenRel.tokenNumber()) - .build()); - tokenRelStore.put(headTokenAsNonHeadTokenRel); - } else { - // This shouldn't happen, but if it does we'll log the error and continue with creating the token - // associations - log.error( - "Unable to get head tokenRel for account {}, token {}! Linked-list relations are likely in a bad state", - account.accountNumber(), - currentHeadTokenNum); - } - } - - // Now replace the account's old head token number with the new head token number. This is - // how we link the new tokenRels to the account - final var firstOfNewTokenRels = newTokenRels.get(0); - final var updatedAcct = account.copyBuilder() - // replace the head token number with the first token number of the new tokenRels - .headTokenNumber(firstOfNewTokenRels.tokenNumber()) - // and also update the account's total number of token associations - .numberAssociations(account.numberAssociations() + newTokenRels.size()) - .build(); - - // Save the results - accountStore.put(updatedAcct); - newTokenRels.forEach(tokenRelStore::put); - } - /** * Performs checks independent of state or context */ diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenCreateHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenCreateHandler.java index 922ab8513aa5..c89f4abffcd7 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenCreateHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenCreateHandler.java @@ -19,19 +19,38 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_AUTORENEW_ACCOUNT; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_CUSTOM_FEE_COLLECTOR; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TREASURY_ACCOUNT_FOR_TOKEN; +import static com.hedera.hapi.node.base.ResponseCodeEnum.MAX_ENTITIES_IN_PRICE_REGIME_HAVE_BEEN_CREATED; +import static com.hedera.node.app.spi.validation.ExpiryMeta.NA; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; import static java.util.Collections.emptyList; import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.Duration; import com.hedera.hapi.node.base.HederaFunctionality; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.token.TokenCreateTransactionBody; import com.hedera.hapi.node.transaction.CustomFee; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.service.token.ReadableAccountStore; +import com.hedera.node.app.service.token.impl.WritableAccountStore; +import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; +import com.hedera.node.app.service.token.impl.WritableTokenStore; +import 
com.hedera.node.app.service.token.impl.records.TokenCreateRecordBuilder; +import com.hedera.node.app.service.token.impl.util.TokenHandlerHelper; +import com.hedera.node.app.service.token.impl.validators.CustomFeesValidator; +import com.hedera.node.app.service.token.impl.validators.TokenCreateValidator; +import com.hedera.node.app.spi.validation.ExpiryMeta; import com.hedera.node.app.spi.workflows.HandleContext; -import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.PreHandleContext; import com.hedera.node.app.spi.workflows.TransactionHandler; +import com.hedera.node.config.data.EntitiesConfig; +import com.hedera.node.config.data.TokensConfig; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.List; +import java.util.Set; import javax.inject.Inject; import javax.inject.Singleton; @@ -40,16 +59,28 @@ * HederaFunctionality#TOKEN_CREATE}. */ @Singleton -public class TokenCreateHandler implements TransactionHandler { +public class TokenCreateHandler extends BaseTokenHandler implements TransactionHandler { + private final CustomFeesValidator customFeesValidator; + private final TokenCreateValidator tokenCreateValidator; + @Inject - public TokenCreateHandler() { - // Exists for injection + public TokenCreateHandler( + @NonNull final CustomFeesValidator customFeesValidator, + @NonNull final TokenCreateValidator tokenCreateValidator) { + requireNonNull(customFeesValidator); + requireNonNull(tokenCreateValidator); + + this.customFeesValidator = customFeesValidator; + this.tokenCreateValidator = tokenCreateValidator; } @Override public void preHandle(@NonNull final PreHandleContext context) throws PreCheckException { requireNonNull(context); - final var tokenCreateTxnBody = context.body().tokenCreationOrThrow(); + final var txn = context.body(); + pureChecks(txn); + + final var tokenCreateTxnBody = txn.tokenCreationOrThrow(); if (tokenCreateTxnBody.hasTreasury()) { final var treasuryId = tokenCreateTxnBody.treasuryOrThrow(); context.requireKeyOrThrow(treasuryId, INVALID_TREASURY_ACCOUNT_FOR_TOKEN); @@ -66,8 +97,183 @@ public void preHandle(@NonNull final PreHandleContext context) throws PreCheckEx } @Override - public void handle(@NonNull final HandleContext context) throws HandleException { - throw new UnsupportedOperationException("Not implemented"); + public void pureChecks(@NonNull final TransactionBody txn) throws PreCheckException { + tokenCreateValidator.pureChecks(txn.tokenCreationOrThrow()); + } + + @Override + public void handle(@NonNull final HandleContext context) { + requireNonNull(context); + final var txn = context.body(); + final var op = txn.tokenCreationOrThrow(); + // Create or get needed config and stores + final var tokensConfig = context.configuration().getConfigData(TokensConfig.class); + final var accountStore = context.writableStore(WritableAccountStore.class); + final var tokenStore = context.writableStore(WritableTokenStore.class); + final var tokenRelationStore = context.writableStore(WritableTokenRelationStore.class); + + /* Validate if the current token can be created */ + validateTrue( + tokenStore.sizeOfState() + 1 <= tokensConfig.maxNumber(), + MAX_ENTITIES_IN_PRICE_REGIME_HAVE_BEEN_CREATED); + + // validate fields in the transaction body that involves checking with + // dynamic properties or state. 
+ final var resolvedExpiryMeta = validateSemantics(context, accountStore, op, tokensConfig); + + // build a new token + final var newTokenNum = context.newEntityNum(); + final var newTokenId = TokenID.newBuilder().tokenNum(newTokenNum).build(); + final var newToken = buildToken(newTokenNum, op, resolvedExpiryMeta); + + // validate custom fees and get back list of fees with created token denomination + final var feesSetNeedingCollectorAutoAssociation = customFeesValidator.validateForCreation( + newToken, accountStore, tokenRelationStore, tokenStore, op.customFeesOrElse(emptyList())); + + // Put token into modifications map + tokenStore.put(newToken); + // associate token with treasury and collector ids of custom fees whose token denomination + // is set to sentinel value + associateAccounts(context, newToken, accountStore, tokenRelationStore, feesSetNeedingCollectorAutoAssociation); + + if (op.initialSupply() > 0) { + // Since we have associated treasury and needed fee collector accounts in the previous step, + // this relation should exist. Mint the provided initial supply of tokens + final var treasuryRel = tokenRelationStore.get(op.treasuryOrThrow(), newTokenId); + // This keeps modified token with minted balance into modifications in token store + mintFungible(newToken, treasuryRel, op.initialSupply(), true, accountStore, tokenStore, tokenRelationStore); + } + // Update record with newly created token id + final var recordBuilder = context.recordBuilder(TokenCreateRecordBuilder.class); + recordBuilder.tokenID(newTokenId); + } + + /** + * Associate treasury account and the collector accounts of custom fees whose token denomination + * is set to sentinel value, to use denomination as newly created token. + * @param newToken newly created token + * @param accountStore account store + * @param tokenRelStore token relation store + * @param requireCollectorAutoAssociation set of custom fees whose token denomination is set to sentinel value + */ + private void associateAccounts( + final HandleContext context, + final Token newToken, + final WritableAccountStore accountStore, + @NonNull final WritableTokenRelationStore tokenRelStore, + final Set requireCollectorAutoAssociation) { + final var tokensConfig = context.configuration().getConfigData(TokensConfig.class); + final var entitiesConfig = context.configuration().getConfigData(EntitiesConfig.class); + + // This should exist as it is validated in validateSemantics + final var treasury = accountStore.get(AccountID.newBuilder() + .accountNum(newToken.treasuryAccountNumber()) + .build()); + // Validate if token relation can be created between treasury and new token + // If this succeeds, create and link token relation. + tokenCreateValidator.validateAssociation(entitiesConfig, tokensConfig, treasury, newToken, tokenRelStore); + createAndLinkTokenRels(treasury, List.of(newToken), accountStore, tokenRelStore); + + for (final var customFee : requireCollectorAutoAssociation) { + // This should exist as it is validated in validateSemantics + final var collector = accountStore.get(customFee.feeCollectorAccountIdOrThrow()); + // Validate if token relation can be created between collector and new token + // If this succeeds, create and link token relation. + tokenCreateValidator.validateAssociation(entitiesConfig, tokensConfig, collector, newToken, tokenRelStore); + createAndLinkTokenRels(collector, List.of(newToken), accountStore, tokenRelStore); + } + } + + /** + * Create a new token with the given parameters. 
+ * @param newTokenNum new token number + * @param op token creation transaction body + * @param resolvedExpiryMeta resolved expiry meta + * @return newly created token + */ + private Token buildToken( + final long newTokenNum, final TokenCreateTransactionBody op, final ExpiryMeta resolvedExpiryMeta) { + return new Token( + newTokenNum, + op.name(), + op.symbol(), + op.decimals(), + 0, // is this correct ? + op.treasury().accountNum(), + op.adminKey(), + op.kycKey(), + op.freezeKey(), + op.wipeKey(), + op.supplyKey(), + op.feeScheduleKey(), + op.pauseKey(), + 0, + false, + op.tokenType(), + op.supplyType(), + resolvedExpiryMeta.autoRenewNum(), + resolvedExpiryMeta.autoRenewPeriod(), + resolvedExpiryMeta.expiry(), + op.memo(), + op.maxSupply(), + false, + op.freezeDefault(), + false, + op.customFees()); + } + + /** + * Get the expiry metadata for the token to be created from the transaction body. + * @param consensusTime consensus time + * @param op token creation transaction body + * @return given expiry metadata + */ + private ExpiryMeta getExpiryMeta(final long consensusTime, @NonNull final TokenCreateTransactionBody op) { + final var impliedExpiry = + consensusTime + op.autoRenewPeriodOrElse(Duration.DEFAULT).seconds(); + return new ExpiryMeta( + impliedExpiry, + op.autoRenewPeriodOrElse(Duration.DEFAULT).seconds(), + // Shard and realm will be ignored if num is NA + op.hasAutoRenewAccount() ? op.autoRenewAccount().shardNum() : NA, + op.hasAutoRenewAccount() ? op.autoRenewAccount().realmNum() : NA, + op.hasAutoRenewAccount() ? op.autoRenewAccount().accountNumOrElse(NA) : NA); + } + + /** + * Validate the semantics of the token creation transaction body, that involves checking with + * dynamic properties or state. + * @param context handle context + * @param accountStore account store + * @param op token creation transaction body + * @param config tokens configuration + * @return resolved expiry metadata + */ + private ExpiryMeta validateSemantics( + @NonNull final HandleContext context, + @NonNull final ReadableAccountStore accountStore, + @NonNull final TokenCreateTransactionBody op, + @NonNull final TokensConfig config) { + requireNonNull(context); + requireNonNull(accountStore); + requireNonNull(op); + requireNonNull(config); + + // validate different token create fields + tokenCreateValidator.validate(context, accountStore, op, config); + + // validate expiration and auto-renew account if present + final var givenExpiryMeta = getExpiryMeta(context.consensusNow().getEpochSecond(), op); + final var resolvedExpiryMeta = context.expiryValidator().resolveCreationAttempt(false, givenExpiryMeta); + + // validate auto-renew account exists + if (resolvedExpiryMeta.autoRenewNum() != 0) { + final var id = AccountID.newBuilder() + .accountNum(resolvedExpiryMeta.autoRenewNum()) + .build(); + TokenHandlerHelper.getIfUsable(id, accountStore, context.expiryValidator(), INVALID_AUTORENEW_ACCOUNT); + } + return resolvedExpiryMeta; } /* --------------- Helper methods --------------- */ @@ -116,7 +322,8 @@ private void addCustomFeeCollectorKeys( * @param collector the ID of the collector * @param alwaysAdd if true, will always add the key */ - private void addAccount(final PreHandleContext context, final AccountID collector, final boolean alwaysAdd) + private void addAccount( + @NonNull final PreHandleContext context, @NonNull final AccountID collector, final boolean alwaysAdd) throws PreCheckException { if (alwaysAdd) { context.requireKeyOrThrow(collector, INVALID_CUSTOM_FEE_COLLECTOR); diff --git 
a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDissociateFromAccountHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDissociateFromAccountHandler.java index 93b62ce9c223..bc96c2ac6c6c 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDissociateFromAccountHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDissociateFromAccountHandler.java @@ -209,7 +209,7 @@ private ValidatedResult validateSemantics( @NonNull final WritableTokenRelationStore tokenRelStore, @NonNull final ExpiryValidator expiryValidator) { // Check that the account is usable - final var acct = TokenHandlerHelper.getIfUsable(accountId, accountStore, expiryValidator); + final var acct = TokenHandlerHelper.getIfUsable(accountId, accountStore, expiryValidator, INVALID_ACCOUNT_ID); // Construct the dissociation for each token ID final var dissociations = new ArrayList(); diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/records/TokenCreateRecordBuilder.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/records/TokenCreateRecordBuilder.java new file mode 100644 index 000000000000..ad86946aab39 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/records/TokenCreateRecordBuilder.java @@ -0,0 +1,38 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.records; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.TokenID; +import edu.umd.cs.findbugs.annotations.NonNull; + +/** + * A {@code RecordBuilder} specialization for tracking the side effects of a {@code TokenCreate} + * transaction. + */ +public interface TokenCreateRecordBuilder { + /** + * Tracks creation of a new token by number. Even if someday we support creating multiple + * tokens within a smart contract call, we will still only need to track one created token + * per child record. 
+ * + * @param tokenID the {@link AccountID} of the new token + * @return this builder + */ + @NonNull + TokenCreateRecordBuilder tokenID(@NonNull TokenID tokenID); +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java index f2a686e58b4f..09726185f243 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java @@ -33,9 +33,6 @@ */ import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_DELETED; -import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_EXPIRED_AND_PENDING_REMOVAL; -import static com.hedera.hapi.node.base.ResponseCodeEnum.CONTRACT_EXPIRED_AND_PENDING_REMOVAL; -import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_IS_PAUSED; @@ -46,6 +43,7 @@ import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.hapi.node.base.TokenID; import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.Token; @@ -82,21 +80,18 @@ private TokenHandlerHelper() { public static Account getIfUsable( @NonNull final AccountID accountId, @NonNull final ReadableAccountStore accountStore, - @NonNull final ExpiryValidator expiryValidator) { + @NonNull final ExpiryValidator expiryValidator, + @NonNull final ResponseCodeEnum errorIfNotUsable) { requireNonNull(accountId); requireNonNull(accountStore); requireNonNull(expiryValidator); + requireNonNull(errorIfNotUsable); final var acct = accountStore.getAccountById(accountId); - validateTrue(acct != null, INVALID_ACCOUNT_ID); + validateTrue(acct != null, errorIfNotUsable); validateFalse(acct.deleted(), ACCOUNT_DELETED); - final var isSmartContract = acct.smartContract(); - validateFalse( - acct.expiredAndPendingRemoval(), - isSmartContract ? CONTRACT_EXPIRED_AND_PENDING_REMOVAL : ACCOUNT_EXPIRED_AND_PENDING_REMOVAL); - final var expiryStatus = expiryValidator.expirationStatus( - isSmartContract ? 
EntityType.CONTRACT : EntityType.ACCOUNT, false, acct.tinybarBalance()); + EntityType.ACCOUNT, acct.expiredAndPendingRemoval(), acct.tinybarBalance()); validateTrue(expiryStatus == OK, expiryStatus); return acct; diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CustomFeesValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CustomFeesValidator.java index ab922e74a94e..e8413f6cddca 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CustomFeesValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/CustomFeesValidator.java @@ -34,6 +34,7 @@ import com.hedera.node.app.service.token.ReadableTokenRelationStore; import com.hedera.node.app.service.token.impl.WritableTokenStore; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.HashSet; import java.util.List; import java.util.Set; import javax.inject.Inject; @@ -61,14 +62,33 @@ public CustomFeesValidator() { * @param customFees The custom fees to validate. * @return The set of custom fees that need to auto associate collector accounts. */ - public Set validateCreation( + public Set validateForCreation( @NonNull final Token createdToken, @NonNull final ReadableAccountStore accountStore, @NonNull final ReadableTokenRelationStore tokenRelationStore, @NonNull final WritableTokenStore tokenStore, @NonNull final List customFees) { - // This method will be implemented in TokenCreate handler PR - throw new UnsupportedOperationException("Not implemented yet. Will be implemented in TokenCreate PR"); + // It is possible that denominating tokenId is set to sentinel value of 0.0.0. + // In that scenario, the created token should be used as the denominating token. + // This is a valid scenario for fungible common tokens. + // For these custom fees we need to associate the collector with the token. 
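The renamed validateForCreation above treats a fixed fee whose denominating token id is the 0.0.0 sentinel (tokenNum == 0) as denominated in the token being created: that is only allowed when the new token is FUNGIBLE_COMMON, the denomination is rewritten to the new token's number, and the fee is returned in the set whose collectors the create handler must auto-associate. A sketch of such a fee as it could appear in a TokenCreate body (builder names follow the PBJ types used elsewhere in this patch; the amount and collector account number are made up):

    final var feeInNewToken = CustomFee.newBuilder()
            .fixedFee(FixedFee.newBuilder()
                    .amount(5)
                    // sentinel 0.0.0: charge this fee in the token the transaction creates
                    .denominatingTokenId(TokenID.newBuilder().tokenNum(0L).build())
                    .build())
            .feeCollectorAccountId(AccountID.newBuilder().accountNum(1234L).build())
            .build();
    // After validateForCreation, the returned copy of this fee has denominatingTokenId set to the
    // newly created token's number, and TokenCreateHandler associates account 0.0.1234 with it.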
+ final Set fees = new HashSet<>(); + final var tokenType = createdToken.tokenType(); + for (final var fee : customFees) { + final var collector = accountStore.getAccountById(fee.feeCollectorAccountIdOrElse(AccountID.DEFAULT)); + validateTrue(collector != null, INVALID_CUSTOM_FEE_COLLECTOR); + + switch (fee.fee().kind()) { + case FIXED_FEE -> validateFixedFeeForCreation( + tokenType, fee, createdToken, tokenRelationStore, tokenStore, fees); + case FRACTIONAL_FEE -> validateTrue( + isFungibleCommon(tokenType), CUSTOM_FRACTIONAL_FEE_ONLY_ALLOWED_FOR_FUNGIBLE_COMMON); + case ROYALTY_FEE -> validateRoyaltyFeeForCreation(tokenType, fee, tokenRelationStore, tokenStore); + default -> throw new IllegalArgumentException( + "Unexpected value for custom fee type: " + fee.fee().kind()); + } + } + return fees; } /** @@ -106,7 +126,7 @@ public void validateForFeeScheduleUpdate( switch (fee.fee().kind()) { case FIXED_FEE -> { - final var fixedFee = fee.fixedFee(); + final var fixedFee = fee.fixedFeeOrThrow(); // validate any explicit token denomination set if (fixedFee.hasDenominatingTokenId()) { validateExplicitTokenDenomination( @@ -125,10 +145,11 @@ public void validateForFeeScheduleUpdate( // royalty fee can be only applied to non-fungible unique tokens validateTrue( isNonFungibleUnique(tokenType), CUSTOM_ROYALTY_FEE_ONLY_ALLOWED_FOR_NON_FUNGIBLE_UNIQUE); - if (fee.royaltyFee().hasFallbackFee() - && fee.royaltyFee().fallbackFee().hasDenominatingTokenId()) { - final var tokenNum = fee.royaltyFee() - .fallbackFee() + final var royaltyFee = fee.royaltyFeeOrThrow(); + if (royaltyFee.hasFallbackFee() + && royaltyFee.fallbackFeeOrThrow().hasDenominatingTokenId()) { + final var tokenNum = royaltyFee + .fallbackFeeOrThrow() .denominatingTokenId() .tokenNum(); final var tokenId = @@ -150,10 +171,10 @@ public void validateForFeeScheduleUpdate( * @param tokenStore The token store. */ private void validateExplicitTokenDenomination( - final AccountID feeCollectorNum, - final TokenID tokenNum, - final ReadableTokenRelationStore tokenRelationStore, - final WritableTokenStore tokenStore) { + @NonNull final AccountID feeCollectorNum, + @NonNull final TokenID tokenNum, + @NonNull final ReadableTokenRelationStore tokenRelationStore, + @NonNull final WritableTokenStore tokenStore) { final var denomToken = tokenStore.get(tokenNum); validateTrue(denomToken != null, INVALID_TOKEN_ID_IN_CUSTOM_FEES); validateTrue(isFungibleCommon(denomToken.tokenType()), CUSTOM_FEE_DENOMINATION_MUST_BE_FUNGIBLE_COMMON); @@ -177,4 +198,49 @@ private boolean isFungibleCommon(@NonNull final TokenType tokenType) { private boolean isNonFungibleUnique(@NonNull final TokenType tokenType) { return tokenType.equals(TokenType.NON_FUNGIBLE_UNIQUE); } + + private void validateFixedFeeForCreation( + @NonNull final TokenType tokenType, + @NonNull final CustomFee fee, + @NonNull final Token createdToken, + @NonNull final ReadableTokenRelationStore tokenRelationStore, + @NonNull final WritableTokenStore tokenStore, + @NonNull final Set feesWithCollectorsToAutoAssociate) { + final var fixedFee = fee.fixedFeeOrThrow(); + if (fixedFee.hasDenominatingTokenId()) { + // If the denominating token id is set to sentinel value 0.0.0, then the fee is + // denominated in the same token as the token being created. + // For these fees the collector should be auto-associated to the token. 
+ if (fixedFee.denominatingTokenIdOrThrow().tokenNum() == 0L) { + validateTrue(isFungibleCommon(tokenType), CUSTOM_FEE_DENOMINATION_MUST_BE_FUNGIBLE_COMMON); + final var copy = fee.copyBuilder(); + copy.fixedFee(fixedFee.copyBuilder() + .denominatingTokenId(TokenID.newBuilder() + .tokenNum(createdToken.tokenNumber()) + .build())); + feesWithCollectorsToAutoAssociate.add(copy.build()); + } else { + validateExplicitTokenDenomination( + fee.feeCollectorAccountId(), fixedFee.denominatingTokenId(), tokenRelationStore, tokenStore); + } + } + } + + private void validateRoyaltyFeeForCreation( + @NonNull final TokenType tokenType, + @NonNull final CustomFee fee, + @NonNull final ReadableTokenRelationStore tokenRelationStore, + @NonNull final WritableTokenStore tokenStore) { + validateTrue(isNonFungibleUnique(tokenType), CUSTOM_ROYALTY_FEE_ONLY_ALLOWED_FOR_NON_FUNGIBLE_UNIQUE); + final var royaltyFee = fee.royaltyFeeOrThrow(); + if (royaltyFee.hasFallbackFee()) { + final var fallbackFee = royaltyFee.fallbackFeeOrThrow(); + if (fallbackFee.hasDenominatingTokenId()) { + final var denominatingTokenId = fallbackFee.denominatingTokenIdOrThrow(); + validateTrue(denominatingTokenId.tokenNum() != 0, CUSTOM_FEE_DENOMINATION_MUST_BE_FUNGIBLE_COMMON); + validateExplicitTokenDenomination( + fee.feeCollectorAccountId(), denominatingTokenId, tokenRelationStore, tokenStore); + } + } + } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenAttributesValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenAttributesValidator.java new file mode 100644 index 000000000000..dcf20df71c49 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenAttributesValidator.java @@ -0,0 +1,166 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.token.impl.validators; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ADMIN_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_CUSTOM_FEE_SCHEDULE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_FREEZE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_KYC_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_PAUSE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_SUPPLY_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_WIPE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ZERO_BYTE_IN_STRING; +import static com.hedera.hapi.node.base.ResponseCodeEnum.MISSING_TOKEN_NAME; +import static com.hedera.hapi.node.base.ResponseCodeEnum.MISSING_TOKEN_SYMBOL; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_NAME_TOO_LONG; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_SYMBOL_TOO_LONG; +import static com.hedera.node.app.spi.key.KeyUtils.isValid; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static java.util.Objects.requireNonNull; + +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.KeyList; +import com.hedera.hapi.node.base.ResponseCodeEnum; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.TokensConfig; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.charset.StandardCharsets; +import javax.inject.Inject; +import javax.inject.Singleton; + +/** + * Provides validation for token fields like token type, token supply type, token symbol etc.,. + * It is used in pureChecks for token creation. + */ +@Singleton +public class TokenAttributesValidator { + private final ConfigProvider configProvider; + public static final Key IMMUTABILITY_SENTINEL_KEY = + Key.newBuilder().keyList(KeyList.DEFAULT).build(); + + @Inject + public TokenAttributesValidator(@NonNull final ConfigProvider configProvider) { + this.configProvider = configProvider; + } + + /** + * Validates the token symbol, if it is exists and is not empty or not too long. + * @param symbol the token symbol to validate + */ + public void validateTokenSymbol(@Nullable final String symbol) { + final var tokensConfig = configProvider.getConfiguration().getConfigData(TokensConfig.class); + tokenStringCheck(symbol, tokensConfig.maxSymbolUtf8Bytes(), MISSING_TOKEN_SYMBOL, TOKEN_SYMBOL_TOO_LONG); + } + + /** + * Validates the token name, if it is exists and is not empty or not too long. + * @param name the token name to validate + */ + public void validateTokenName(@Nullable final String name) { + final var tokensConfig = configProvider.getConfiguration().getConfigData(TokensConfig.class); + tokenStringCheck(name, tokensConfig.maxTokenNameUtf8Bytes(), MISSING_TOKEN_NAME, TOKEN_NAME_TOO_LONG); + } + + /** + * Given a token name or token symbol, validates that it is not null, not empty, not too long, and does not contain + * a zero byte. 
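Token name and symbol validation (tokenStringCheck, just below) comes down to three rules: the value must be present and non-empty, its UTF-8 encoding must fit within the configured byte limit (a byte limit, not a character count), and it must not contain a NUL character. A standalone sketch of the same checks (plain exceptions stand in for the ResponseCodeEnum-carrying failures used by the validator):

    import java.nio.charset.StandardCharsets;

    static void checkTokenString(final String s, final int maxUtf8Bytes) {
        if (s == null || s.isEmpty()) {
            throw new IllegalArgumentException("missing token name/symbol");
        }
        final int utf8Len = s.getBytes(StandardCharsets.UTF_8).length;
        if (utf8Len > maxUtf8Bytes) {
            throw new IllegalArgumentException("token name/symbol too long: " + utf8Len + " bytes");
        }
        if (s.contains("\u0000")) {
            throw new IllegalArgumentException("NUL character in token name/symbol");
        }
    }

    // Example: a three-character symbol such as "ℏℏℏ" is nine UTF-8 bytes, so it counts as nine
    // toward the maxSymbolUtf8Bytes limit even though it is only three characters long.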
+ * @param s the token name or symbol to validate + * @param maxLen the maximum number of UTF-8 bytes allowed in the token name or symbol + * @param onMissing the response code to use if the token name or symbol is null or empty + * @param onTooLong the response code to use if the token name or symbol is too long + */ + private void tokenStringCheck( + @Nullable final String s, + final int maxLen, + @NonNull final ResponseCodeEnum onMissing, + @NonNull final ResponseCodeEnum onTooLong) { + validateTrue(s != null, onMissing); + final int numUtf8Bytes = s.getBytes(StandardCharsets.UTF_8).length; + validateTrue(numUtf8Bytes != 0, onMissing); + validateTrue(numUtf8Bytes <= maxLen, onTooLong); + validateTrue(!s.contains("\u0000"), INVALID_ZERO_BYTE_IN_STRING); + } + + /** + * Validates the token keys, if it is exists and is not empty or not too long. + * For token admin key, allows empty {@link KeyList} to be set. It is used for removing keys. + * This method is both used in TokenCreate and TokenUpdate. + * + * @param hasAdminKey whether the token has an admin key + * @param adminKey the token admin key to validate + * @param hasKycKey whether the token has a KYC key + * @param kycKey the token KYC key to validate + * @param hasWipeKey whether the token has a wipe key + * @param wipeKey the token wipe key to validate + * @param hasSupplyKey whether the token has a supply key + * @param supplyKey the token supply key to validate + * @param hasFreezeKey whether the token has a freeze key + * @param freezeKey the token freeze key to validate + * @param hasFeeScheduleKey whether the token has a fee schedule key + * @param feeScheduleKey the token fee schedule key to validate + * @param hasPauseKey whether the token has a pause key + * @param pauseKey the token pause key to validate + */ + public void checkKeys( + final boolean hasAdminKey, + @Nullable final Key adminKey, + final boolean hasKycKey, + @Nullable final Key kycKey, + final boolean hasWipeKey, + @Nullable final Key wipeKey, + final boolean hasSupplyKey, + @Nullable final Key supplyKey, + final boolean hasFreezeKey, + @Nullable final Key freezeKey, + final boolean hasFeeScheduleKey, + @Nullable final Key feeScheduleKey, + final boolean hasPauseKey, + @Nullable final Key pauseKey) { + if (hasAdminKey && !isKeyRemoval(adminKey)) { + validateTrue(isValid(adminKey), INVALID_ADMIN_KEY); + } + if (hasKycKey) { + validateTrue(isValid(kycKey), INVALID_KYC_KEY); + } + if (hasWipeKey) { + validateTrue(isValid(wipeKey), INVALID_WIPE_KEY); + } + if (hasSupplyKey) { + validateTrue(isValid(supplyKey), INVALID_SUPPLY_KEY); + } + if (hasFreezeKey) { + validateTrue(isValid(freezeKey), INVALID_FREEZE_KEY); + } + if (hasFeeScheduleKey) { + validateTrue(isValid(feeScheduleKey), INVALID_CUSTOM_FEE_SCHEDULE_KEY); + } + if (hasPauseKey) { + validateTrue(isValid(pauseKey), INVALID_PAUSE_KEY); + } + } + + /** + * Checks if the given key is a key removal, if it is set as {@link #IMMUTABILITY_SENTINEL_KEY}. 
+ * @param source the key to check + * @return true if the key is a key removal, false otherwise + */ + private static boolean isKeyRemoval(@NonNull final Key source) { + requireNonNull(source); + return IMMUTABILITY_SENTINEL_KEY.equals(source); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java new file mode 100644 index 000000000000..f8ec76952993 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java @@ -0,0 +1,197 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.validators; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.CUSTOM_FEES_LIST_TOO_LONG; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_DECIMALS; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_INITIAL_SUPPLY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_MAX_SUPPLY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TREASURY_ACCOUNT_FOR_TOKEN; +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKENS_PER_ACCOUNT_LIMIT_EXCEEDED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_FREEZE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_SUPPLY_KEY; +import static com.hedera.hapi.node.base.TokenSupplyType.FINITE; +import static com.hedera.hapi.node.base.TokenSupplyType.INFINITE; +import static com.hedera.hapi.node.base.TokenType.FUNGIBLE_COMMON; +import static com.hedera.hapi.node.base.TokenType.NON_FUNGIBLE_UNIQUE; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; +import static com.hedera.node.app.spi.workflows.HandleException.validateFalse; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateFalsePreCheck; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateTruePreCheck; +import static java.util.Collections.emptyList; +import static java.util.Objects.requireNonNull; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.TokenSupplyType; +import com.hedera.hapi.node.base.TokenType; +import com.hedera.hapi.node.state.token.Account; +import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.token.TokenCreateTransactionBody; +import com.hedera.node.app.service.token.ReadableAccountStore; +import 
com.hedera.node.app.service.token.impl.WritableTokenRelationStore; +import com.hedera.node.app.service.token.impl.util.TokenHandlerHelper; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.config.data.EntitiesConfig; +import com.hedera.node.config.data.TokensConfig; +import edu.umd.cs.findbugs.annotations.NonNull; +import javax.inject.Inject; +import javax.inject.Singleton; + +/** + * Provides validations for TokenCreateTransactionBody + */ +@Singleton +public class TokenCreateValidator { + private final TokenAttributesValidator tokenAttributesValidator; + + @Inject + public TokenCreateValidator(@NonNull final TokenAttributesValidator tokenAttributesValidator) { + this.tokenAttributesValidator = tokenAttributesValidator; + } + + /** + * Validations needed in pre-handle for {@link TokenCreateTransactionBody} are done here + * @param op token create transaction body + * @throws PreCheckException if any of the validations fail + */ + public void pureChecks(@NonNull final TokenCreateTransactionBody op) throws PreCheckException { + requireNonNull(op); + final var initialSupply = op.initialSupply(); + final var maxSupply = op.maxSupply(); + final var decimals = op.decimals(); + final var supplyType = op.supplyType(); + final var tokenType = op.tokenType(); + + validateTokenType(tokenType, initialSupply, decimals); + validateSupplyType(supplyType, maxSupply); + + validateFalsePreCheck(maxSupply > 0 && initialSupply > maxSupply, INVALID_TOKEN_INITIAL_SUPPLY); + validateTruePreCheck(op.hasTreasury(), INVALID_TREASURY_ACCOUNT_FOR_TOKEN); + + if (tokenType == NON_FUNGIBLE_UNIQUE) { + validateTruePreCheck(op.hasSupplyKey(), TOKEN_HAS_NO_SUPPLY_KEY); + } + if (op.freezeDefault()) { + validateTruePreCheck(op.hasFreezeKey(), TOKEN_HAS_NO_FREEZE_KEY); + } + } + + /** + * All validations in handle needed for {@link TokenCreateTransactionBody} are done here + * @param context context + * @param accountStore account store + * @param op token create transaction body + * @param config tokens config + */ + public void validate( + @NonNull final HandleContext context, + @NonNull final ReadableAccountStore accountStore, + @NonNull final TokenCreateTransactionBody op, + @NonNull final TokensConfig config) { + TokenHandlerHelper.getIfUsable( + op.treasuryOrElse(AccountID.DEFAULT), + accountStore, + context.expiryValidator(), + INVALID_TREASURY_ACCOUNT_FOR_TOKEN); + + final var nftsAreEnabled = config.nftsAreEnabled(); + if (op.tokenType().equals(TokenType.NON_FUNGIBLE_UNIQUE)) { + validateTrue(nftsAreEnabled, NOT_SUPPORTED); + } + + context.attributeValidator().validateMemo(op.memo()); + tokenAttributesValidator.validateTokenSymbol(op.symbol()); + tokenAttributesValidator.validateTokenName(op.name()); + + tokenAttributesValidator.checkKeys( + op.hasAdminKey(), op.adminKey(), + op.hasKycKey(), op.kycKey(), + op.hasWipeKey(), op.wipeKey(), + op.hasSupplyKey(), op.supplyKey(), + op.hasFreezeKey(), op.freezeKey(), + op.hasFeeScheduleKey(), op.feeScheduleKey(), + op.hasPauseKey(), op.pauseKey()); + // validate custom fees length + validateTrue( + op.customFeesOrElse(emptyList()).size() <= config.maxCustomFeesAllowed(), CUSTOM_FEES_LIST_TOO_LONG); + } + + /** + * Validates initial supply and decimals based on token type + * @param type token type + * @param initialSupply initial supply + * @param decimals decimals + * @throws PreCheckException if validation fails + */ + private void validateTokenType(@NonNull final TokenType type, 
final long initialSupply, final int decimals) + throws PreCheckException { + validateTruePreCheck(type == FUNGIBLE_COMMON || type == NON_FUNGIBLE_UNIQUE, NOT_SUPPORTED); + if (type == FUNGIBLE_COMMON) { + validateTruePreCheck(initialSupply >= 0, INVALID_TOKEN_INITIAL_SUPPLY); + validateTruePreCheck(decimals >= 0, INVALID_TOKEN_DECIMALS); + } else { + validateTruePreCheck(initialSupply == 0, INVALID_TOKEN_INITIAL_SUPPLY); + validateTruePreCheck(decimals == 0, INVALID_TOKEN_DECIMALS); + } + } + + /** + * Validates supply type and max supply + * @param supplyType supply type + * @param maxSupply max supply + * @throws PreCheckException if validation fails + */ + private void validateSupplyType(final TokenSupplyType supplyType, final long maxSupply) throws PreCheckException { + validateTruePreCheck(supplyType == INFINITE || supplyType == FINITE, NOT_SUPPORTED); + if (supplyType == INFINITE) { + validateTruePreCheck(maxSupply == 0, INVALID_TOKEN_MAX_SUPPLY); + } else { + validateTruePreCheck(maxSupply > 0, INVALID_TOKEN_MAX_SUPPLY); + } + } + + /** + * Validates if the token and account already have relationship and if the account has reached the limit of + * associations. + * These checks need to be done before the token is created and associated to treasury or any custom + * fee collector accounts. + * @param entitiesConfig entities config + * @param tokensConfig tokens config + * @param account account to associate with + * @param token token to associate with + * @param tokenRelStore token relation store + */ + public void validateAssociation( + @NonNull final EntitiesConfig entitiesConfig, + @NonNull final TokensConfig tokensConfig, + @NonNull final Account account, + @NonNull final Token token, + @NonNull final WritableTokenRelationStore tokenRelStore) { + validateFalse( + entitiesConfig.limitTokenAssociations() + && account.numberAssociations() + 1 > tokensConfig.maxPerAccount(), + TOKENS_PER_ACCOUNT_LIMIT_EXCEEDED); + validateTrue( + tokenRelStore.get(asAccount(account.accountNumber()), asToken(token.tokenNumber())) == null, + TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java index b9d436a57d41..607bc7d3f8e5 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java @@ -30,7 +30,8 @@ import javax.inject.Inject; /** - * This class contains validations to be done in handle for Token Mint and Token Burn operations + * This class contains validations to be done in handle for Token Mint and + * Token Burn operations in handle */ public class TokenSupplyChangeOpsValidator { private final ConfigProvider configProvider; diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteAllowanceHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteAllowanceHandlerTest.java index ff23c3e63b42..d90fb0bb97b2 100644 --- 
a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteAllowanceHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteAllowanceHandlerTest.java @@ -31,7 +31,6 @@ import com.hedera.hapi.node.token.CryptoDeleteAllowanceTransactionBody; import com.hedera.hapi.node.token.NftRemoveAllowance; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.config.VersionedConfigImpl; import com.hedera.node.app.service.token.impl.handlers.CryptoDeleteAllowanceHandler; import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoTokenHandlerTestBase; import com.hedera.node.app.service.token.impl.validators.DeleteAllowanceValidator; @@ -66,7 +65,7 @@ public void setUp() { givenStoresAndConfig(configProvider, handleContext); given(handleContext.configuration()).willReturn(configuration); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + given(configProvider.getConfiguration()).willReturn(versionedConfig); } @Test diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandleParityTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandleParityTest.java index 62eaa30ddfe5..80d1d14358d4 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandleParityTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandleParityTest.java @@ -65,19 +65,36 @@ import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.impl.handlers.TokenCreateHandler; import com.hedera.node.app.service.token.impl.test.util.SigReqAdapterUtils; +import com.hedera.node.app.service.token.impl.validators.CustomFeesValidator; +import com.hedera.node.app.service.token.impl.validators.TokenAttributesValidator; +import com.hedera.node.app.service.token.impl.validators.TokenCreateValidator; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.config.ConfigProvider; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +@ExtendWith(MockitoExtension.class) class TokenCreateHandleParityTest { + @Mock + private ConfigProvider configProvider; + private ReadableAccountStore accountStore; private TokenCreateHandler subject; + private CustomFeesValidator customFeesValidator; + private TokenAttributesValidator tokenFieldsValidator; + private TokenCreateValidator tokenCreateValidator; @BeforeEach void setUp() { + tokenFieldsValidator = new TokenAttributesValidator(configProvider); + customFeesValidator = new CustomFeesValidator(); + tokenCreateValidator = new TokenCreateValidator(tokenFieldsValidator); accountStore = SigReqAdapterUtils.wellKnownAccountStoreAt(); - subject = new TokenCreateHandler(); + subject = new TokenCreateHandler(customFeesValidator, tokenCreateValidator); } @Test diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandlerTest.java 
b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandlerTest.java new file mode 100644 index 000000000000..1eb0a40a5e8c --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandlerTest.java @@ -0,0 +1,972 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.AUTORENEW_DURATION_NOT_IN_RANGE; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ADMIN_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_AUTORENEW_ACCOUNT; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_CUSTOM_FEE_SCHEDULE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_EXPIRATION_TIME; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_FREEZE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_KYC_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_SUPPLY_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_DECIMALS; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_INITIAL_SUPPLY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_MAX_SUPPLY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TREASURY_ACCOUNT_FOR_TOKEN; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_WIPE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ZERO_BYTE_IN_STRING; +import static com.hedera.hapi.node.base.ResponseCodeEnum.MISSING_TOKEN_NAME; +import static com.hedera.hapi.node.base.ResponseCodeEnum.MISSING_TOKEN_SYMBOL; +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKENS_PER_ACCOUNT_LIMIT_EXCEEDED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_FREEZE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_SUPPLY_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_NAME_TOO_LONG; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_SYMBOL_TOO_LONG; +import static com.hedera.node.app.service.token.impl.validators.TokenAttributesValidator.IMMUTABILITY_SENTINEL_KEY; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static com.hedera.test.utils.KeyUtils.A_COMPLEX_KEY; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatNoException; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; 
+import static org.mockito.BDDMockito.given; +import static org.mockito.Mock.Strictness.LENIENT; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.Duration; +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.Timestamp; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TokenSupplyType; +import com.hedera.hapi.node.base.TokenType; +import com.hedera.hapi.node.base.TransactionID; +import com.hedera.hapi.node.state.token.TokenRelation; +import com.hedera.hapi.node.token.TokenCreateTransactionBody; +import com.hedera.hapi.node.transaction.CustomFee; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.config.VersionedConfigImpl; +import com.hedera.node.app.records.SingleTransactionRecordBuilder; +import com.hedera.node.app.service.mono.config.HederaNumbers; +import com.hedera.node.app.service.mono.context.properties.GlobalDynamicProperties; +import com.hedera.node.app.service.mono.context.properties.PropertySource; +import com.hedera.node.app.service.token.impl.WritableAccountStore; +import com.hedera.node.app.service.token.impl.handlers.TokenCreateHandler; +import com.hedera.node.app.service.token.impl.records.TokenCreateRecordBuilder; +import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoTokenHandlerTestBase; +import com.hedera.node.app.service.token.impl.validators.CustomFeesValidator; +import com.hedera.node.app.service.token.impl.validators.TokenAttributesValidator; +import com.hedera.node.app.service.token.impl.validators.TokenCreateValidator; +import com.hedera.node.app.spi.validation.AttributeValidator; +import com.hedera.node.app.spi.validation.ExpiryValidator; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.app.workflows.handle.validation.StandardizedAttributeValidator; +import com.hedera.node.app.workflows.handle.validation.StandardizedExpiryValidator; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import java.util.List; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class TokenCreateHandlerTest extends CryptoTokenHandlerTestBase { + @Mock(strictness = LENIENT) + private HandleContext handleContext; + + @Mock(strictness = LENIENT) + private ConfigProvider configProvider; + + @Mock(strictness = LENIENT) + private PropertySource compositeProps; + + @Mock(strictness = LENIENT) + private HederaNumbers hederaNumbers; + + @Mock(strictness = LENIENT) + private GlobalDynamicProperties dynamicProperties; + + private TokenCreateRecordBuilder recordBuilder; + private TokenCreateHandler subject; + private TransactionBody txn; + private CustomFeesValidator customFeesValidator; + private TokenAttributesValidator tokenFieldsValidator; + private TokenCreateValidator tokenCreateValidator; + private ExpiryValidator expiryValidator; + private AttributeValidator attributeValidator; + + private static final TokenID newTokenId = + TokenID.newBuilder().tokenNum(3000L).build(); + private static final Timestamp expiry = + Timestamp.newBuilder().seconds(1234600L).build(); + private final AccountID autoRenewAccountId = ownerId; + + @BeforeEach + public void setUp() 
{ + super.setUp(); + refreshWritableStores(); + recordBuilder = new SingleTransactionRecordBuilder(consensusInstant); + tokenFieldsValidator = new TokenAttributesValidator(configProvider); + customFeesValidator = new CustomFeesValidator(); + tokenCreateValidator = new TokenCreateValidator(tokenFieldsValidator); + subject = new TokenCreateHandler(customFeesValidator, tokenCreateValidator); + givenStoresAndConfig(configProvider, handleContext); + } + + @Test + void handleWorksForFungibleCreate() { + setUpTxnContext(); + + assertThat(writableTokenStore.get(newTokenId)).isNull(); + assertThat(writableTokenRelStore.get(treasuryId, newTokenId)).isNull(); + + subject.handle(handleContext); + + assertThat(writableTokenStore.get(newTokenId)).isNotNull(); + final var token = writableTokenStore.get(newTokenId); + + assertThat(token.treasuryAccountNumber()).isEqualTo(treasuryId.accountNum()); + assertThat(token.tokenNumber()).isEqualTo(newTokenId.tokenNum()); + assertThat(token.totalSupply()).isEqualTo(1000L); + assertThat(token.tokenType()).isEqualTo(TokenType.FUNGIBLE_COMMON); + assertThat(token.expiry()) + .isEqualTo(consensusInstant.plusSeconds(autoRenewSecs).getEpochSecond()); + assertThat(token.freezeKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.kycKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.adminKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.wipeKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.supplyKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.feeScheduleKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.autoRenewSecs()).isEqualTo(autoRenewSecs); + assertThat(token.autoRenewAccountNumber()).isEqualTo(autoRenewAccountId.accountNum()); + assertThat(token.decimals()).isZero(); + assertThat(token.name()).isEqualTo("TestToken"); + assertThat(token.symbol()).isEqualTo("TT"); + assertThat(token.memo()).isEqualTo("test token"); + assertThat(token.customFees()).isEqualTo(List.of(withFixedFee(fixedFee), withFractionalFee(fractionalFee))); + + assertThat(writableTokenRelStore.get(treasuryId, newTokenId)).isNotNull(); + final var tokenRel = writableTokenRelStore.get(treasuryId, newTokenId); + + assertThat(tokenRel.balance()).isEqualTo(1000L); + assertThat(tokenRel.deleted()).isFalse(); + assertThat(tokenRel.tokenNumber()).isEqualTo(newTokenId.tokenNum()); + assertThat(tokenRel.accountNumber()).isEqualTo(treasuryId.accountNum()); + assertThat(tokenRel.kycGranted()).isFalse(); + assertThat(tokenRel.automaticAssociation()).isFalse(); + assertThat(tokenRel.frozen()).isFalse(); + assertThat(tokenRel.nextToken()).isZero(); + assertThat(tokenRel.previousToken()).isZero(); + } + + @Test + void handleWorksForFungibleCreateWithSelfDenominatedToken() { + setUpTxnContext(); + final var customFees = List.of( + withFixedFee(fixedFee.copyBuilder() + .denominatingTokenId(TokenID.newBuilder().tokenNum(0L).build()) + .build()), + withFractionalFee(fractionalFee)); + txn = new TokenCreateBuilder().withCustomFees(customFees).build(); + given(handleContext.body()).willReturn(txn); + + assertThat(writableTokenStore.get(newTokenId)).isNull(); + assertThat(writableTokenRelStore.get(treasuryId, newTokenId)).isNull(); + assertThat(writableTokenRelStore.get(payerId, newTokenId)).isNull(); + + subject.handle(handleContext); + + assertThat(writableTokenStore.get(newTokenId)).isNotNull(); + final var token = writableTokenStore.get(newTokenId); + + assertThat(token.treasuryAccountNumber()).isEqualTo(treasuryId.accountNum()); + assertThat(token.tokenNumber()).isEqualTo(newTokenId.tokenNum()); + 
assertThat(token.totalSupply()).isEqualTo(1000L); + assertThat(token.tokenType()).isEqualTo(TokenType.FUNGIBLE_COMMON); + assertThat(token.expiry()) + .isEqualTo(consensusInstant.plusSeconds(autoRenewSecs).getEpochSecond()); + assertThat(token.freezeKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.kycKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.adminKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.wipeKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.supplyKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.feeScheduleKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.autoRenewSecs()).isEqualTo(autoRenewSecs); + assertThat(token.autoRenewAccountNumber()).isEqualTo(autoRenewAccountId.accountNum()); + assertThat(token.decimals()).isZero(); + assertThat(token.name()).isEqualTo("TestToken"); + assertThat(token.symbol()).isEqualTo("TT"); + assertThat(token.memo()).isEqualTo("test token"); + assertThat(token.customFees()).isEqualTo(customFees); + + assertThat(writableTokenRelStore.get(treasuryId, newTokenId)).isNotNull(); + final var tokenRel = writableTokenRelStore.get(treasuryId, newTokenId); + + assertThat(tokenRel.balance()).isEqualTo(1000L); + assertThat(tokenRel.deleted()).isFalse(); + assertThat(tokenRel.tokenNumber()).isEqualTo(newTokenId.tokenNum()); + assertThat(tokenRel.accountNumber()).isEqualTo(treasuryId.accountNum()); + assertThat(tokenRel.kycGranted()).isFalse(); + assertThat(tokenRel.automaticAssociation()).isFalse(); + assertThat(tokenRel.frozen()).isFalse(); + assertThat(tokenRel.nextToken()).isZero(); + assertThat(tokenRel.previousToken()).isZero(); + + assertThat(writableTokenRelStore.get(payerId, newTokenId)).isNotNull(); + final var feeCollectorRel = writableTokenRelStore.get(payerId, newTokenId); + + assertThat(feeCollectorRel.balance()).isZero(); + assertThat(feeCollectorRel.deleted()).isFalse(); + assertThat(feeCollectorRel.tokenNumber()).isEqualTo(newTokenId.tokenNum()); + assertThat(feeCollectorRel.accountNumber()).isEqualTo(payerId.accountNum()); + assertThat(feeCollectorRel.kycGranted()).isFalse(); + assertThat(feeCollectorRel.automaticAssociation()).isFalse(); + assertThat(feeCollectorRel.frozen()).isFalse(); + assertThat(feeCollectorRel.nextToken()).isZero(); + assertThat(feeCollectorRel.previousToken()).isZero(); + } + + @Test + void failsIfAssociationLimitExceeded() { + setUpTxnContext(); + configuration = new HederaTestConfigBuilder() + .withValue("entities.limitTokenAssociations", "true") + .withValue("tokens.maxPerAccount", "0") + .getOrCreateConfig(); + given(handleContext.configuration()).willReturn(configuration); + + assertThat(writableTokenStore.get(newTokenId)).isNull(); + assertThat(writableTokenRelStore.get(treasuryId, newTokenId)).isNull(); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKENS_PER_ACCOUNT_LIMIT_EXCEEDED)); + } + + @Test + void failsIfAssociationAlreadyExists() { + setUpTxnContext(); + configuration = new HederaTestConfigBuilder() + .withValue("entities.limitTokenAssociations", "true") + .withValue("tokens.maxPerAccount", "10") + .getOrCreateConfig(); + given(handleContext.configuration()).willReturn(configuration); + + assertThat(writableTokenStore.get(newTokenId)).isNull(); + assertThat(writableTokenRelStore.get(treasuryId, newTokenId)).isNull(); + + // Just to simulate existing token association , add to store. 
Only for testing + writableTokenRelStore.put(TokenRelation.newBuilder() + .tokenNumber(newTokenId.tokenNum()) + .accountNumber(treasuryId.accountNum()) + .balance(1000L) + .build()); + assertThat(writableTokenRelStore.get(treasuryId, newTokenId)).isNotNull(); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT)); + } + + @Test + void failsIfAssociationLimitExceededWhileAssociatingCollector() { + setUpTxnContext(); + final var customFees = List.of( + withFixedFee(fixedFee.copyBuilder() + .denominatingTokenId(TokenID.newBuilder().tokenNum(0L).build()) + .build()), + withFractionalFee(fractionalFee)); + txn = new TokenCreateBuilder().withCustomFees(customFees).build(); + given(handleContext.body()).willReturn(txn); + + configuration = new HederaTestConfigBuilder() + .withValue("entities.limitTokenAssociations", "true") + .withValue("tokens.maxPerAccount", "1") + .getOrCreateConfig(); + given(handleContext.configuration()).willReturn(configuration); + + assertThat(writableTokenStore.get(newTokenId)).isNull(); + assertThat(writableTokenRelStore.get(treasuryId, newTokenId)).isNull(); + assertThat(writableTokenRelStore.get(payerId, newTokenId)).isNull(); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKENS_PER_ACCOUNT_LIMIT_EXCEEDED)); + } + + @Test + void failsIfAssociationAlreadyExistsWhileAssociatingCollector() { + setUpTxnContext(); + final var customFees = List.of( + withFixedFee(fixedFee.copyBuilder() + .denominatingTokenId(TokenID.newBuilder().tokenNum(0L).build()) + .build()), + withFractionalFee(fractionalFee)); + txn = new TokenCreateBuilder().withCustomFees(customFees).build(); + given(handleContext.body()).willReturn(txn); + + configuration = new HederaTestConfigBuilder() + .withValue("entities.limitTokenAssociations", "true") + .withValue("tokens.maxPerAccount", "10") + .getOrCreateConfig(); + given(handleContext.configuration()).willReturn(configuration); + + assertThat(writableTokenStore.get(newTokenId)).isNull(); + assertThat(writableTokenRelStore.get(treasuryId, newTokenId)).isNull(); + + // Just to simulate existing token association , add to store. 
Only for testing + writableTokenRelStore.put(TokenRelation.newBuilder() + .tokenNumber(newTokenId.tokenNum()) + .accountNumber(payerId.accountNum()) + .balance(1000L) + .build()); + assertThat(writableTokenRelStore.get(payerId, newTokenId)).isNotNull(); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT)); + } + + @Test + void uniqueNotSupportedIfNftsNotEnabled() { + setUpTxnContext(); + configuration = new HederaTestConfigBuilder() + .withValue("tokens.nfts.areEnabled", "false") + .getOrCreateConfig(); + txn = new TokenCreateBuilder().withUniqueToken().build(); + given(handleContext.configuration()).willReturn(configuration); + given(handleContext.body()).willReturn(txn); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(NOT_SUPPORTED)); + } + + @Test + void uniqueSupportedIfNftsEnabled() { + setUpTxnContext(); + configuration = new HederaTestConfigBuilder() + .withValue("tokens.nfts.areEnabled", "true") + .getOrCreateConfig(); + txn = new TokenCreateBuilder() + .withUniqueToken() + .withCustomFees(List.of(withRoyaltyFee(royaltyFee))) + .build(); + given(handleContext.configuration()).willReturn(configuration); + given(handleContext.body()).willReturn(txn); + + assertThat(writableTokenStore.get(newTokenId)).isNull(); + assertThat(writableTokenRelStore.get(treasuryId, newTokenId)).isNull(); + + subject.handle(handleContext); + + assertThat(writableTokenStore.get(newTokenId)).isNotNull(); + final var token = writableTokenStore.get(newTokenId); + + assertThat(token.treasuryAccountNumber()).isEqualTo(treasuryId.accountNum()); + assertThat(token.tokenNumber()).isEqualTo(newTokenId.tokenNum()); + assertThat(token.totalSupply()).isZero(); + assertThat(token.tokenType()).isEqualTo(TokenType.NON_FUNGIBLE_UNIQUE); + assertThat(token.expiry()) + .isEqualTo(consensusInstant.plusSeconds(autoRenewSecs).getEpochSecond()); + assertThat(token.freezeKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.kycKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.adminKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.wipeKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.supplyKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.feeScheduleKey()).isEqualTo(A_COMPLEX_KEY); + assertThat(token.autoRenewSecs()).isEqualTo(autoRenewSecs); + assertThat(token.autoRenewAccountNumber()).isEqualTo(autoRenewAccountId.accountNum()); + assertThat(token.decimals()).isZero(); + assertThat(token.name()).isEqualTo("TestToken"); + assertThat(token.symbol()).isEqualTo("TT"); + assertThat(token.memo()).isEqualTo("test token"); + assertThat(token.customFees()).isEqualTo(List.of(withRoyaltyFee(royaltyFee))); + + assertThat(writableTokenRelStore.get(treasuryId, newTokenId)).isNotNull(); + final var tokenRel = writableTokenRelStore.get(treasuryId, newTokenId); + + assertThat(tokenRel.balance()).isZero(); + assertThat(tokenRel.deleted()).isFalse(); + assertThat(tokenRel.tokenNumber()).isEqualTo(newTokenId.tokenNum()); + assertThat(tokenRel.accountNumber()).isEqualTo(treasuryId.accountNum()); + assertThat(tokenRel.kycGranted()).isFalse(); + assertThat(tokenRel.automaticAssociation()).isFalse(); + assertThat(tokenRel.frozen()).isFalse(); + assertThat(tokenRel.nextToken()).isZero(); + assertThat(tokenRel.previousToken()).isZero(); + } + + @Test + void validatesInPureChecks() { + setUpTxnContext(); + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + } + 
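+    // The cases below exercise the split between pureChecks() and handle(): stateless checks
+    // (token and supply type, initial vs. max supply, treasury presence, a supply key for NFTs,
+    // freezeDefault requiring a freeze key) already fail in pureChecks(), while checks that need
+    // configuration or state (symbol/name length, key validity, memo, auto-renew account and
+    // treasury lookups, association limits) only fail in handle().
+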
+ @Test + void acceptsMissingAutoRenewAcountInPureChecks() { + setUpTxnContext(); + txn = new TokenCreateBuilder() + .withAutoRenewAccount(AccountID.newBuilder().accountNum(200000L).build()) + .build(); + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + } + + @Test + void failsOnMissingAutoRenewAcountInHandle() { + setUpTxnContext(); + txn = new TokenCreateBuilder() + .withAutoRenewAccount(AccountID.newBuilder().accountNum(200000L).build()) + .build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_AUTORENEW_ACCOUNT)); + } + + @Test + void failsForZeroLengthSymbol() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withSymbol("").build(); + given(handleContext.body()).willReturn(txn); + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(MISSING_TOKEN_SYMBOL)); + } + + @Test + void failsForNullSymbol() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withSymbol(null).build(); + given(handleContext.body()).willReturn(txn); + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(MISSING_TOKEN_SYMBOL)); + } + + @Test + void failsForVeryLongSymbol() { + setUpTxnContext(); + txn = new TokenCreateBuilder() + .withSymbol("1234567890123456789012345678901234567890123456789012345678901234567890") + .build(); + configuration = new HederaTestConfigBuilder() + .withValue("tokens.maxSymbolUtf8Bytes", "10") + .getOrCreateConfig(); + given(handleContext.configuration()).willReturn(configuration); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + given(handleContext.body()).willReturn(txn); + + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_SYMBOL_TOO_LONG)); + } + + @Test + void failsForZeroLengthName() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withName("").build(); + given(handleContext.body()).willReturn(txn); + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(MISSING_TOKEN_NAME)); + } + + @Test + void failsForNullName() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withName(null).build(); + given(handleContext.body()).willReturn(txn); + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(MISSING_TOKEN_NAME)); + } + + @Test + void failsForVeryLongName() { + setUpTxnContext(); + txn = new TokenCreateBuilder() + .withName("1234567890123456789012345678901234567890123456789012345678901234567890") + .build(); + configuration = new HederaTestConfigBuilder() + .withValue("tokens.maxTokenNameUtf8Bytes", "10") + .getOrCreateConfig(); + given(handleContext.configuration()).willReturn(configuration); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + given(handleContext.body()).willReturn(txn); + + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + 
assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_NAME_TOO_LONG)); + } + + @Test + void failsForNegativeInitialSupplyForFungibleTokenInPreCheck() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withInitialSupply(-1).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_TOKEN_INITIAL_SUPPLY)); + } + + @Test + void failsForNonZeroInitialSupplyForNFTInPreCheck() { + setUpTxnContext(); + txn = new TokenCreateBuilder() + .withTokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .withInitialSupply(1) + .build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_TOKEN_INITIAL_SUPPLY)); + } + + @Test + void failsForNegativeDecimalsForFungibleTokenInPreCheck() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withDecimals(-1).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_TOKEN_DECIMALS)); + } + + @Test + void failsForNonZeroDecimalsForNFTInPreCheck() { + setUpTxnContext(); + txn = new TokenCreateBuilder() + .withTokenType(TokenType.NON_FUNGIBLE_UNIQUE) + .withDecimals(1) + .withInitialSupply(0) + .build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_TOKEN_DECIMALS)); + } + + @Test + void failsOnMissingTreasury() { + setUpTxnContext(); + txn = new TokenCreateBuilder() + .withTreasury(AccountID.newBuilder().accountNum(200000L).build()) + .build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_TREASURY_ACCOUNT_FOR_TOKEN)); + } + + @Test + void failsForInvalidFeeScheduleKey() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withFeeScheduleKey(Key.DEFAULT).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_CUSTOM_FEE_SCHEDULE_KEY)); + } + + @Test + void failsForInvalidAdminKey() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withAdminKey(Key.DEFAULT).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_ADMIN_KEY)); + } + + @Test + void acceptsSentinelAdminKeyForImmutableObjects() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withAdminKey(IMMUTABILITY_SENTINEL_KEY).build(); + given(handleContext.body()).willReturn(txn); + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + } + + @Test + void failsForInvalidSupplyKey() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withSupplyKey(Key.DEFAULT).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_SUPPLY_KEY)); + } + + @Test + void failsForInvalidKycKey() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withKycKey(Key.DEFAULT).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> 
subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_KYC_KEY)); + } + + @Test + void failsForInvalidWipeKey() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withWipeKey(Key.DEFAULT).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_WIPE_KEY)); + } + + @Test + void failsForInvalidFreezeKey() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withFreezeKey(Key.DEFAULT).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_FREEZE_KEY)); + } + + @Test + void failsIfFreezeDefaultAndNoFreezeKey() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withFreezeDefault().withFreezeKey(null).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(TOKEN_HAS_NO_FREEZE_KEY)); + } + + @Test + void succeedsIfFreezeDefaultWithFreezeKey() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withFreezeDefault().build(); + given(handleContext.body()).willReturn(txn); + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + } + + @Test + void failsOnInvalidMemo() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withMemo("\0").build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_ZERO_BYTE_IN_STRING)); + } + + @Test + void failsOnInvalidAutoRenewPeriod() { + setUpTxnContext(); + given(dynamicProperties.maxAutoRenewDuration()).willReturn(30000L); + given(dynamicProperties.minAutoRenewDuration()).willReturn(1000L); + + txn = new TokenCreateBuilder().withAutoRenewPeriod(30001L).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(AUTORENEW_DURATION_NOT_IN_RANGE)); + + txn = new TokenCreateBuilder().withAutoRenewPeriod(100).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(AUTORENEW_DURATION_NOT_IN_RANGE)); + } + + @Test + void failsOnExpiryPastConsensusTime() { + setUpTxnContext(); + txn = new TokenCreateBuilder() + .withAutoRenewPeriod(0) + .withExpiry(consensusInstant.getEpochSecond() - 1) + .build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_EXPIRATION_TIME)); + } + + @Test + void rejectsInvalidMaxSupplyForInfiniteSupplyInPureChecks() { + setUpTxnContext(); + txn = new TokenCreateBuilder() + .withSupplyType(TokenSupplyType.INFINITE) + .withMaxSupply(1) + .build(); + assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_TOKEN_MAX_SUPPLY)); + } + + @Test + void rejectsInvalidMaxSupplyforFiniteSupplyInPureChecks() { + setUpTxnContext(); + txn = new TokenCreateBuilder() + .withSupplyType(TokenSupplyType.FINITE) + .withMaxSupply(0) + .build(); + assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_TOKEN_MAX_SUPPLY)); + } + + @Test + void 
failsOnInvalidInitialAndMaxSupplyInPureChecks() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withInitialSupply(100).withMaxSupply(10).build(); + assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_TOKEN_INITIAL_SUPPLY)); + } + + @Test + void failsOnMissingSupplyKeyOnNftCreateInPureChecks() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withUniqueToken().withSupplyKey(null).build(); + assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(TOKEN_HAS_NO_SUPPLY_KEY)); + } + + @Test + void succeedsWithSupplyKeyOnNftCreateInPureChecks() { + setUpTxnContext(); + txn = new TokenCreateBuilder().withUniqueToken().build(); + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + } + /* --------------------------------- Helpers */ + /** + * A builder for {@link com.hedera.hapi.node.transaction.TransactionBody} instances. + */ + private class TokenCreateBuilder { + private AccountID payer = payerId; + private AccountID treasury = treasuryId; + private Key adminKey = key; + private boolean isUnique = false; + private String name = "TestToken"; + private String symbol = "TT"; + private Key kycKey = A_COMPLEX_KEY; + private Key freezeKey = A_COMPLEX_KEY; + private Key wipeKey = A_COMPLEX_KEY; + private Key supplyKey = A_COMPLEX_KEY; + private Key feeScheduleKey = A_COMPLEX_KEY; + private Key pauseKey = A_COMPLEX_KEY; + private Timestamp expiry = Timestamp.newBuilder().seconds(1234600L).build(); + private AccountID autoRenewAccount = autoRenewAccountId; + private long autoRenewPeriod = autoRenewSecs; + private String memo = "test token"; + private TokenType tokenType = TokenType.FUNGIBLE_COMMON; + private TokenSupplyType supplyType = TokenSupplyType.FINITE; + private long maxSupply = 10000L; + private int decimals = 0; + private long initialSupply = 1000L; + private boolean freezeDefault = false; + private List customFees = List.of(withFixedFee(fixedFee), withFractionalFee(fractionalFee)); + + private TokenCreateBuilder() {} + + public TransactionBody build() { + final var transactionID = + TransactionID.newBuilder().accountID(payer).transactionValidStart(consensusTimestamp); + final var createTxnBody = TokenCreateTransactionBody.newBuilder() + .tokenType(tokenType) + .symbol(symbol) + .name(name) + .treasury(treasury) + .adminKey(adminKey) + .supplyKey(supplyKey) + .kycKey(kycKey) + .freezeKey(freezeKey) + .wipeKey(wipeKey) + .feeScheduleKey(feeScheduleKey) + .pauseKey(pauseKey) + .autoRenewAccount(autoRenewAccount) + .expiry(expiry) + .freezeDefault(freezeDefault) + .memo(memo) + .maxSupply(maxSupply) + .supplyType(supplyType) + .customFees(customFees); + if (autoRenewPeriod > 0) { + createTxnBody.autoRenewPeriod( + Duration.newBuilder().seconds(autoRenewPeriod).build()); + } + if (isUnique) { + createTxnBody.tokenType(TokenType.NON_FUNGIBLE_UNIQUE); + createTxnBody.initialSupply(0L); + createTxnBody.decimals(0); + } else { + createTxnBody.decimals(decimals); + createTxnBody.initialSupply(initialSupply); + } + return TransactionBody.newBuilder() + .transactionID(transactionID) + .tokenCreation(createTxnBody.build()) + .build(); + } + + public TokenCreateBuilder withUniqueToken() { + this.isUnique = true; + return this; + } + + public TokenCreateBuilder withCustomFees(List fees) { + this.customFees = fees; + return this; + } + + public TokenCreateBuilder withFreezeKey(Key freezeKey) { + this.freezeKey = freezeKey; + return this; + } + + public 
TokenCreateBuilder withAutoRenewAccount(AccountID autoRenewAccount) { + this.autoRenewAccount = autoRenewAccount; + return this; + } + + public TokenCreateBuilder withSymbol(final String symbol) { + this.symbol = symbol; + return this; + } + + public TokenCreateBuilder withName(final String name) { + this.name = name; + return this; + } + + public TokenCreateBuilder withInitialSupply(final long number) { + this.initialSupply = number; + return this; + } + + public TokenCreateBuilder withTokenType(final TokenType type) { + this.tokenType = type; + return this; + } + + public TokenCreateBuilder withDecimals(final int decimals) { + this.decimals = decimals; + return this; + } + + public TokenCreateBuilder withTreasury(final AccountID treasury) { + this.treasury = treasury; + return this; + } + + public TokenCreateBuilder withFeeScheduleKey(final Key key) { + this.feeScheduleKey = key; + return this; + } + + public TokenCreateBuilder withAdminKey(final Key key) { + this.adminKey = key; + return this; + } + + public TokenCreateBuilder withSupplyKey(final Key key) { + this.supplyKey = key; + return this; + } + + public TokenCreateBuilder withKycKey(final Key key) { + this.kycKey = key; + return this; + } + + public TokenCreateBuilder withWipeKey(final Key key) { + this.wipeKey = key; + return this; + } + + public TokenCreateBuilder withMaxSupply(final long maxSupply) { + this.maxSupply = maxSupply; + return this; + } + + public TokenCreateBuilder withSupplyType(final TokenSupplyType supplyType) { + this.supplyType = supplyType; + return this; + } + + public TokenCreateBuilder withExpiry(final long expiry) { + this.expiry = Timestamp.newBuilder().seconds(expiry).build(); + return this; + } + + public TokenCreateBuilder withAutoRenewPeriod(final long autoRenewPeriod) { + this.autoRenewPeriod = autoRenewPeriod; + return this; + } + + public TokenCreateBuilder withMemo(final String s) { + this.memo = s; + return this; + } + + public TokenCreateBuilder withFreezeDefault() { + this.freezeDefault = true; + return this; + } + } + + private void setUpTxnContext() { + txn = new TokenCreateBuilder().build(); + given(handleContext.body()).willReturn(txn); + given(handleContext.recordBuilder(any())).willReturn(recordBuilder); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + given(configProvider.getConfiguration()).willReturn(versionedConfig); + given(handleContext.configuration()).willReturn(configuration); + given(handleContext.consensusNow()).willReturn(consensusInstant); + given(compositeProps.getLongProperty("entities.maxLifetime")).willReturn(7200000L); + + attributeValidator = + new StandardizedAttributeValidator(consensusInstant::getEpochSecond, compositeProps, dynamicProperties); + expiryValidator = new StandardizedExpiryValidator( + id -> { + final var account = writableAccountStore.get( + AccountID.newBuilder().accountNum(id.num()).build()); + validateTrue(account != null, INVALID_AUTORENEW_ACCOUNT); + }, + attributeValidator, + consensusInstant::getEpochSecond, + hederaNumbers, + configProvider); + + given(handleContext.expiryValidator()).willReturn(expiryValidator); + given(handleContext.attributeValidator()).willReturn(attributeValidator); + given(dynamicProperties.maxAutoRenewDuration()).willReturn(3000000L); + given(dynamicProperties.minAutoRenewDuration()).willReturn(10L); + given(dynamicProperties.maxMemoUtf8Bytes()).willReturn(100); + given(handleContext.newEntityNum()).willReturn(newTokenId.tokenNum()); + } +} diff --git 
a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java index 2cb2436e36cc..9674be46da94 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java @@ -39,10 +39,10 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.notNull; import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.lenient; -import static org.mockito.Mockito.mock; import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.ResponseCodeEnum; @@ -61,6 +61,7 @@ import com.hedera.node.app.service.token.impl.test.handlers.util.ParityTestBase; import com.hedera.node.app.service.token.impl.util.IdConvenienceUtils; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; +import com.hedera.node.app.spi.validation.EntityType; import com.hedera.node.app.spi.validation.ExpiryValidator; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; @@ -71,8 +72,18 @@ import org.hamcrest.Matchers; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +@ExtendWith(MockitoExtension.class) class TokenDissociateFromAccountHandlerTest extends ParityTestBase { + @Mock(strictness = Mock.Strictness.LENIENT) + private ExpiryValidator expiryValidator; + + @Mock(strictness = Mock.Strictness.LENIENT) + private HandleContext handleContext; + private static final AccountID ACCOUNT_1339 = AccountID.newBuilder().accountNum(MISC_ACCOUNT.getAccountNum()).build(); private static final AccountID ACCOUNT_2020 = IdConvenienceUtils.fromAccountNum(2020); @@ -178,7 +189,8 @@ void rejectsExpiredAccount() { final var txn = newDissociateTxn( AccountID.newBuilder().accountNum(accountNumber).build(), List.of(TOKEN_555_ID)); given(context.body()).willReturn(txn); - + given(expiryValidator.expirationStatus(eq(EntityType.ACCOUNT), eq(true), anyLong())) + .willReturn(ACCOUNT_EXPIRED_AND_PENDING_REMOVAL); Assertions.assertThatThrownBy(() -> subject.handle(context)) .isInstanceOf(HandleException.class) .has(responseCode(ACCOUNT_EXPIRED_AND_PENDING_REMOVAL)); @@ -578,9 +590,6 @@ void multipleTokenRelsAreRemoved() { } private HandleContext mockContext() { - final var handleContext = mock(HandleContext.class); - - final var expiryValidator = mock(ExpiryValidator.class); given(handleContext.expiryValidator()).willReturn(expiryValidator); given(expiryValidator.expirationStatus(notNull(), anyBoolean(), anyLong())) .willReturn(ResponseCodeEnum.OK); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java 
b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java index 9d262482098a..258756a2075d 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java @@ -73,6 +73,7 @@ import com.swirlds.common.utility.CommonUtils; import com.swirlds.config.api.Configuration; import edu.umd.cs.findbugs.annotations.NonNull; +import java.time.Instant; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -203,6 +204,7 @@ public class CryptoTokenHandlerTestBase extends StateBuilderUtil { /* ---------- Misc ---------- */ protected final Timestamp consensusTimestamp = Timestamp.newBuilder().seconds(1_234_567L).build(); + protected final Instant consensusInstant = Instant.ofEpochSecond(1_234_567L); protected final String tokenName = "test token"; protected final String tokenSymbol = "TT"; protected final String memo = "test memo"; @@ -266,10 +268,12 @@ public class CryptoTokenHandlerTestBase extends StateBuilderUtil { protected WritableStates writableStates; protected Configuration configuration; + protected VersionedConfigImpl versionedConfig; @BeforeEach public void setUp() { configuration = new HederaTestConfigBuilder().getOrCreateConfig(); + versionedConfig = new VersionedConfigImpl(configuration, 1); givenValidAccounts(); givenValidTokens(); givenValidTokenRelations(); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenHandlerHelperTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenHandlerHelperTest.java index e5700f307a4e..f4a04fbc8f93 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenHandlerHelperTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/TokenHandlerHelperTest.java @@ -16,11 +16,13 @@ package com.hedera.node.app.service.token.impl.test.util; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; import static com.hedera.node.app.service.token.impl.util.TokenHandlerHelper.getIfUsable; import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.notNull; +import static org.mockito.BDDMockito.given; import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.ResponseCodeEnum; @@ -37,7 +39,6 @@ import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.BDDMockito; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; @@ -62,43 +63,50 @@ class TokenHandlerHelperTest { @SuppressWarnings("DataFlowIssue") @Test void account_getIfUsable_nullArg() { - Assertions.assertThatThrownBy(() -> TokenHandlerHelper.getIfUsable(null, accountStore, expiryValidator)) + Assertions.assertThatThrownBy( + () -> TokenHandlerHelper.getIfUsable(null, accountStore, expiryValidator, INVALID_ACCOUNT_ID)) .isInstanceOf(NullPointerException.class); final var acctId = ACCT_2300; - 
Assertions.assertThatThrownBy(() -> TokenHandlerHelper.getIfUsable(acctId, null, expiryValidator)) + Assertions.assertThatThrownBy( + () -> TokenHandlerHelper.getIfUsable(acctId, null, expiryValidator, INVALID_ACCOUNT_ID)) .isInstanceOf(NullPointerException.class); - Assertions.assertThatThrownBy(() -> TokenHandlerHelper.getIfUsable(acctId, accountStore, null)) + Assertions.assertThatThrownBy( + () -> TokenHandlerHelper.getIfUsable(acctId, accountStore, null, INVALID_ACCOUNT_ID)) + .isInstanceOf(NullPointerException.class); + Assertions.assertThatThrownBy(() -> TokenHandlerHelper.getIfUsable(acctId, accountStore, expiryValidator, null)) .isInstanceOf(NullPointerException.class); } @Test void account_getIfUsable_nullAccount() { - BDDMockito.given(accountStore.getAccountById(notNull())).willReturn(null); + given(accountStore.getAccountById(notNull())).willReturn(null); - Assertions.assertThatThrownBy(() -> TokenHandlerHelper.getIfUsable(ACCT_2300, accountStore, expiryValidator)) + Assertions.assertThatThrownBy(() -> + TokenHandlerHelper.getIfUsable(ACCT_2300, accountStore, expiryValidator, INVALID_ACCOUNT_ID)) .isInstanceOf(HandleException.class) - .has(responseCode(ResponseCodeEnum.INVALID_ACCOUNT_ID)); + .has(responseCode(INVALID_ACCOUNT_ID)); } @Test void account_getIfUsable_deletedAccount() { - BDDMockito.given(accountStore.getAccountById(notNull())) + given(accountStore.getAccountById(notNull())) .willReturn(Account.newBuilder() .accountNumber(ACCT_2300.accountNumOrThrow()) .tinybarBalance(0L) .deleted(true) .build()); - Assertions.assertThatThrownBy(() -> TokenHandlerHelper.getIfUsable(ACCT_2300, accountStore, expiryValidator)) + Assertions.assertThatThrownBy(() -> + TokenHandlerHelper.getIfUsable(ACCT_2300, accountStore, expiryValidator, INVALID_ACCOUNT_ID)) .isInstanceOf(HandleException.class) .has(responseCode(ResponseCodeEnum.ACCOUNT_DELETED)); } @Test void account_getIfUsable_expiredAndPendingRemovalAccount() { - BDDMockito.given(accountStore.getAccountById(notNull())) + given(accountStore.getAccountById(notNull())) .willReturn(Account.newBuilder() .accountNumber(ACCT_2300.accountNumOrThrow()) .tinybarBalance(0L) @@ -106,31 +114,17 @@ void account_getIfUsable_expiredAndPendingRemovalAccount() { .smartContract(false) .expiredAndPendingRemoval(true) .build()); - - Assertions.assertThatThrownBy(() -> TokenHandlerHelper.getIfUsable(ACCT_2300, accountStore, expiryValidator)) + given(expiryValidator.expirationStatus(notNull(), anyBoolean(), anyLong())) + .willReturn(ResponseCodeEnum.ACCOUNT_EXPIRED_AND_PENDING_REMOVAL); + Assertions.assertThatThrownBy(() -> + TokenHandlerHelper.getIfUsable(ACCT_2300, accountStore, expiryValidator, INVALID_ACCOUNT_ID)) .isInstanceOf(HandleException.class) .has(responseCode(ResponseCodeEnum.ACCOUNT_EXPIRED_AND_PENDING_REMOVAL)); } - @Test - void contract_getIfUsable_expiredAndPendingRemovalContract() { - BDDMockito.given(accountStore.getAccountById(notNull())) - .willReturn(Account.newBuilder() - .accountNumber(ACCT_2300.accountNumOrThrow()) - .tinybarBalance(0L) - .deleted(false) - .smartContract(true) - .expiredAndPendingRemoval(true) - .build()); - - Assertions.assertThatThrownBy(() -> TokenHandlerHelper.getIfUsable(ACCT_2300, accountStore, expiryValidator)) - .isInstanceOf(HandleException.class) - .has(responseCode(ResponseCodeEnum.CONTRACT_EXPIRED_AND_PENDING_REMOVAL)); - } - @Test void account_getIfUsable_accountTypeIsExpired() { - BDDMockito.given(accountStore.getAccountById(notNull())) + given(accountStore.getAccountById(notNull())) 
.willReturn(Account.newBuilder() .accountNumber(ACCT_2300.accountNumOrThrow()) .tinybarBalance(0L) @@ -138,17 +132,18 @@ void account_getIfUsable_accountTypeIsExpired() { .smartContract(false) .expiredAndPendingRemoval(false) .build()); - BDDMockito.given(expiryValidator.expirationStatus(notNull(), anyBoolean(), anyLong())) + given(expiryValidator.expirationStatus(notNull(), anyBoolean(), anyLong())) .willReturn(ResponseCodeEnum.ACCOUNT_EXPIRED_AND_PENDING_REMOVAL); - Assertions.assertThatThrownBy(() -> TokenHandlerHelper.getIfUsable(ACCT_2300, accountStore, expiryValidator)) + Assertions.assertThatThrownBy(() -> + TokenHandlerHelper.getIfUsable(ACCT_2300, accountStore, expiryValidator, INVALID_ACCOUNT_ID)) .isInstanceOf(HandleException.class) .has(responseCode(ResponseCodeEnum.ACCOUNT_EXPIRED_AND_PENDING_REMOVAL)); } @Test void contract_getIfUsable_contractTypeIsExpired() { - BDDMockito.given(accountStore.getAccountById(notNull())) + given(accountStore.getAccountById(notNull())) .willReturn(Account.newBuilder() .accountNumber(ACCT_2300.accountNumOrThrow()) .tinybarBalance(0L) @@ -156,17 +151,18 @@ void contract_getIfUsable_contractTypeIsExpired() { .smartContract(true) .expiredAndPendingRemoval(false) .build()); - BDDMockito.given(expiryValidator.expirationStatus(notNull(), anyBoolean(), anyLong())) + given(expiryValidator.expirationStatus(notNull(), anyBoolean(), anyLong())) .willReturn(ResponseCodeEnum.CONTRACT_EXPIRED_AND_PENDING_REMOVAL); - Assertions.assertThatThrownBy(() -> TokenHandlerHelper.getIfUsable(ACCT_2300, accountStore, expiryValidator)) + Assertions.assertThatThrownBy(() -> + TokenHandlerHelper.getIfUsable(ACCT_2300, accountStore, expiryValidator, INVALID_ACCOUNT_ID)) .isInstanceOf(HandleException.class) .has(responseCode(ResponseCodeEnum.CONTRACT_EXPIRED_AND_PENDING_REMOVAL)); } @Test void account_getIfUsable_usableAccount() { - BDDMockito.given(accountStore.getAccountById(notNull())) + given(accountStore.getAccountById(notNull())) .willReturn(Account.newBuilder() .accountNumber(ACCT_2300.accountNumOrThrow()) .tinybarBalance(0L) @@ -175,16 +171,16 @@ void account_getIfUsable_usableAccount() { .expiredAndPendingRemoval(false) .build()); - BDDMockito.given(expiryValidator.expirationStatus(notNull(), anyBoolean(), anyLong())) + given(expiryValidator.expirationStatus(notNull(), anyBoolean(), anyLong())) .willReturn(ResponseCodeEnum.OK); - final var result = TokenHandlerHelper.getIfUsable(ACCT_2300, accountStore, expiryValidator); + final var result = TokenHandlerHelper.getIfUsable(ACCT_2300, accountStore, expiryValidator, INVALID_ACCOUNT_ID); Assertions.assertThat(result).isNotNull(); } @Test void contract_getIfUsable_usableContract() { - BDDMockito.given(accountStore.getAccountById(notNull())) + given(accountStore.getAccountById(notNull())) .willReturn(Account.newBuilder() .accountNumber(ACCT_2300.accountNumOrThrow()) .tinybarBalance(0L) @@ -193,10 +189,10 @@ void contract_getIfUsable_usableContract() { .expiredAndPendingRemoval(false) .build()); - BDDMockito.given(expiryValidator.expirationStatus(notNull(), anyBoolean(), anyLong())) + given(expiryValidator.expirationStatus(notNull(), anyBoolean(), anyLong())) .willReturn(ResponseCodeEnum.OK); - final var result = TokenHandlerHelper.getIfUsable(ACCT_2300, accountStore, expiryValidator); + final var result = TokenHandlerHelper.getIfUsable(ACCT_2300, accountStore, expiryValidator, INVALID_ACCOUNT_ID); Assertions.assertThat(result).isNotNull(); } @@ -211,7 +207,7 @@ void token_getIfUsable_nullArg() { @Test void 
token_getIfUsable_nullToken() { - BDDMockito.given(tokenStore.get(notNull())).willReturn(null); + given(tokenStore.get(notNull())).willReturn(null); Assertions.assertThatThrownBy(() -> getIfUsable(TOKEN_ID_45, tokenStore)) .isInstanceOf(HandleException.class) @@ -220,7 +216,7 @@ void token_getIfUsable_nullToken() { @Test void token_getIfUsable_deletedToken() { - BDDMockito.given(tokenStore.get(notNull())) + given(tokenStore.get(notNull())) .willReturn(Token.newBuilder() .tokenNumber(TOKEN_ID_45.tokenNum()) .deleted(true) @@ -234,7 +230,7 @@ void token_getIfUsable_deletedToken() { @Test void token_getIfUsable_pausedToken() { - BDDMockito.given(tokenStore.get(notNull())) + given(tokenStore.get(notNull())) .willReturn(Token.newBuilder() .tokenNumber(TOKEN_ID_45.tokenNum()) .deleted(false) @@ -248,7 +244,7 @@ void token_getIfUsable_pausedToken() { @Test void token_getIfUsable_usableToken() { - BDDMockito.given(tokenStore.get(notNull())) + given(tokenStore.get(notNull())) .willReturn(Token.newBuilder() .tokenNumber(TOKEN_ID_45.tokenNum()) .deleted(false) @@ -281,7 +277,7 @@ void tokenRel_getIfUsable_notFound() { @Test void tokenRel_getIfUsable_usableTokenRel() { - BDDMockito.given(tokenRelStore.get(notNull(), notNull())) + given(tokenRelStore.get(notNull(), notNull())) .willReturn(TokenRelation.newBuilder() .accountNumber(ACCT_2300.accountNumOrThrow()) .tokenNumber(TOKEN_ID_45.tokenNum()) diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/CustomFeesValidatorTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/CustomFeesValidatorTest.java index 9bf6f29729d5..89841f3ff972 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/CustomFeesValidatorTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/CustomFeesValidatorTest.java @@ -23,6 +23,7 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID_IN_CUSTOM_FEES; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_NOT_ASSOCIATED_TO_FEE_COLLECTOR; import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.AssertionsForClassTypes.assertThatNoException; import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy; import static org.mockito.BDDMockito.given; @@ -58,8 +59,8 @@ public void commonSetUp() { } @Test - @DisplayName("throws if fee collector is not set") - void validateNullFeeCollector() { + @DisplayName("throws if fee collector is not set on fee schedule update") + void validateNullFeeCollectorOnFeeScheduleUpdate() { final var nullCollectorFee = setFeeCollector(customFees, null); assertThatThrownBy(() -> subject.validateForFeeScheduleUpdate( fungibleToken, @@ -72,8 +73,8 @@ void validateNullFeeCollector() { } @Test - @DisplayName("throws if fee collector doesn't exist") - void validateMissingFeeCollector() { + @DisplayName("throws if fee collector doesn't exist on fee schedule update") + void validateMissingFeeCollectorOnFeeScheduleUpdate() { final var missingFeeCollectorFee = setFeeCollector( customFees, AccountID.newBuilder().accountNum(100).build()); assertThatThrownBy(() -> subject.validateForFeeScheduleUpdate( @@ -87,8 +88,8 @@ void validateMissingFeeCollector() { } @Test - @DisplayName("throws if fee 
collector is default instance") - void validateDefaultInstance() { + @DisplayName("throws if fee collector is default instance on fee schedule update") + void validateDefaultInstanceOnFeeScheduleUpdate() { final var missingFeeCollectorFee = setFeeCollector(customFees, AccountID.newBuilder().build()); assertThatThrownBy(() -> subject.validateForFeeScheduleUpdate( @@ -102,16 +103,16 @@ void validateDefaultInstance() { } @Test - @DisplayName("fixed fee and fractional fee for fungible tokens are allowed") - void validateFixedFeeAndFractionalFees() { + @DisplayName("fixed fee and fractional fee for fungible tokens are allowed on fee schedule update") + void validateFixedFeeAndFractionalFeesOnFeeScheduleUpdate() { assertThatNoException() .isThrownBy(() -> subject.validateForFeeScheduleUpdate( fungibleToken, readableAccountStore, readableTokenRelStore, writableTokenStore, customFees)); } @Test - @DisplayName("royalty fee can be set only for non fungible unique tokens") - void royaltyFeeForFungibleTokenFails() { + @DisplayName("royalty fee can be set only for non fungible unique tokens on fee schedule update") + void royaltyFeeForFungibleTokenFailOnFeeScheduleUpdates() { final List feeWithRoyalty = new ArrayList<>(); feeWithRoyalty.add(withRoyaltyFee(royaltyFee)); assertThatThrownBy(() -> subject.validateForFeeScheduleUpdate( @@ -121,8 +122,8 @@ void royaltyFeeForFungibleTokenFails() { } @Test - @DisplayName("royalty fee can be set for non fungible unique tokens") - void royaltyFeeForNonFungibleTokenSucceeds() { + @DisplayName("royalty fee can be set for non fungible unique tokens on fee schedule update") + void royaltyFeeForNonFungibleTokenSucceedsOnFeeScheduleUpdate() { refreshWritableStores(); final List feeWithRoyalty = new ArrayList<>(); feeWithRoyalty.add(withRoyaltyFee(royaltyFee)); @@ -136,8 +137,8 @@ void royaltyFeeForNonFungibleTokenSucceeds() { } @Test - @DisplayName("royalty fee for NFTs will fail if the denominating token is missing") - void royaltyFeeFailsWithMissingToken() { + @DisplayName("royalty fee for NFTs will fail if the denominating token is missing on fee schedule update") + void royaltyFeeFailsWithMissingTokenOnFeeScheduleUpdate() { writableTokenState = emptyWritableTokenState(); given(writableStates.get(TOKENS)).willReturn(writableTokenState); writableTokenStore = new WritableTokenStore(writableStates); @@ -155,8 +156,8 @@ void royaltyFeeFailsWithMissingToken() { } @Test - @DisplayName("royalty fee for NFTs will fail if the denominating token is missing") - void royaltyFeeFailsFungibleDenom() { + @DisplayName("royalty fee for NFTs will fail if the denominating token is missing on fee schedule update") + void royaltyFeeFailsFungibleDenomOnFeeScheduleUpdate() { refreshWritableStores(); final List feeWithRoyalty = new ArrayList<>(); final var nftDenom = royaltyFee @@ -178,7 +179,7 @@ void royaltyFeeFailsFungibleDenom() { } @Test - void missingTokenAssociationForRoyaltyFeeFails() { + void missingTokenAssociationForRoyaltyFeeFailsOnFeeScheduleUpdate() { refreshWritableStores(); readableTokenRelState = emptyReadableTokenRelsStateBuilder().build(); given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); @@ -195,8 +196,8 @@ void missingTokenAssociationForRoyaltyFeeFails() { } @Test - @DisplayName("fractional fee can be set only for fungible unique tokens") - void fractionalFeeForNonFungibleTokenFails() { + @DisplayName("fractional fee can be set only for fungible unique tokens on fee schedule update") + void 
fractionalFeeForNonFungibleTokenFailsOnFeeScheduleUpdate() { final List feeWithFractional = new ArrayList<>(); feeWithFractional.add(withFractionalFee(fractionalFee)); assertThatThrownBy(() -> subject.validateForFeeScheduleUpdate( @@ -210,8 +211,8 @@ void fractionalFeeForNonFungibleTokenFails() { } @Test - @DisplayName("fixed fee can be set for non fungible unique tokens") - void fixedFeeIsAllowedForNonFungibleToken() { + @DisplayName("fixed fee can be set for non fungible unique tokens on fee schedule update") + void fixedFeeIsAllowedForNonFungibleTokenOnFeeScheduleUpdate() { refreshWritableStores(); final List feeWithFixed = new ArrayList<>(); feeWithFixed.add(withFixedFee(fixedFee)); @@ -225,8 +226,9 @@ void fixedFeeIsAllowedForNonFungibleToken() { } @Test - @DisplayName("fails if there is no token relation between token and fee collector in fixed fee") - void failsIfTokenRelationIsMissingInFixedFee() { + @DisplayName( + "fails if there is no token relation between token and fee collector in fixed fee on fee schedule update") + void failsIfTokenRelationIsMissingInFixedFeeOnFeeScheduleUpdate() { readableTokenRelState = emptyReadableTokenRelsStateBuilder().build(); given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates); @@ -242,8 +244,9 @@ void failsIfTokenRelationIsMissingInFixedFee() { } @Test - @DisplayName("fails if there is no token relation between token and fee collector in fractional fee") - void failsIfTokenRelationIsMissingForFractionalFee() { + @DisplayName( + "fails if there is no token relation between token and fee collector in fractional fee on fee schedule update") + void failsIfTokenRelationIsMissingForFractionalFeeOnFeeScheduleUpdate() { readableTokenRelState = emptyReadableTokenRelsStateBuilder().build(); given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates); @@ -259,8 +262,8 @@ void failsIfTokenRelationIsMissingForFractionalFee() { } @Test - @DisplayName("token denomination should be fungible common for fixed fee") - void validateTokenDenominationForFixedFee() { + @DisplayName("token denomination should be fungible common for fixed fee on fee schedule update") + void validateTokenDenominationForFixedFeeOnFeeScheduleUpdate() { refreshWritableStores(); final var newFee = fixedFee.copyBuilder() .denominatingTokenId(TokenID.newBuilder() @@ -278,15 +281,7 @@ void validateTokenDenominationForFixedFee() { } @Test - @DisplayName("Custom fee validation for TokenCreate is not implemented") - void validateCustomFeeForCreation() { - assertThatThrownBy(() -> subject.validateCreation( - fungibleToken, readableAccountStore, readableTokenRelStore, writableTokenStore, customFees)) - .isInstanceOf(UnsupportedOperationException.class); - } - - @Test - void nullParamsThrow() { + void nullParamsThrowOnFeeScheduleUpdate() { assertThatThrownBy(() -> subject.validateForFeeScheduleUpdate( null, readableAccountStore, readableTokenRelStore, writableTokenStore, customFees)) .isInstanceOf(NullPointerException.class); @@ -305,7 +300,7 @@ void nullParamsThrow() { } @Test - void failsIfEmptyCustomFees() { + void failsIfEmptyCustomFeesOnFeeScheduleUpdate() { assertThatThrownBy(() -> subject.validateForFeeScheduleUpdate( fungibleToken, readableAccountStore, @@ -318,6 +313,265 @@ void failsIfEmptyCustomFees() { .hasMessage("Unexpected value for custom fee type: UNSET"); } + @Test + @DisplayName("Custom fee validation 
for TokenCreate") + void validateCustomFeeForCreation() { + final var requireAutoAssociation = subject.validateForCreation( + fungibleToken, readableAccountStore, readableTokenRelStore, writableTokenStore, customFees); + assertThat(requireAutoAssociation).isEmpty(); + } + + @Test + @DisplayName("Custom fee validation for TokenCreate with self denominating tokenId") + void validateCustomFeeForCreationWithSelfDenomination() { + final var fixesFeeWithSelfDenomination = fixedFee.copyBuilder() + .denominatingTokenId(TokenID.newBuilder().tokenNum(0).build()) + .build(); + final var expectedFeeWithNewToken = + fixedFee.copyBuilder().denominatingTokenId(fungibleTokenId).build(); + final var fees = List.of(withFixedFee(fixesFeeWithSelfDenomination), withFractionalFee(fractionalFee)); + final var requireAutoAssociation = subject.validateForCreation( + fungibleToken, readableAccountStore, readableTokenRelStore, writableTokenStore, fees); + assertThat(requireAutoAssociation).hasSize(1); + assertThat(requireAutoAssociation).contains(withFixedFee(expectedFeeWithNewToken)); + } + + @Test + @DisplayName("throws if fee collector is not set on token create") + void validateNullFeeCollectorOnTokenCreate() { + final var nullCollectorFee = setFeeCollector(customFees, null); + assertThatThrownBy(() -> subject.validateForCreation( + fungibleToken, + readableAccountStore, + readableTokenRelStore, + writableTokenStore, + nullCollectorFee)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_CUSTOM_FEE_COLLECTOR)); + } + + @Test + @DisplayName("throws if fee collector doesn't exist on token create") + void validateMissingFeeCollectorOnTokenCreate() { + final var missingFeeCollectorFee = setFeeCollector( + customFees, AccountID.newBuilder().accountNum(100).build()); + assertThatThrownBy(() -> subject.validateForCreation( + fungibleToken, + readableAccountStore, + readableTokenRelStore, + writableTokenStore, + missingFeeCollectorFee)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_CUSTOM_FEE_COLLECTOR)); + } + + @Test + @DisplayName("throws if fee collector is default instance") + void validateDefaultInstanceOnTokenCreate() { + final var missingFeeCollectorFee = + setFeeCollector(customFees, AccountID.newBuilder().build()); + assertThatThrownBy(() -> subject.validateForCreation( + fungibleToken, + readableAccountStore, + readableTokenRelStore, + writableTokenStore, + missingFeeCollectorFee)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_CUSTOM_FEE_COLLECTOR)); + } + + @Test + @DisplayName("fixed fee and fractional fee for fungible tokens are allowed") + void validateFixedFeeAndFractionalFeesOnTokenCreate() { + assertThatNoException() + .isThrownBy(() -> subject.validateForCreation( + fungibleToken, readableAccountStore, readableTokenRelStore, writableTokenStore, customFees)); + } + + @Test + @DisplayName("royalty fee can be set only for non fungible unique tokens") + void royaltyFeeForFungibleTokenFailOnTokenCreate() { + final List feeWithRoyalty = new ArrayList<>(); + feeWithRoyalty.add(withRoyaltyFee(royaltyFee)); + assertThatThrownBy(() -> subject.validateForCreation( + fungibleToken, readableAccountStore, readableTokenRelStore, writableTokenStore, feeWithRoyalty)) + .isInstanceOf(HandleException.class) + .has(responseCode(CUSTOM_ROYALTY_FEE_ONLY_ALLOWED_FOR_NON_FUNGIBLE_UNIQUE)); + } + + @Test + @DisplayName("royalty fee can be set for non fungible unique tokens") + void royaltyFeeForNonFungibleTokenSucceedsOnTokenCreate() { + refreshWritableStores(); + 
final List feeWithRoyalty = new ArrayList<>(); + feeWithRoyalty.add(withRoyaltyFee(royaltyFee)); + assertThatNoException() + .isThrownBy(() -> subject.validateForCreation( + nonFungibleToken, + readableAccountStore, + readableTokenRelStore, + writableTokenStore, + feeWithRoyalty)); + } + + @Test + @DisplayName("royalty fee for NFTs will fail if the denominating token is missing") + void royaltyFeeFailsWithMissingTokenOnTokenCreate() { + writableTokenState = emptyWritableTokenState(); + given(writableStates.get(TOKENS)).willReturn(writableTokenState); + writableTokenStore = new WritableTokenStore(writableStates); + + final List feeWithRoyalty = new ArrayList<>(); + feeWithRoyalty.add(withRoyaltyFee(royaltyFee)); + assertThatThrownBy(() -> subject.validateForCreation( + nonFungibleToken, + readableAccountStore, + readableTokenRelStore, + writableTokenStore, + feeWithRoyalty)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_TOKEN_ID_IN_CUSTOM_FEES)); + } + + @Test + @DisplayName("royalty fee for NFTs will fail if the denominating token is missing") + void royaltyFeeFailsFungibleDenomOnTokenCreate() { + refreshWritableStores(); + final List feeWithRoyalty = new ArrayList<>(); + final var nftDenom = royaltyFee + .copyBuilder() + .fallbackFee(fixedFee.copyBuilder() + .denominatingTokenId(TokenID.newBuilder() + .tokenNum(nonFungibleTokenNum.longValue()) + .build())) + .build(); + feeWithRoyalty.add(withRoyaltyFee(nftDenom)); + assertThatThrownBy(() -> subject.validateForCreation( + nonFungibleToken, + readableAccountStore, + readableTokenRelStore, + writableTokenStore, + feeWithRoyalty)) + .isInstanceOf(HandleException.class) + .has(responseCode(CUSTOM_FEE_DENOMINATION_MUST_BE_FUNGIBLE_COMMON)); + } + + @Test + void missingTokenAssociationForRoyaltyFeeFailsOnTokenCreate() { + refreshWritableStores(); + readableTokenRelState = emptyReadableTokenRelsStateBuilder().build(); + given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); + readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates); + + assertThatThrownBy(() -> subject.validateForCreation( + nonFungibleToken, + readableAccountStore, + readableTokenRelStore, + writableTokenStore, + List.of(withRoyaltyFee(royaltyFee)))) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_NOT_ASSOCIATED_TO_FEE_COLLECTOR)); + } + + @Test + @DisplayName("fractional fee can be set only for fungible unique tokens") + void fractionalFeeForNonFungibleTokenFailsOnTokenCreate() { + final List feeWithFractional = new ArrayList<>(); + feeWithFractional.add(withFractionalFee(fractionalFee)); + assertThatThrownBy(() -> subject.validateForCreation( + nonFungibleToken, + readableAccountStore, + readableTokenRelStore, + writableTokenStore, + feeWithFractional)) + .isInstanceOf(HandleException.class) + .has(responseCode(CUSTOM_FRACTIONAL_FEE_ONLY_ALLOWED_FOR_FUNGIBLE_COMMON)); + } + + @Test + @DisplayName("fixed fee can be set for non fungible unique tokens") + void fixedFeeIsAllowedForNonFungibleTokenOnTokenCreate() { + refreshWritableStores(); + final List feeWithFixed = new ArrayList<>(); + feeWithFixed.add(withFixedFee(fixedFee)); + assertThatNoException() + .isThrownBy(() -> subject.validateForCreation( + nonFungibleToken, + readableAccountStore, + readableTokenRelStore, + writableTokenStore, + feeWithFixed)); + } + + @Test + @DisplayName("fails if there is no token relation between token and fee collector in fixed fee") + void failsIfTokenRelationIsMissingInFixedFeeOnTokenCreate() { + readableTokenRelState 
= emptyReadableTokenRelsStateBuilder().build(); + given(readableStates.get(TOKEN_RELS)).willReturn(readableTokenRelState); + readableTokenRelStore = new ReadableTokenRelationStoreImpl(readableStates); + + assertThatThrownBy(() -> subject.validateForCreation( + fungibleToken, + readableAccountStore, + readableTokenRelStore, + writableTokenStore, + List.of(withFixedFee(fixedFee)))) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_NOT_ASSOCIATED_TO_FEE_COLLECTOR)); + } + + @Test + @DisplayName("token denomination should be fungible common for fixed fee") + void validateTokenDenominationForFixedFeeOnTokenCreate() { + refreshWritableStores(); + final var newFee = fixedFee.copyBuilder() + .denominatingTokenId(TokenID.newBuilder() + .tokenNum(nonFungibleTokenNum.longValue()) + .build()) + .build(); + assertThatThrownBy(() -> subject.validateForCreation( + fungibleToken, + readableAccountStore, + readableTokenRelStore, + writableTokenStore, + List.of(withFixedFee(newFee)))) + .isInstanceOf(HandleException.class) + .has(responseCode(CUSTOM_FEE_DENOMINATION_MUST_BE_FUNGIBLE_COMMON)); + } + + @Test + void nullParamsThrowOnTokenCreate() { + assertThatThrownBy(() -> subject.validateForCreation( + null, readableAccountStore, readableTokenRelStore, writableTokenStore, customFees)) + .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> subject.validateForCreation( + fungibleToken, null, readableTokenRelStore, writableTokenStore, customFees)) + .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> subject.validateForCreation( + fungibleToken, readableAccountStore, null, writableTokenStore, customFees)) + .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> subject.validateForCreation( + fungibleToken, readableAccountStore, readableTokenRelStore, null, customFees)) + .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> subject.validateForCreation( + fungibleToken, readableAccountStore, readableTokenRelStore, writableTokenStore, null)) + .isInstanceOf(NullPointerException.class); + } + + @Test + void failsIfEmptyCustomFeesOnTokenCreate() { + assertThatThrownBy(() -> subject.validateForCreation( + fungibleToken, + readableAccountStore, + readableTokenRelStore, + writableTokenStore, + List.of(CustomFee.newBuilder() + .feeCollectorAccountId(AccountID.newBuilder().accountNum(accountNum.longValue())) + .build()))) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Unexpected value for custom fee type: UNSET"); + } + private List setFeeCollector(List original, AccountID feeCollector) { List copy = new ArrayList<>(); for (CustomFee fee : original) { diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/TokenAttributesValidatorTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/TokenAttributesValidatorTest.java new file mode 100644 index 000000000000..0df749f46043 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/TokenAttributesValidatorTest.java @@ -0,0 +1,397 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.test.validators; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ADMIN_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_CUSTOM_FEE_SCHEDULE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_FREEZE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_KYC_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_PAUSE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_SUPPLY_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_WIPE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ZERO_BYTE_IN_STRING; +import static com.hedera.hapi.node.base.ResponseCodeEnum.MISSING_TOKEN_NAME; +import static com.hedera.hapi.node.base.ResponseCodeEnum.MISSING_TOKEN_SYMBOL; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_NAME_TOO_LONG; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_SYMBOL_TOO_LONG; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static com.hedera.test.utils.KeyUtils.A_COMPLEX_KEY; +import static org.assertj.core.api.Assertions.assertThatNoException; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.BDDMockito.given; + +import com.hedera.hapi.node.base.Key; +import com.hedera.node.app.config.VersionedConfigImpl; +import com.hedera.node.app.service.token.impl.validators.TokenAttributesValidator; +import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class TokenAttributesValidatorTest { + @Mock + private ConfigProvider configProvider; + + private TokenAttributesValidator subject; + + @BeforeEach + void setUp() { + subject = new TokenAttributesValidator(configProvider); + } + + @Test + void failsForZeroLengthSymbol() { + final var configuration = new HederaTestConfigBuilder() + .withValue("tokens.maxSymbolUtf8Bytes", "10") + .getOrCreateConfig(); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + + assertThatThrownBy(() -> subject.validateTokenSymbol("")) + .isInstanceOf(HandleException.class) + .has(responseCode(MISSING_TOKEN_SYMBOL)); + } + + @Test + void failsForNullSymbol() { + final var configuration = new HederaTestConfigBuilder() + .withValue("tokens.maxSymbolUtf8Bytes", "10") + .getOrCreateConfig(); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + + assertThatThrownBy(() -> subject.validateTokenSymbol(null)) + .isInstanceOf(HandleException.class) + .has(responseCode(MISSING_TOKEN_SYMBOL)); + } + + @Test + void failsForVeryLongSymbol() { + final var configuration = new 
HederaTestConfigBuilder() + .withValue("tokens.maxSymbolUtf8Bytes", "10") + .getOrCreateConfig(); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + + assertThatThrownBy(() -> subject.validateTokenSymbol( + "12345678901234567890123456789012345678901234567890123456789012345678901234567890")) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_SYMBOL_TOO_LONG)); + } + + @Test + void failsForZeroByteInSymbol() { + final var configuration = new HederaTestConfigBuilder() + .withValue("tokens.maxSymbolUtf8Bytes", "10") + .getOrCreateConfig(); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + + assertThatThrownBy(() -> subject.validateTokenSymbol("\0")) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_ZERO_BYTE_IN_STRING)); + } + + @Test + void failsForZeroByteInName() { + final var configuration = new HederaTestConfigBuilder() + .withValue("tokens.maxTokenNameUtf8Bytes", "10") + .getOrCreateConfig(); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + + assertThatThrownBy(() -> subject.validateTokenName("\0")) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_ZERO_BYTE_IN_STRING)); + } + + @Test + void failsForZeroLengthName() { + final var configuration = new HederaTestConfigBuilder() + .withValue("tokens.maxTokenNameUtf8Bytes", "10") + .getOrCreateConfig(); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + + assertThatThrownBy(() -> subject.validateTokenName("")) + .isInstanceOf(HandleException.class) + .has(responseCode(MISSING_TOKEN_NAME)); + } + + @Test + void failsForNullName() { + final var configuration = new HederaTestConfigBuilder() + .withValue("tokens.maxTokenNameUtf8Bytes", "10") + .getOrCreateConfig(); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + + assertThatThrownBy(() -> subject.validateTokenName(null)) + .isInstanceOf(HandleException.class) + .has(responseCode(MISSING_TOKEN_NAME)); + } + + @Test + void failsForVeryLongName() { + final var configuration = new HederaTestConfigBuilder() + .withValue("tokens.maxTokenNameUtf8Bytes", "10") + .getOrCreateConfig(); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + + assertThatThrownBy(() -> subject.validateTokenName( + "12345678901234567890123456789012345678901234567890123456789012345678901234567890")) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_NAME_TOO_LONG)); + } + + @Test + void validatesKeys() { + assertThatThrownBy(() -> subject.checkKeys( + true, + Key.DEFAULT, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_ADMIN_KEY)); + assertThatThrownBy(() -> subject.checkKeys( + true, + A_COMPLEX_KEY, + true, + Key.DEFAULT, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_KYC_KEY)); + assertThatThrownBy(() -> subject.checkKeys( + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + Key.DEFAULT, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY)) + .isInstanceOf(HandleException.class) + 
.has(responseCode(INVALID_WIPE_KEY)); + assertThatThrownBy(() -> subject.checkKeys( + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + Key.DEFAULT, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_SUPPLY_KEY)); + assertThatThrownBy(() -> subject.checkKeys( + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + Key.DEFAULT, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_FREEZE_KEY)); + assertThatThrownBy(() -> subject.checkKeys( + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + Key.DEFAULT, + true, + A_COMPLEX_KEY)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_CUSTOM_FEE_SCHEDULE_KEY)); + assertThatThrownBy(() -> subject.checkKeys( + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + Key.DEFAULT)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_PAUSE_KEY)); + } + + @Test + void validatesKeysWithNulls() { + assertThatNoException() + .isThrownBy(() -> subject.checkKeys( + false, + Key.DEFAULT, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY)); + assertThatNoException() + .isThrownBy(() -> subject.checkKeys( + true, + A_COMPLEX_KEY, + false, + Key.DEFAULT, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY)); + assertThatNoException() + .isThrownBy(() -> subject.checkKeys( + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + false, + Key.DEFAULT, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY)); + assertThatNoException() + .isThrownBy(() -> subject.checkKeys( + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + false, + Key.DEFAULT, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY)); + assertThatNoException() + .isThrownBy(() -> subject.checkKeys( + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + false, + Key.DEFAULT, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY)); + assertThatNoException() + .isThrownBy(() -> subject.checkKeys( + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + false, + Key.DEFAULT, + true, + A_COMPLEX_KEY)); + assertThatNoException() + .isThrownBy(() -> subject.checkKeys( + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + true, + A_COMPLEX_KEY, + false, + Key.DEFAULT)); + } +} From 696c92cba5061e6ca67bb382d61599580088f12f Mon Sep 17 00:00:00 2001 From: Kim Rader Date: Tue, 13 Jun 2023 22:31:37 -0700 Subject: [PATCH 25/70] implement prepareUpgrade in FreezeService (#7039) Signed-off-by: Kim Rader --- .../dispatcher/ReadableStoreFactory.java | 6 +- .../dispatcher/WritableStoreFactory.java | 6 +- .../networkadmin/impl/FreezeServiceImpl.java | 4 +- .../impl/ReadableSpecialFileStoreImpl.java | 53 ------- .../impl/ReadableUpdateFileStoreImpl.java | 81 +++++++++++ .../impl/WritableUpdateFileStore.java | 84 ++++++++++++ 
.../impl/handlers/FreezeHandler.java | 47 ++++--- .../test/ReadableUpdateFileStoreTest.java | 129 ++++++++++++++++++ .../test/WritableUpdateFileStoreTest.java | 88 ++++++++++++ .../impl/test/handlers/FreezeHandlerTest.java | 15 +- .../ReadableSpecialFileStore.java | 40 ------ .../networkadmin/ReadableUpdateFileStore.java | 58 ++++++++ 12 files changed, 486 insertions(+), 125 deletions(-) delete mode 100644 hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/ReadableSpecialFileStoreImpl.java create mode 100644 hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/ReadableUpdateFileStoreImpl.java create mode 100644 hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/WritableUpdateFileStore.java create mode 100644 hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/ReadableUpdateFileStoreTest.java create mode 100644 hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/WritableUpdateFileStoreTest.java delete mode 100644 hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/ReadableSpecialFileStore.java create mode 100644 hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/ReadableUpdateFileStore.java diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/ReadableStoreFactory.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/ReadableStoreFactory.java index 3198ce1d23a8..1f1807c6bef1 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/ReadableStoreFactory.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/ReadableStoreFactory.java @@ -24,9 +24,9 @@ import com.hedera.node.app.service.networkadmin.FreezeService; import com.hedera.node.app.service.networkadmin.NetworkService; import com.hedera.node.app.service.networkadmin.ReadableRunningHashLeafStore; -import com.hedera.node.app.service.networkadmin.ReadableSpecialFileStore; +import com.hedera.node.app.service.networkadmin.ReadableUpdateFileStore; import com.hedera.node.app.service.networkadmin.impl.ReadableRunningHashLeafStoreImpl; -import com.hedera.node.app.service.networkadmin.impl.ReadableSpecialFileStoreImpl; +import com.hedera.node.app.service.networkadmin.impl.ReadableUpdateFileStoreImpl; import com.hedera.node.app.service.schedule.ReadableScheduleStore; import com.hedera.node.app.service.schedule.ScheduleService; import com.hedera.node.app.service.schedule.impl.ReadableScheduleStoreImpl; @@ -58,7 +58,7 @@ ReadableAccountStore.class, new StoreEntry(TokenService.NAME, ReadableAccountSto ReadableTokenStore.class, new StoreEntry(TokenService.NAME, ReadableTokenStoreImpl::new), ReadableTopicStore.class, new StoreEntry(ConsensusService.NAME, ReadableTopicStoreImpl::new), ReadableScheduleStore.class, new StoreEntry(ScheduleService.NAME, ReadableScheduleStoreImpl::new), - ReadableSpecialFileStore.class, new StoreEntry(FreezeService.NAME, ReadableSpecialFileStoreImpl::new), + ReadableUpdateFileStore.class, new StoreEntry(FreezeService.NAME, ReadableUpdateFileStoreImpl::new), ReadableRunningHashLeafStore.class, new StoreEntry(NetworkService.NAME, ReadableRunningHashLeafStoreImpl::new), ReadableTokenRelationStore.class, new 
StoreEntry(TokenService.NAME, ReadableTokenRelationStoreImpl::new)); diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/WritableStoreFactory.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/WritableStoreFactory.java index 91ec04fe2158..38540a70962d 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/WritableStoreFactory.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/WritableStoreFactory.java @@ -20,6 +20,8 @@ import com.hedera.node.app.service.consensus.ConsensusService; import com.hedera.node.app.service.consensus.impl.WritableTopicStore; +import com.hedera.node.app.service.networkadmin.FreezeService; +import com.hedera.node.app.service.networkadmin.impl.WritableUpdateFileStore; import com.hedera.node.app.service.token.TokenService; import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; @@ -48,7 +50,9 @@ public class WritableStoreFactory { Map.of( WritableAccountStore.class, WritableAccountStore::new, WritableTokenStore.class, WritableTokenStore::new, - WritableTokenRelationStore.class, WritableTokenRelationStore::new)); + WritableTokenRelationStore.class, WritableTokenRelationStore::new), + FreezeService.NAME, + Map.of(WritableUpdateFileStore.class, WritableUpdateFileStore::new)); private final Map, Function> storeFactories; private final WritableStates states; diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/FreezeServiceImpl.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/FreezeServiceImpl.java index 86593e04c095..afb84aa2af1c 100644 --- a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/FreezeServiceImpl.java +++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/FreezeServiceImpl.java @@ -30,7 +30,9 @@ /** Standard implementation of the {@link FreezeService} {@link com.hedera.node.app.spi.Service}. */ public final class FreezeServiceImpl implements FreezeService { // special files will move to FileService - public static final String UPGRADE_FILES_KEY = "SPECIAL_FILES"; + public static final String UPGRADE_FILES_KEY = "UPGRADE_FILES"; + public static final String UPGRADE_FILE_ID_KEY = "UPGRADE_FILE_ID"; + public static final String UPGRADE_FILE_HASH_KEY = "UPGRADE_FILE_HASH"; public static final String DUAL_STATE_KEY = "DUAL_STATE"; diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/ReadableSpecialFileStoreImpl.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/ReadableSpecialFileStoreImpl.java deleted file mode 100644 index f9db03c1c1ae..000000000000 --- a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/ReadableSpecialFileStoreImpl.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.node.app.service.networkadmin.impl; - -import com.hedera.node.app.service.networkadmin.ReadableSpecialFileStore; -import com.hedera.node.app.spi.state.ReadableKVState; -import com.hedera.node.app.spi.state.ReadableStates; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.util.Objects; -import java.util.Optional; - -/** - * Implementation of {@link ReadableSpecialFileStore} - * - * @deprecated This is a temporary class. It will be replaced by a new class in FileService. - */ -@Deprecated -public class ReadableSpecialFileStoreImpl implements ReadableSpecialFileStore { - - /** The underlying data storage class that holds the file data. */ - private final ReadableKVState freezeFilesById; - - /** - * Create a new {@link ReadableSpecialFileStoreImpl} instance. - * - * @param states The state to use. - */ - public ReadableSpecialFileStoreImpl(@NonNull final ReadableStates states) { - Objects.requireNonNull(states); - this.freezeFilesById = states.get(FreezeServiceImpl.UPGRADE_FILES_KEY); - } - - @Override - @NonNull - public Optional get(long fileId) { - final var file = freezeFilesById.get(fileId); - return Optional.ofNullable(file); - } -} diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/ReadableUpdateFileStoreImpl.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/ReadableUpdateFileStoreImpl.java new file mode 100644 index 000000000000..70b159684c73 --- /dev/null +++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/ReadableUpdateFileStoreImpl.java @@ -0,0 +1,81 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.networkadmin.impl; + +import static com.hedera.node.app.service.networkadmin.impl.FreezeServiceImpl.UPGRADE_FILE_HASH_KEY; +import static com.hedera.node.app.service.networkadmin.impl.FreezeServiceImpl.UPGRADE_FILE_ID_KEY; +import static java.util.Objects.requireNonNull; + +import com.hedera.hapi.node.base.FileID; +import com.hedera.node.app.service.networkadmin.ReadableUpdateFileStore; +import com.hedera.node.app.spi.state.ReadableKVState; +import com.hedera.node.app.spi.state.ReadableSingletonState; +import com.hedera.node.app.spi.state.ReadableStates; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Optional; + +/** + * Implementation of {@link ReadableUpdateFileStore} + */ +// This is a temporary location for this class. It will be moved to FileService. +// @todo('Issue #6856') +public class ReadableUpdateFileStoreImpl implements ReadableUpdateFileStore { + /** The underlying data storage class that holds the file data. */ + private final ReadableKVState freezeFilesById; + + /** The underlying data storage class that holds the prepared update file number. + * If null, no prepared update file has been set. */ + private final ReadableSingletonState updateFileID; + /** The underlying data storage class that holds the prepared update file hash. + * May be null if no prepared update file has been set. */ + private final ReadableSingletonState updateFileHash; + + /** + * Create a new {@link ReadableUpdateFileStoreImpl} instance. + * + * @param states The state to use. + */ + public ReadableUpdateFileStoreImpl(@NonNull final ReadableStates states) { + requireNonNull(states); + this.freezeFilesById = states.get(FreezeServiceImpl.UPGRADE_FILES_KEY); + this.updateFileID = states.getSingleton(UPGRADE_FILE_ID_KEY); + this.updateFileHash = states.getSingleton(UPGRADE_FILE_HASH_KEY); + } + + @Override + @NonNull + public Optional get(@NonNull FileID fileId) { + requireNonNull(fileId); + final var file = freezeFilesById.get(fileId); + return Optional.ofNullable(file); + } + + @Override + @NonNull + public Optional updateFileID() { + FileID fileId = updateFileID.get(); + return (fileId == null ? Optional.empty() : Optional.of(fileId)); + } + + @Override + @NonNull + public Optional updateFileHash() { + Bytes hash = updateFileHash.get(); + return (hash == null ? Optional.empty() : Optional.of(hash)); + } +} diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/WritableUpdateFileStore.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/WritableUpdateFileStore.java new file mode 100644 index 000000000000..0de80ef79d90 --- /dev/null +++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/WritableUpdateFileStore.java @@ -0,0 +1,84 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.networkadmin.impl; + +import static java.util.Objects.requireNonNull; + +import com.hedera.hapi.node.base.FileID; +import com.hedera.node.app.spi.state.WritableSingletonState; +import com.hedera.node.app.spi.state.WritableStates; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Optional; + +/** + * Provides write methods for modifying underlying data storage mechanisms for + * working with freeze states. + */ +// This is a temporary location for this class. It will be moved to FileService. +// @todo('Issue #6856') +public class WritableUpdateFileStore extends ReadableUpdateFileStoreImpl { + /** The underlying data storage class that holds the update file number. */ + private final WritableSingletonState updateFileID; + /** The underlying data storage class that holds the update file hash. */ + private final WritableSingletonState updateFileHash; + + /** + * Create a new {@link WritableUpdateFileStore} instance. + * + * @param states The state to use. + */ + public WritableUpdateFileStore(@NonNull final WritableStates states) { + super(states); + requireNonNull(states); + updateFileID = states.getSingleton(FreezeServiceImpl.UPGRADE_FILE_ID_KEY); + updateFileHash = states.getSingleton(FreezeServiceImpl.UPGRADE_FILE_HASH_KEY); + } + + /** + * Sets or clears the update file ID. + * + * @param updateFileID The update file ID to set. If null, clears the update file ID. + */ + public void updateFileID(@Nullable final FileID updateFileID) { + this.updateFileID.put(updateFileID); + } + + /** + * Sets or clears the update file hash. + * + * @param updateFileHash The update file hash to set. If null, clears the update file hash. + */ + public void updateFileHash(@Nullable final Bytes updateFileHash) { + this.updateFileHash.put(updateFileHash); + } + + @Override + @NonNull + public Optional updateFileID() { + FileID fileId = updateFileID.get(); + return (fileId == null ? Optional.empty() : Optional.of(fileId)); + } + + @Override + @NonNull + public Optional updateFileHash() { + Bytes fileHash = updateFileHash.get(); + return (fileHash == null ? 
Optional.empty() : Optional.of(fileHash)); + } +} diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/FreezeHandler.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/FreezeHandler.java index 017e676c0ebb..00edd2590673 100644 --- a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/FreezeHandler.java +++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/FreezeHandler.java @@ -30,7 +30,8 @@ import com.hedera.hapi.node.freeze.FreezeTransactionBody; import com.hedera.hapi.node.freeze.FreezeType; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.service.networkadmin.ReadableSpecialFileStore; +import com.hedera.node.app.service.networkadmin.ReadableUpdateFileStore; +import com.hedera.node.app.service.networkadmin.impl.WritableUpdateFileStore; import com.hedera.node.app.service.networkadmin.impl.config.NetworkAdminServiceConfig; import com.hedera.node.app.spi.state.WritableFreezeStore; import com.hedera.node.app.spi.workflows.HandleContext; @@ -84,7 +85,7 @@ public void preHandle(@NonNull final PreHandleContext context) throws PreCheckEx verifyFreezeStartTimeIsInFuture(freezeTxn, txValidStart); } if (Arrays.asList(FREEZE_UPGRADE, TELEMETRY_UPGRADE, PREPARE_UPGRADE).contains(freezeType)) { - final ReadableSpecialFileStore specialFileStore = context.createStore(ReadableSpecialFileStore.class); + final ReadableUpdateFileStore specialFileStore = context.createStore(ReadableUpdateFileStore.class); verifyUpdateFileAndHash(freezeTxn, specialFileStore); } @@ -123,14 +124,14 @@ public void handle(@NonNull final HandleContext context) throws HandleException final var txn = context.body(); final NetworkAdminServiceConfig adminServiceConfig = context.configuration().getConfigData(NetworkAdminServiceConfig.class); - final ReadableSpecialFileStore specialFileStore = context.readableStore(ReadableSpecialFileStore.class); + final WritableUpdateFileStore specialFileStore = context.writableStore(WritableUpdateFileStore.class); final WritableFreezeStore freezeStore = context.writableStore(WritableFreezeStore.class); final FreezeTransactionBody freezeTxn = txn.freezeOrThrow(); - final FileID updateFileNum = + final FileID updateFileID = freezeTxn.updateFile(); // only some freeze types require this, it may be null for others - validateSemantics(freezeTxn, specialFileStore, updateFileNum); + validateSemantics(freezeTxn, specialFileStore, updateFileID); final FreezeUpgradeActions upgradeActions = new FreezeUpgradeActions(adminServiceConfig, freezeStore); final Timestamp freezeStartTime = freezeTxn.startTime(); // may be null for some freeze types @@ -140,18 +141,24 @@ public void handle(@NonNull final HandleContext context) throws HandleException // @todo('Issue #6761') - the below switch returns a CompletableFuture, need to use this with an ExecutorService switch (freezeTxn.freezeType()) { - case PREPARE_UPGRADE -> - // by the time we get here, we've already checked that updateFileNum is non-null in validateSemantics() - upgradeActions.extractSoftwareUpgrade(specialFileStore - .get(requireNonNull(updateFileNum).fileNum()) - .orElseThrow(() -> new IllegalStateException("Update file not found"))); - // @todo('Issue #6201'): call networkCtx.recordPreparedUpgrade(freezeTxn); + case PREPARE_UPGRADE -> { + // by 
+                // by the time we get here, we've already checked that updateFileID is non-null in validateSemantics()
+                // and that fileHash is non-null in preHandle()
+                specialFileStore.updateFileHash(freezeTxn.fileHash());
+                specialFileStore.updateFileID(updateFileID);
+                upgradeActions.extractSoftwareUpgrade(specialFileStore
+                        .get(requireNonNull(updateFileID))
+                        .orElseThrow(() -> new IllegalStateException("Update file not found")));
+            }
             case FREEZE_UPGRADE -> upgradeActions.scheduleFreezeUpgradeAt(requireNonNull(freezeStartTimeInstant));
-            case FREEZE_ABORT -> upgradeActions.abortScheduledFreeze();
-            // @todo('Issue #6201'): call networkCtx.discardPreparedUpgradeMeta();
+            case FREEZE_ABORT -> {
+                upgradeActions.abortScheduledFreeze();
+                specialFileStore.updateFileHash(null);
+                specialFileStore.updateFileID(null);
+            }
             case TELEMETRY_UPGRADE -> upgradeActions.extractTelemetryUpgrade(
                     specialFileStore
-                            .get(requireNonNull(updateFileNum).fileNum())
+                            .get(requireNonNull(updateFileID))
                             .orElseThrow(() -> new IllegalStateException("Telemetry update file not found")),
                     requireNonNull(freezeStartTimeInstant));
             case FREEZE_ONLY -> upgradeActions.scheduleFreezeOnlyAt(requireNonNull(freezeStartTimeInstant));
@@ -165,16 +172,16 @@ public void handle(@NonNull final HandleContext context) throws HandleException
      */
     private static void validateSemantics(
             @NonNull final FreezeTransactionBody freezeTxn,
-            @NonNull final ReadableSpecialFileStore specialFileStore,
-            @Nullable final FileID updateFileNum) {
+            @NonNull final ReadableUpdateFileStore specialFileStore,
+            @Nullable final FileID updateFileID) {
         requireNonNull(freezeTxn);
         requireNonNull(specialFileStore);

         if (freezeTxn.freezeType() == PREPARE_UPGRADE || freezeTxn.freezeType() == TELEMETRY_UPGRADE) {
-            if (updateFileNum == null) {
+            if (updateFileID == null) {
                 throw new HandleException(ResponseCodeEnum.INVALID_FREEZE_TRANSACTION_BODY);
             }
-            final Optional<byte[]> updateFileZip = specialFileStore.get(updateFileNum.fileNum());
+            final Optional<byte[]> updateFileZip = specialFileStore.get(updateFileID);
             if (updateFileZip.isEmpty()) {
                 throw new IllegalStateException("Update file not found");
             }
@@ -212,13 +219,13 @@ private static void verifyFreezeStartTimeIsInFuture(
      * @throws PreCheckException if updateFile or fileHash are not set or don't pass sanity checks
      */
     private static void verifyUpdateFileAndHash(
-            final @NonNull FreezeTransactionBody freezeTxn, final @NonNull ReadableSpecialFileStore specialFileStore)
+            final @NonNull FreezeTransactionBody freezeTxn, final @NonNull ReadableUpdateFileStore specialFileStore)
             throws PreCheckException {
         requireNonNull(freezeTxn);
         requireNonNull(specialFileStore);

         final FileID updateFile = freezeTxn.updateFile();
-        if (updateFile == null || specialFileStore.get(updateFile.fileNum()).isEmpty()) {
+        if (updateFile == null || specialFileStore.get(updateFile).isEmpty()) {
             throw new PreCheckException(ResponseCodeEnum.FREEZE_UPDATE_FILE_DOES_NOT_EXIST);
         }
diff --git a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/ReadableUpdateFileStoreTest.java b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/ReadableUpdateFileStoreTest.java
new file mode 100644
index 000000000000..a27cdc53eca7
--- /dev/null
+++ b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/ReadableUpdateFileStoreTest.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2023 Hedera Hashgraph, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.hedera.node.app.service.networkadmin.impl.test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mock.Strictness.LENIENT;
+import static org.mockito.Mockito.when;
+
+import com.hedera.hapi.node.base.FileID;
+import com.hedera.node.app.service.networkadmin.ReadableUpdateFileStore;
+import com.hedera.node.app.service.networkadmin.impl.FreezeServiceImpl;
+import com.hedera.node.app.service.networkadmin.impl.ReadableUpdateFileStoreImpl;
+import com.hedera.node.app.spi.fixtures.state.MapReadableKVState;
+import com.hedera.node.app.spi.state.ReadableSingletonStateBase;
+import com.hedera.node.app.spi.state.ReadableStates;
+import com.hedera.pbj.runtime.io.buffer.Bytes;
+import java.util.concurrent.atomic.AtomicReference;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+@ExtendWith(MockitoExtension.class)
+class ReadableUpdateFileStoreTest {
+    private ReadableUpdateFileStore subject;
+
+    @Mock(strictness = LENIENT)
+    protected ReadableStates readableStates;
+
+    @Test
+    void constructorCreatesFreezeState() {
+        final ReadableUpdateFileStore store = new ReadableUpdateFileStoreImpl(readableStates);
+        assertNotNull(store);
+    }
+
+    @Test
+    void testNullConstructorArgs() {
+        assertThrows(NullPointerException.class, () -> new ReadableUpdateFileStoreImpl(null));
+    }
+
+    @Test
+    void testGetFileByFileID() {
+        final FileID fileId = FileID.newBuilder().fileNum(42L).build();
+        final byte[] fileBytes = "bogus file bytes".getBytes();
+        final MapReadableKVState<FileID, byte[]> state = MapReadableKVState.builder(
+                        FreezeServiceImpl.UPGRADE_FILES_KEY)
+                .value(fileId, fileBytes)
+                .build();
+        when(readableStates.get(FreezeServiceImpl.UPGRADE_FILES_KEY)).then(invocation -> state);
+
+        final ReadableUpdateFileStore store = new ReadableUpdateFileStoreImpl(readableStates);
+        assertEquals(fileBytes, store.get(fileId).get());
+    }
+
+    @Test
+    void testEmptyGetFileByFileID() {
+        final FileID fileId = FileID.newBuilder().fileNum(42L).build();
+        final MapReadableKVState<FileID, byte[]> state = MapReadableKVState.builder(
+                        FreezeServiceImpl.UPGRADE_FILES_KEY)
+                .build();
+        when(readableStates.get(FreezeServiceImpl.UPGRADE_FILES_KEY)).then(invocation -> state);
+
+        final ReadableUpdateFileStore store = new ReadableUpdateFileStoreImpl(readableStates);
+        assertTrue(store.get(fileId).isEmpty());
+    }
+
+    @Test
+    void testPreparedUpdateFileID() {
+        final FileID fileId = FileID.newBuilder().fileNum(42L).build();
+        final AtomicReference<FileID> backingStore = new AtomicReference<>(fileId);
+        when(readableStates.getSingleton(FreezeServiceImpl.UPGRADE_FILE_ID_KEY))
+                .then(invocation ->
+                        new ReadableSingletonStateBase<>(FreezeServiceImpl.UPGRADE_FILE_ID_KEY, backingStore::get));
+
+        final ReadableUpdateFileStore store = new ReadableUpdateFileStoreImpl(readableStates);
+
+        assertEquals(42L, store.updateFileID().get().fileNum());
+    }
+
+    @Test
+    void testEmptyPreparedUpdateFileID() {
+        final AtomicReference<FileID> backingStore = new AtomicReference<>(); // contains null
+        when(readableStates.getSingleton(FreezeServiceImpl.UPGRADE_FILE_ID_KEY))
+                .then(invocation ->
+                        new ReadableSingletonStateBase<>(FreezeServiceImpl.UPGRADE_FILE_ID_KEY, backingStore::get));
+
+        final ReadableUpdateFileStore store = new ReadableUpdateFileStoreImpl(readableStates);
+
+        assertTrue(store.updateFileID().isEmpty());
+    }
+
+    @Test
+    void testPreparedUpdateFileHash() {
+        final Bytes fileBytes = Bytes.wrap("test hash");
+        final AtomicReference<Bytes> backingStore = new AtomicReference<>(fileBytes);
+        when(readableStates.getSingleton(FreezeServiceImpl.UPGRADE_FILE_HASH_KEY))
+                .then(invocation ->
+                        new ReadableSingletonStateBase<>(FreezeServiceImpl.UPGRADE_FILE_HASH_KEY, backingStore::get));
+        final ReadableUpdateFileStore store = new ReadableUpdateFileStoreImpl(readableStates);
+        assertEquals(Bytes.wrap("test hash"), store.updateFileHash().get());
+    }
+
+    @Test
+    void testEmptyPreparedUpdateFileHash() {
+        final AtomicReference<Bytes> backingStore = new AtomicReference<>(); // contains null
+        when(readableStates.getSingleton(FreezeServiceImpl.UPGRADE_FILE_HASH_KEY))
+                .then(invocation ->
+                        new ReadableSingletonStateBase<>(FreezeServiceImpl.UPGRADE_FILE_HASH_KEY, backingStore::get));
+        final ReadableUpdateFileStore store = new ReadableUpdateFileStoreImpl(readableStates);
+        assertTrue(store.updateFileHash().isEmpty());
+    }
+}
diff --git a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/WritableUpdateFileStoreTest.java b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/WritableUpdateFileStoreTest.java
new file mode 100644
index 000000000000..16f1b9e40027
--- /dev/null
+++ b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/WritableUpdateFileStoreTest.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2023 Hedera Hashgraph, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.hedera.node.app.service.networkadmin.impl.test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mock.Strictness.LENIENT;
+import static org.mockito.Mockito.when;
+
+import com.hedera.hapi.node.base.FileID;
+import com.hedera.node.app.service.networkadmin.impl.FreezeServiceImpl;
+import com.hedera.node.app.service.networkadmin.impl.WritableUpdateFileStore;
+import com.hedera.node.app.spi.state.WritableSingletonStateBase;
+import com.hedera.node.app.spi.state.WritableStates;
+import com.hedera.pbj.runtime.io.buffer.Bytes;
+import java.util.concurrent.atomic.AtomicReference;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+@ExtendWith(MockitoExtension.class)
+class WritableUpdateFileStoreTest {
+    private WritableUpdateFileStore subject;
+
+    @Mock(strictness = LENIENT)
+    protected WritableStates writableStates;
+
+    @Test
+    void constructorCreatesFreezeState() {
+        final WritableUpdateFileStore store = new WritableUpdateFileStore(writableStates);
+        assertNotNull(store);
+    }
+
+    @Test
+    void testNullConstructorArgs() {
+        assertThrows(NullPointerException.class, () -> new WritableUpdateFileStore(null));
+    }
+
+    @Test
+    void testPreparedUpdateFileID() {
+        final AtomicReference<FileID> backingStore = new AtomicReference<>(null);
+        when(writableStates.getSingleton(FreezeServiceImpl.UPGRADE_FILE_ID_KEY))
+                .then(invocation -> new WritableSingletonStateBase<>(
+                        FreezeServiceImpl.UPGRADE_FILE_ID_KEY, backingStore::get, backingStore::set));
+
+        final WritableUpdateFileStore store = new WritableUpdateFileStore(writableStates);
+
+        // test with no file ID set
+        assertTrue(store.updateFileID().isEmpty());
+
+        // test with file ID set
+        store.updateFileID(FileID.newBuilder().fileNum(42L).build());
+        assertEquals(42L, store.updateFileID().get().fileNum());
+    }
+
+    @Test
+    void testPreparedUpdateFileHash() {
+        final AtomicReference<Bytes> backingStore = new AtomicReference<>(null);
+        when(writableStates.getSingleton(FreezeServiceImpl.UPGRADE_FILE_HASH_KEY))
+                .then(invocation -> new WritableSingletonStateBase<>(
+                        FreezeServiceImpl.UPGRADE_FILE_HASH_KEY, backingStore::get, backingStore::set));
+        final WritableUpdateFileStore store = new WritableUpdateFileStore(writableStates);
+
+        // test with no file hash set
+        assertTrue(store.updateFileHash().isEmpty());
+
+        // test with file hash set
+        store.updateFileHash(Bytes.wrap("test hash"));
+        assertEquals(Bytes.wrap("test hash"), store.updateFileHash().get());
+    }
+}
diff --git a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/FreezeHandlerTest.java b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/FreezeHandlerTest.java
index 7e8ceaf79705..8a4eb818eb4a 100644
--- a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/FreezeHandlerTest.java
+++ b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/FreezeHandlerTest.java
@@ -40,7 +40,8 @@
 import com.hedera.hapi.node.freeze.FreezeType;
 import com.hedera.hapi.node.state.token.Account;
 import com.hedera.hapi.node.transaction.TransactionBody;
-import com.hedera.node.app.service.networkadmin.ReadableSpecialFileStore;
+import com.hedera.node.app.service.networkadmin.ReadableUpdateFileStore;
+import com.hedera.node.app.service.networkadmin.impl.WritableUpdateFileStore;
 import com.hedera.node.app.service.networkadmin.impl.handlers.FreezeHandler;
 import com.hedera.node.app.service.token.ReadableAccountStore;
 import com.hedera.node.app.spi.state.WritableFreezeStore;
@@ -59,7 +60,7 @@
 @ExtendWith(MockitoExtension.class)
 class FreezeHandlerTest {
     @Mock(strictness = LENIENT)
-    ReadableSpecialFileStore specialFileStore;
+    WritableUpdateFileStore specialFileStore;

     @Mock(strictness = LENIENT)
     private WritableFreezeStore freezeStore;
@@ -92,10 +93,10 @@ void setUp() {
         given(account.key()).willReturn(key);

         given(preHandleContext.createStore(ReadableAccountStore.class)).willReturn(accountStore);
-        given(preHandleContext.createStore(ReadableSpecialFileStore.class)).willReturn(specialFileStore);
+        given(preHandleContext.createStore(ReadableUpdateFileStore.class)).willReturn(specialFileStore);

         given(handleContext.configuration()).willReturn(config);
-        given(handleContext.readableStore(ReadableSpecialFileStore.class)).willReturn(specialFileStore);
+        given(handleContext.writableStore(WritableUpdateFileStore.class)).willReturn(specialFileStore);
         given(handleContext.writableStore(WritableFreezeStore.class)).willReturn(freezeStore);
     }
@@ -179,7 +180,7 @@ void rejectIfFileHashNotSetForCertainFreezeTypes() {
         FreezeType[] freezeTypes = {PREPARE_UPGRADE, FREEZE_UPGRADE, TELEMETRY_UPGRADE};
         FileID fileId = FileID.newBuilder().fileNum(1234L).build();
-        given(specialFileStore.get(1234L)).willReturn(Optional.of(new byte[0]));
+        given(specialFileStore.get(fileId)).willReturn(Optional.of(new byte[0]));

         for (FreezeType freezeType : freezeTypes) {
             TransactionID txnId = TransactionID.newBuilder()
@@ -225,7 +226,7 @@ void happyPathFreezeUpgradeOrTelemetryUpgrade() {
         FreezeType[] freezeTypes = {FREEZE_UPGRADE, TELEMETRY_UPGRADE};
         FileID fileId = FileID.newBuilder().fileNum(1234L).build();
-        given(specialFileStore.get(1234L)).willReturn(Optional.of(new byte[0]));
+        given(specialFileStore.get(fileId)).willReturn(Optional.of(new byte[0]));
         for (FreezeType freezeType : freezeTypes) {
             TransactionID txnId = TransactionID.newBuilder()
                     .accountID(nonAdminAccount)
@@ -254,7 +255,7 @@ void happyPathPrepareUpgrade() {
         // start time not required
         FileID fileId = FileID.newBuilder().fileNum(1234L).build();
-        given(specialFileStore.get(1234L)).willReturn(Optional.of(new byte[0]));
+        given(specialFileStore.get(fileId)).willReturn(Optional.of(new byte[0]));
         TransactionID txnId = TransactionID.newBuilder()
                 .accountID(nonAdminAccount)
                 .transactionValidStart(Timestamp.newBuilder().seconds(1000).build())
diff --git a/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/ReadableSpecialFileStore.java b/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/ReadableSpecialFileStore.java
deleted file mode 100644
index e45ddc0bb244..000000000000
--- a/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/ReadableSpecialFileStore.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (C) 2023 Hedera Hashgraph, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.hedera.node.app.service.networkadmin;
-
-import edu.umd.cs.findbugs.annotations.NonNull;
-import java.util.Optional;
-
-/**
- * Provides read-only methods for interacting with the underlying data storage mechanisms for
- * working with special files used in freeze transactions.
- *
- * @deprecated This is a temporary interface. It will be replaced by a new interface in FileService.
- */
-@Deprecated
-public interface ReadableSpecialFileStore {
-
-    /**
-     * Gets the freeze file with the given ID. If there is no file with given ID
-     * returns {@link Optional#empty()}.
-     *
-     * @param fileId given id for the file
-     * @return the file with the given id
-     */
-    @NonNull
-    Optional<byte[]> get(long fileId);
-}
diff --git a/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/ReadableUpdateFileStore.java b/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/ReadableUpdateFileStore.java
new file mode 100644
index 000000000000..a37534fac62e
--- /dev/null
+++ b/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/ReadableUpdateFileStore.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2023 Hedera Hashgraph, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.hedera.node.app.service.networkadmin;
+
+import com.hedera.hapi.node.base.FileID;
+import com.hedera.pbj.runtime.io.buffer.Bytes;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import java.util.Optional;
+
+/**
+ * Provides read-only methods for interacting with the underlying data storage mechanisms for
+ * working with update files used in freeze transactions.
+ *
+ */
+// This is a temporary location for this interface. It will be replaced by a new interface in FileService.
+// @todo('Issue #6856')
+public interface ReadableUpdateFileStore {
+
+    /**
+     * Gets the freeze file with the given FileID. If there is no file with given FileID then
+     * returns {@link Optional#empty()}.
+     *
+     * @param fileId given id for the file
+     * @return the file with the given id
+     */
+    @NonNull
+    Optional<byte[]> get(FileID fileId);
+
+    /**
+     * Get the file ID of the prepared update file. If no prepared update file has been set
+     * (i.e. if the network is not in the process of an upgrade), this method will return {@link Optional#empty()}.
+     * @return the file ID of the prepared update file, or {@link Optional#empty()} if no prepared update file has been set
+     */
+    @NonNull
+    Optional<FileID> updateFileID();
+
+    /**
+     * Get the hash of the prepared update file. If no prepared update file has been set
+     * (i.e. if the network is not in the process of an upgrade), this method will return {@link Optional#empty()}.
+     * @return the hash of the prepared update file, or {@link Optional#empty()} if no prepared update file has been set
+     */
+    @NonNull
+    Optional<Bytes> updateFileHash();
+}
From 63508f4a4111def993c23689aaa94b505cfce364 Mon Sep 17 00:00:00 2001
From: Hendrik Ebbers
Date: Wed, 14 Jun 2023 11:16:45 +0200
Subject: [PATCH 26/70] Time interface moved to swirlds-base (#7043)

Signed-off-by: Hendrik Ebbers
---
 .../src/main/java/com/swirlds/base}/time/Time.java | 4 ++--
 platform-sdk/swirlds-base/src/main/java/module-info.java | 1 +
 .../java/com/swirlds/common/metrics/extensions/BusyTime.java | 2 +-
 .../com/swirlds/common/metrics/extensions/CountPerSecond.java | 2 +-
 .../common/metrics/platform/DefaultRunningAverageMetric.java | 2 +-
 .../common/metrics/platform/DefaultSpeedometerMetric.java | 2 +-
 .../com/swirlds/common/metrics/platform/SnapshotService.java | 2 +-
 .../com/swirlds/common/statistics/StatsRunningAverage.java | 2 +-
 .../java/com/swirlds/common/statistics/StatsSpeedometer.java | 2 +-
 .../com/swirlds/common/statistics/internal/StatsBuffer.java | 2 +-
 .../framework/config/QueueThreadMetricsConfiguration.java | 2 +-
 .../main/java/com/swirlds/common/time/IntegerEpochTime.java | 1 +
 .../src/main/java/com/swirlds/common/time/OSTime.java | 1 +
 .../java/com/swirlds/common/utility/throttle/RateLimiter.java | 2 +-
 .../swirlds/common/metrics/platform/SnapshotServiceTest.java | 2 +-
 .../java/com/swirlds/common/test/fixtures/FakeTime.java | 2 +-
 .../src/main/java/com/swirlds/platform/SwirldsPlatform.java | 2 +-
 .../com/swirlds/platform/components/EventTaskDispatcher.java | 2 +-
 .../swirlds/platform/event/creation/ChatterEventCreator.java | 2 +-
 .../event/preconsensus/PreconsensusEventFileManager.java | 2 +-
 .../event/preconsensus/PreconsensusEventReplayWorkflow.java | 2 +-
 .../main/java/com/swirlds/platform/gossip/GossipFactory.java | 2 +-
 .../com/swirlds/platform/gossip/chatter/ChatterGossip.java | 2 +-
 .../swirlds/platform/gossip/chatter/protocol/ChatterCore.java | 2 +-
 .../chatter/protocol/heartbeat/HeartbeatSendReceive.java | 2 +-
 .../gossip/chatter/protocol/heartbeat/HeartbeatSender.java | 2 +-
 .../protocol/processing/ProcessingTimeSendReceive.java | 2 +-
 .../java/com/swirlds/platform/gossip/sync/SyncGossip.java | 2 +-
 .../swirlds/platform/gossip/sync/protocol/SyncProtocol.java | 2 +-
 .../com/swirlds/platform/heartbeats/HeartbeatProtocol.java | 2 +-
 .../java/com/swirlds/platform/intake/IntakeCycleStats.java | 2 +-
 .../swirlds/platform/metrics/ConsensusHandlingMetrics.java | 2 +-
 .../java/com/swirlds/platform/metrics/EventIntakeMetrics.java | 2 +-
 .../com/swirlds/platform/state/iss/ConsensusHashManager.java | 2 +-
 .../main/java/com/swirlds/platform/state/iss/IssHandler.java | 2 +-
 .../swirlds/platform/state/signed/SignedStateFileManager.java | 2 +-
 .../com/swirlds/platform/state/signed/SignedStateHistory.java | 2 +-
 .../swirlds/platform/state/signed/SignedStateSentinel.java | 2 +-
 .../java/com/swirlds/platform/stats/cycle/CycleTracker.java | 2 +-
 .../main/java/com/swirlds/platform/uptime/UptimeTracker.java | 2 +-
 .../java/com/swirlds/platform/test/consensus/TestIntake.java | 2 +-
 .../platform/test/simulated/SimpleSimulatedGossip.java | 2 +-
 .../platform/test/simulated/SimulatedEventCreationNode.java | 2 +-
 .../platform/test/chatter/network/DelayableIntakeQueue.java | 2 +-
 .../platform/test/chatter/network/TimedEventCreator.java | 2 +-
 .../test/chatter/network/framework/ChatterInstance.java | 2 +-
 46 files changed, 47 insertions(+), 44 deletions(-)
 rename platform-sdk/{swirlds-common/src/main/java/com/swirlds/common => swirlds-base/src/main/java/com/swirlds/base}/time/Time.java (94%)

diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/time/Time.java b/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/time/Time.java
similarity index 94%
rename from platform-sdk/swirlds-common/src/main/java/com/swirlds/common/time/Time.java
rename to platform-sdk/swirlds-base/src/main/java/com/swirlds/base/time/Time.java
index 56f83076925b..ad7b8a24b2a0 100644
--- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/time/Time.java
+++ b/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/time/Time.java
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2016-2023 Hedera Hashgraph, LLC
+ * Copyright (C) 2023 Hedera Hashgraph, LLC
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,7 +14,7 @@
  * limitations under the License.
*/ -package com.swirlds.common.time; +package com.swirlds.base.time; import java.time.Instant; diff --git a/platform-sdk/swirlds-base/src/main/java/module-info.java b/platform-sdk/swirlds-base/src/main/java/module-info.java index df87ec5d95a1..0b13377980f1 100644 --- a/platform-sdk/swirlds-base/src/main/java/module-info.java +++ b/platform-sdk/swirlds-base/src/main/java/module-info.java @@ -2,6 +2,7 @@ exports com.swirlds.base; exports com.swirlds.base.function; exports com.swirlds.base.state; + exports com.swirlds.base.time; requires static com.github.spotbugs.annotations; } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/BusyTime.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/BusyTime.java index c3d9703d0bb2..6d7f57d70edd 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/BusyTime.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/BusyTime.java @@ -16,12 +16,12 @@ package com.swirlds.common.metrics.extensions; +import com.swirlds.base.time.Time; import com.swirlds.common.metrics.FloatFormats; import com.swirlds.common.metrics.FunctionGauge; import com.swirlds.common.metrics.Metrics; import com.swirlds.common.time.IntegerEpochTime; import com.swirlds.common.time.OSTime; -import com.swirlds.common.time.Time; import com.swirlds.common.utility.ByteUtils; import com.swirlds.common.utility.StackTrace; import com.swirlds.common.utility.throttle.RateLimiter; diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/CountPerSecond.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/CountPerSecond.java index ff529b9bafa9..d4bbdde13d8b 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/CountPerSecond.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/CountPerSecond.java @@ -20,13 +20,13 @@ import static com.swirlds.common.metrics.FloatFormats.FORMAT_10_2; import static com.swirlds.common.utility.CommonUtils.throwArgBlank; +import com.swirlds.base.time.Time; import com.swirlds.common.metrics.IntegerPairAccumulator; import com.swirlds.common.metrics.LongAccumulator; import com.swirlds.common.metrics.Metric; import com.swirlds.common.metrics.Metrics; import com.swirlds.common.time.IntegerEpochTime; import com.swirlds.common.time.OSTime; -import com.swirlds.common.time.Time; import com.swirlds.common.utility.Units; /** diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultRunningAverageMetric.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultRunningAverageMetric.java index 0cc458a6f3bb..9c515dd16c0b 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultRunningAverageMetric.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultRunningAverageMetric.java @@ -16,11 +16,11 @@ package com.swirlds.common.metrics.platform; +import com.swirlds.base.time.Time; import com.swirlds.common.metrics.RunningAverageMetric; import com.swirlds.common.statistics.StatsBuffered; import com.swirlds.common.statistics.StatsRunningAverage; import com.swirlds.common.time.OSTime; -import com.swirlds.common.time.Time; import org.apache.commons.lang3.builder.ToStringBuilder; /** diff --git 
a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultSpeedometerMetric.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultSpeedometerMetric.java index ef16478178ae..2635ebb4eb1f 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultSpeedometerMetric.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultSpeedometerMetric.java @@ -18,11 +18,11 @@ import static org.apache.commons.lang3.builder.ToStringStyle.SHORT_PREFIX_STYLE; +import com.swirlds.base.time.Time; import com.swirlds.common.metrics.SpeedometerMetric; import com.swirlds.common.statistics.StatsBuffered; import com.swirlds.common.statistics.StatsSpeedometer; import com.swirlds.common.time.OSTime; -import com.swirlds.common.time.Time; import org.apache.commons.lang3.builder.ToStringBuilder; /** diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/SnapshotService.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/SnapshotService.java index 33a85e6845f6..5a1b9212b2a3 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/SnapshotService.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/SnapshotService.java @@ -20,10 +20,10 @@ import static com.swirlds.common.utility.CommonUtils.throwArgNull; import com.swirlds.base.state.Startable; +import com.swirlds.base.time.Time; import com.swirlds.common.metrics.Metric; import com.swirlds.common.metrics.config.MetricsConfig; import com.swirlds.common.time.OSTime; -import com.swirlds.common.time.Time; import java.time.Duration; import java.util.List; import java.util.Map; diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsRunningAverage.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsRunningAverage.java index be54c68533d7..bd82eb2f6044 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsRunningAverage.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsRunningAverage.java @@ -16,9 +16,9 @@ package com.swirlds.common.statistics; +import com.swirlds.base.time.Time; import com.swirlds.common.statistics.internal.StatsBuffer; import com.swirlds.common.time.OSTime; -import com.swirlds.common.time.Time; import com.swirlds.logging.LogMarker; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsSpeedometer.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsSpeedometer.java index 0a7516fc32de..a357eb85001f 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsSpeedometer.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsSpeedometer.java @@ -18,9 +18,9 @@ import static com.swirlds.common.utility.CommonUtils.throwArgNull; +import com.swirlds.base.time.Time; import com.swirlds.common.statistics.internal.StatsBuffer; import com.swirlds.common.time.OSTime; -import com.swirlds.common.time.Time; /** * This class measures how many times per second the cycle() method is called. 
It is recalculated every diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/internal/StatsBuffer.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/internal/StatsBuffer.java index 54b834b4c8ec..bf8d39c75a24 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/internal/StatsBuffer.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/internal/StatsBuffer.java @@ -18,8 +18,8 @@ import static com.swirlds.common.utility.Units.NANOSECONDS_TO_SECONDS; +import com.swirlds.base.time.Time; import com.swirlds.common.time.OSTime; -import com.swirlds.common.time.Time; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.IntBinaryOperator; import java.util.function.IntUnaryOperator; diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/config/QueueThreadMetricsConfiguration.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/config/QueueThreadMetricsConfiguration.java index ea88d5f32557..f89bcaf8a365 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/config/QueueThreadMetricsConfiguration.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/config/QueueThreadMetricsConfiguration.java @@ -18,9 +18,9 @@ import static com.swirlds.base.ArgumentUtils.throwArgNull; +import com.swirlds.base.time.Time; import com.swirlds.common.metrics.Metrics; import com.swirlds.common.time.OSTime; -import com.swirlds.common.time.Time; import edu.umd.cs.findbugs.annotations.NonNull; /** diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/time/IntegerEpochTime.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/time/IntegerEpochTime.java index 3a5e81248169..4f2b128d8470 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/time/IntegerEpochTime.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/time/IntegerEpochTime.java @@ -16,6 +16,7 @@ package com.swirlds.common.time; +import com.swirlds.base.time.Time; import com.swirlds.common.utility.Units; /** diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/time/OSTime.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/time/OSTime.java index 87b86bb3a540..e322b6adb5c1 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/time/OSTime.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/time/OSTime.java @@ -16,6 +16,7 @@ package com.swirlds.common.time; +import com.swirlds.base.time.Time; import java.time.Instant; /** diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/utility/throttle/RateLimiter.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/utility/throttle/RateLimiter.java index 5998da296984..97ea6640effe 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/utility/throttle/RateLimiter.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/utility/throttle/RateLimiter.java @@ -19,7 +19,7 @@ import static com.swirlds.common.utility.CompareTo.isGreaterThanOrEqualTo; import static com.swirlds.common.utility.Units.SECONDS_TO_NANOSECONDS; -import com.swirlds.common.time.Time; +import com.swirlds.base.time.Time; import java.time.Duration; import java.time.Instant; diff --git 
a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/SnapshotServiceTest.java b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/SnapshotServiceTest.java index 2905107c325a..d7c418a5af47 100644 --- a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/SnapshotServiceTest.java +++ b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/SnapshotServiceTest.java @@ -31,10 +31,10 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import com.swirlds.base.time.Time; import com.swirlds.common.metrics.config.MetricsConfig; import com.swirlds.common.system.NodeId; import com.swirlds.common.test.fixtures.FakeTime; -import com.swirlds.common.time.Time; import com.swirlds.config.api.Configuration; import com.swirlds.test.framework.config.TestConfigBuilder; import java.time.Duration; diff --git a/platform-sdk/swirlds-common/src/testFixtures/java/com/swirlds/common/test/fixtures/FakeTime.java b/platform-sdk/swirlds-common/src/testFixtures/java/com/swirlds/common/test/fixtures/FakeTime.java index fde196b62c22..1d27a4ea4a9e 100644 --- a/platform-sdk/swirlds-common/src/testFixtures/java/com/swirlds/common/test/fixtures/FakeTime.java +++ b/platform-sdk/swirlds-common/src/testFixtures/java/com/swirlds/common/test/fixtures/FakeTime.java @@ -18,7 +18,7 @@ import static com.swirlds.common.utility.Units.NANOSECONDS_TO_MILLISECONDS; -import com.swirlds.common.time.Time; +import com.swirlds.base.time.Time; import java.time.Duration; import java.time.Instant; import java.util.Calendar; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java index 5a58fc81254d..103f61baff9f 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java @@ -27,6 +27,7 @@ import static com.swirlds.platform.state.signed.ReservedSignedState.createNullReservation; import com.swirlds.base.state.Startable; +import com.swirlds.base.time.Time; import com.swirlds.common.config.BasicConfig; import com.swirlds.common.config.ConsensusConfig; import com.swirlds.common.config.StateConfig; @@ -63,7 +64,6 @@ import com.swirlds.common.threading.framework.config.QueueThreadMetricsConfiguration; import com.swirlds.common.threading.manager.ThreadManager; import com.swirlds.common.time.OSTime; -import com.swirlds.common.time.Time; import com.swirlds.common.utility.AutoCloseableWrapper; import com.swirlds.common.utility.Clearable; import com.swirlds.common.utility.LoggingClearables; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventTaskDispatcher.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventTaskDispatcher.java index 5a60f43fec02..328c6b7038d7 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventTaskDispatcher.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventTaskDispatcher.java @@ -16,7 +16,7 @@ package com.swirlds.platform.components; -import com.swirlds.common.time.Time; +import com.swirlds.base.time.Time; import com.swirlds.logging.LogMarker; import com.swirlds.platform.event.CreateEventTask; import com.swirlds.platform.event.EventIntakeTask; diff --git 
a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/creation/ChatterEventCreator.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/creation/ChatterEventCreator.java index fc533b1a57ef..8c01fccdabe8 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/creation/ChatterEventCreator.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/creation/ChatterEventCreator.java @@ -18,6 +18,7 @@ import static com.swirlds.logging.LogMarker.CREATE_EVENT; +import com.swirlds.base.time.Time; import com.swirlds.common.crypto.Cryptography; import com.swirlds.common.stream.Signer; import com.swirlds.common.system.EventCreationRuleResponse; @@ -26,7 +27,6 @@ import com.swirlds.common.system.events.BaseEvent; import com.swirlds.common.system.events.BaseEventHashedData; import com.swirlds.common.system.events.BaseEventUnhashedData; -import com.swirlds.common.time.Time; import com.swirlds.platform.components.EventCreationRules; import com.swirlds.platform.components.EventMapper; import com.swirlds.platform.components.transaction.TransactionSupplier; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/PreconsensusEventFileManager.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/PreconsensusEventFileManager.java index 7b0c595c6c37..acfcacb45ab6 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/PreconsensusEventFileManager.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/PreconsensusEventFileManager.java @@ -19,11 +19,11 @@ import static com.swirlds.logging.LogMarker.EXCEPTION; import static com.swirlds.logging.LogMarker.STARTUP; +import com.swirlds.base.time.Time; import com.swirlds.common.config.StateConfig; import com.swirlds.common.context.PlatformContext; import com.swirlds.common.io.utility.RecycleBin; import com.swirlds.common.system.NodeId; -import com.swirlds.common.time.Time; import com.swirlds.common.utility.RandomAccessDeque; import com.swirlds.common.utility.Units; import com.swirlds.common.utility.ValueReference; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/PreconsensusEventReplayWorkflow.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/PreconsensusEventReplayWorkflow.java index a02d667abbd5..403928b4f1f2 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/PreconsensusEventReplayWorkflow.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/PreconsensusEventReplayWorkflow.java @@ -24,13 +24,13 @@ import static com.swirlds.logging.LogMarker.EXCEPTION; import static com.swirlds.logging.LogMarker.STARTUP; +import com.swirlds.base.time.Time; import com.swirlds.common.context.PlatformContext; import com.swirlds.common.formatting.UnitFormatter; import com.swirlds.common.io.IOIterator; import com.swirlds.common.system.platformstatus.PlatformStatus; import com.swirlds.common.threading.framework.QueueThread; import com.swirlds.common.threading.manager.ThreadManager; -import com.swirlds.common.time.Time; import com.swirlds.platform.components.EventTaskDispatcher; import com.swirlds.platform.components.state.StateManagementComponent; import com.swirlds.platform.event.EventIntakeTask; diff --git 
a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/GossipFactory.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/GossipFactory.java index 8419835092c7..1a3cc5309003 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/GossipFactory.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/GossipFactory.java @@ -18,6 +18,7 @@ import static com.swirlds.logging.LogMarker.STARTUP; +import com.swirlds.base.time.Time; import com.swirlds.common.context.PlatformContext; import com.swirlds.common.notification.NotificationEngine; import com.swirlds.common.system.NodeId; @@ -26,7 +27,6 @@ import com.swirlds.common.threading.framework.QueueThread; import com.swirlds.common.threading.interrupt.InterruptableConsumer; import com.swirlds.common.threading.manager.ThreadManager; -import com.swirlds.common.time.Time; import com.swirlds.platform.Consensus; import com.swirlds.platform.Crypto; import com.swirlds.platform.FreezeManager; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/ChatterGossip.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/ChatterGossip.java index 35ca9e48cef4..e9630fc349db 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/ChatterGossip.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/ChatterGossip.java @@ -21,6 +21,7 @@ import static com.swirlds.platform.SwirldsPlatform.PLATFORM_THREAD_POOL_NAME; import com.swirlds.base.state.LifecyclePhase; +import com.swirlds.base.time.Time; import com.swirlds.common.config.BasicConfig; import com.swirlds.common.context.PlatformContext; import com.swirlds.common.crypto.CryptographyHolder; @@ -38,7 +39,6 @@ import com.swirlds.common.threading.pool.ParallelExecutor; import com.swirlds.common.threading.utility.SequenceCycle; import com.swirlds.common.time.OSTime; -import com.swirlds.common.time.Time; import com.swirlds.common.utility.Clearable; import com.swirlds.common.utility.LoggingClearables; import com.swirlds.common.utility.PlatformVersion; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/ChatterCore.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/ChatterCore.java index a1261c65843c..e2b6fa0fbf3d 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/ChatterCore.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/ChatterCore.java @@ -16,12 +16,12 @@ package com.swirlds.platform.gossip.chatter.protocol; +import com.swirlds.base.time.Time; import com.swirlds.common.metrics.DurationGauge; import com.swirlds.common.metrics.Metrics; import com.swirlds.common.metrics.extensions.CountPerSecond; import com.swirlds.common.sequence.Shiftable; import com.swirlds.common.system.NodeId; -import com.swirlds.common.time.Time; import com.swirlds.platform.gossip.chatter.config.ChatterConfig; import com.swirlds.platform.gossip.chatter.protocol.heartbeat.HeartbeatMessage; import com.swirlds.platform.gossip.chatter.protocol.heartbeat.HeartbeatSendReceive; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/heartbeat/HeartbeatSendReceive.java 
b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/heartbeat/HeartbeatSendReceive.java index 2b624f6a9bbb..af3d664ca22d 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/heartbeat/HeartbeatSendReceive.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/heartbeat/HeartbeatSendReceive.java @@ -16,9 +16,9 @@ package com.swirlds.platform.gossip.chatter.protocol.heartbeat; +import com.swirlds.base.time.Time; import com.swirlds.common.io.SelfSerializable; import com.swirlds.common.system.NodeId; -import com.swirlds.common.time.Time; import com.swirlds.platform.gossip.chatter.protocol.MessageHandler; import com.swirlds.platform.gossip.chatter.protocol.MessageProvider; import edu.umd.cs.findbugs.annotations.NonNull; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/heartbeat/HeartbeatSender.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/heartbeat/HeartbeatSender.java index 538023e37cb9..5e5c4e77a4c0 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/heartbeat/HeartbeatSender.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/heartbeat/HeartbeatSender.java @@ -16,9 +16,9 @@ package com.swirlds.platform.gossip.chatter.protocol.heartbeat; +import com.swirlds.base.time.Time; import com.swirlds.common.io.SelfSerializable; import com.swirlds.common.system.NodeId; -import com.swirlds.common.time.Time; import com.swirlds.platform.gossip.chatter.protocol.MessageHandler; import com.swirlds.platform.gossip.chatter.protocol.MessageProvider; import edu.umd.cs.findbugs.annotations.NonNull; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/processing/ProcessingTimeSendReceive.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/processing/ProcessingTimeSendReceive.java index a47b9118db3e..6ac749309d5c 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/processing/ProcessingTimeSendReceive.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/chatter/protocol/processing/ProcessingTimeSendReceive.java @@ -16,8 +16,8 @@ package com.swirlds.platform.gossip.chatter.protocol.processing; +import com.swirlds.base.time.Time; import com.swirlds.common.io.SelfSerializable; -import com.swirlds.common.time.Time; import com.swirlds.platform.gossip.chatter.protocol.MessageHandler; import com.swirlds.platform.gossip.chatter.protocol.MessageProvider; import java.time.Duration; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncGossip.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncGossip.java index 642d289f7d2e..126bfcf03ec6 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncGossip.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncGossip.java @@ -20,6 +20,7 @@ import static com.swirlds.platform.SwirldsPlatform.PLATFORM_THREAD_POOL_NAME; import com.swirlds.base.state.LifecyclePhase; +import com.swirlds.base.time.Time; import com.swirlds.common.config.BasicConfig; import 
com.swirlds.common.context.PlatformContext; import com.swirlds.common.merkle.synchronization.config.ReconnectConfig; @@ -34,7 +35,6 @@ import com.swirlds.common.threading.interrupt.InterruptableConsumer; import com.swirlds.common.threading.manager.ThreadManager; import com.swirlds.common.threading.pool.ParallelExecutor; -import com.swirlds.common.time.Time; import com.swirlds.common.utility.Clearable; import com.swirlds.common.utility.LoggingClearables; import com.swirlds.common.utility.PlatformVersion; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/protocol/SyncProtocol.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/protocol/SyncProtocol.java index f14e037a643e..cb63634b39e8 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/protocol/SyncProtocol.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/protocol/SyncProtocol.java @@ -18,11 +18,11 @@ import static com.swirlds.common.utility.CompareTo.isGreaterThanOrEqualTo; +import com.swirlds.base.time.Time; import com.swirlds.common.system.NodeId; import com.swirlds.common.threading.SyncPermitProvider; import com.swirlds.common.threading.locks.locked.MaybeLocked; import com.swirlds.common.threading.pool.ParallelExecutionException; -import com.swirlds.common.time.Time; import com.swirlds.platform.Utilities; import com.swirlds.platform.components.CriticalQuorum; import com.swirlds.platform.gossip.FallenBehindManager; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/heartbeats/HeartbeatProtocol.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/heartbeats/HeartbeatProtocol.java index 90a5d44fbd82..2d0e0439d857 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/heartbeats/HeartbeatProtocol.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/heartbeats/HeartbeatProtocol.java @@ -18,8 +18,8 @@ import static com.swirlds.common.utility.CompareTo.isGreaterThanOrEqualTo; +import com.swirlds.base.time.Time; import com.swirlds.common.system.NodeId; -import com.swirlds.common.time.Time; import com.swirlds.platform.network.ByteConstants; import com.swirlds.platform.network.Connection; import com.swirlds.platform.network.NetworkMetrics; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/intake/IntakeCycleStats.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/intake/IntakeCycleStats.java index f0cf705b3d3a..6ba92f9ddf94 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/intake/IntakeCycleStats.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/intake/IntakeCycleStats.java @@ -16,8 +16,8 @@ package com.swirlds.platform.intake; +import com.swirlds.base.time.Time; import com.swirlds.common.metrics.Metrics; -import com.swirlds.common.time.Time; import com.swirlds.platform.stats.cycle.AccumulatedCycleMetrics; import com.swirlds.platform.stats.cycle.CycleDefinition; import com.swirlds.platform.stats.cycle.CycleTracker; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/metrics/ConsensusHandlingMetrics.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/metrics/ConsensusHandlingMetrics.java index d61c4d68d7df..14e33f26fe24 100644 --- 
a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/metrics/ConsensusHandlingMetrics.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/metrics/ConsensusHandlingMetrics.java @@ -19,9 +19,9 @@ import static com.swirlds.common.metrics.FloatFormats.FORMAT_8_1; import static com.swirlds.common.metrics.Metrics.INTERNAL_CATEGORY; +import com.swirlds.base.time.Time; import com.swirlds.common.metrics.LongGauge; import com.swirlds.common.metrics.Metrics; -import com.swirlds.common.time.Time; import com.swirlds.common.utility.CommonUtils; import com.swirlds.platform.eventhandling.ConsensusRoundHandler; import com.swirlds.platform.internal.ConsensusRound; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/metrics/EventIntakeMetrics.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/metrics/EventIntakeMetrics.java index 3c918b48484d..a0007173b9ad 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/metrics/EventIntakeMetrics.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/metrics/EventIntakeMetrics.java @@ -25,11 +25,11 @@ import static com.swirlds.common.metrics.Metrics.PLATFORM_CATEGORY; import static com.swirlds.common.utility.Units.NANOSECONDS_TO_SECONDS; +import com.swirlds.base.time.Time; import com.swirlds.common.metrics.Counter; import com.swirlds.common.metrics.Metrics; import com.swirlds.common.metrics.RunningAverageMetric; import com.swirlds.common.metrics.SpeedometerMetric; -import com.swirlds.common.time.Time; import com.swirlds.common.utility.CommonUtils; import com.swirlds.platform.internal.EventImpl; import com.swirlds.platform.observers.StaleEventObserver; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/ConsensusHashManager.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/ConsensusHashManager.java index 5efbcff8f87b..77531a6da964 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/ConsensusHashManager.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/ConsensusHashManager.java @@ -19,6 +19,7 @@ import static com.swirlds.logging.LogMarker.EXCEPTION; import static com.swirlds.logging.LogMarker.STATE_HASH; +import com.swirlds.base.time.Time; import com.swirlds.common.config.ConsensusConfig; import com.swirlds.common.config.StateConfig; import com.swirlds.common.crypto.Hash; @@ -26,7 +27,6 @@ import com.swirlds.common.sequence.map.SequenceMap; import com.swirlds.common.system.NodeId; import com.swirlds.common.system.address.AddressBook; -import com.swirlds.common.time.Time; import com.swirlds.common.utility.throttle.RateLimiter; import com.swirlds.logging.payloads.IssPayload; import com.swirlds.platform.dispatch.DispatchBuilder; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/IssHandler.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/IssHandler.java index c59309c4c5ea..acd81f6a333a 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/IssHandler.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/IssHandler.java @@ -18,11 +18,11 @@ import static com.swirlds.base.ArgumentUtils.throwArgNull; +import com.swirlds.base.time.Time; import com.swirlds.common.config.StateConfig; import 
com.swirlds.common.crypto.Hash; import com.swirlds.common.system.NodeId; import com.swirlds.common.system.state.notifications.IssNotification; -import com.swirlds.common.time.Time; import com.swirlds.common.utility.throttle.RateLimiter; import com.swirlds.platform.components.common.output.FatalErrorConsumer; import com.swirlds.platform.components.state.output.IssConsumer; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileManager.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileManager.java index 69cc3b63865a..d86cde0fe2f0 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileManager.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileManager.java @@ -27,6 +27,7 @@ import static com.swirlds.platform.state.signed.SignedStateFileWriter.writeSignedStateToDisk; import com.swirlds.base.state.Startable; +import com.swirlds.base.time.Time; import com.swirlds.common.config.BasicConfig; import com.swirlds.common.config.StateConfig; import com.swirlds.common.context.PlatformContext; @@ -35,7 +36,6 @@ import com.swirlds.common.threading.framework.config.QueueThreadConfiguration; import com.swirlds.common.threading.interrupt.Uninterruptable; import com.swirlds.common.threading.manager.ThreadManager; -import com.swirlds.common.time.Time; import com.swirlds.platform.components.state.output.MinimumGenerationNonAncientConsumer; import com.swirlds.platform.components.state.output.StateToDiskAttemptConsumer; import edu.umd.cs.findbugs.annotations.NonNull; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateHistory.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateHistory.java index 52bb07a5c552..6d713020c5c2 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateHistory.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateHistory.java @@ -18,8 +18,8 @@ import static com.swirlds.platform.state.signed.SignedStateHistory.SignedStateAction.RESERVE; +import com.swirlds.base.time.Time; import com.swirlds.common.formatting.TextTable; -import com.swirlds.common.time.Time; import com.swirlds.common.utility.StackTrace; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateSentinel.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateSentinel.java index d7ce46ddfe60..323d32338715 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateSentinel.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateSentinel.java @@ -20,13 +20,13 @@ import com.swirlds.base.state.Startable; import com.swirlds.base.state.Stoppable; +import com.swirlds.base.time.Time; import com.swirlds.common.config.StateConfig; import com.swirlds.common.context.PlatformContext; import com.swirlds.common.threading.framework.StoppableThread; import com.swirlds.common.threading.framework.config.StoppableThreadConfiguration; import com.swirlds.common.threading.manager.ThreadManager; import com.swirlds.common.time.OSTime; -import 
com.swirlds.common.time.Time; import com.swirlds.common.utility.CompareTo; import com.swirlds.common.utility.RuntimeObjectRecord; import com.swirlds.common.utility.RuntimeObjectRegistry; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/stats/cycle/CycleTracker.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/stats/cycle/CycleTracker.java index d14b5300b5c3..dfba905ed389 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/stats/cycle/CycleTracker.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/stats/cycle/CycleTracker.java @@ -16,7 +16,7 @@ package com.swirlds.platform.stats.cycle; -import com.swirlds.common.time.Time; +import com.swirlds.base.time.Time; import java.util.Arrays; /** diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/uptime/UptimeTracker.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/uptime/UptimeTracker.java index ecc8ea3d25fe..9b57b42592e5 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/uptime/UptimeTracker.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/uptime/UptimeTracker.java @@ -22,13 +22,13 @@ import static com.swirlds.common.units.TimeUnit.UNIT_NANOSECONDS; import static com.swirlds.common.units.TimeUnit.UNIT_SECONDS; +import com.swirlds.base.time.Time; import com.swirlds.common.context.PlatformContext; import com.swirlds.common.system.NodeId; import com.swirlds.common.system.Round; import com.swirlds.common.system.address.Address; import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.system.events.ConsensusEvent; -import com.swirlds.common.time.Time; import com.swirlds.common.utility.CompareTo; import com.swirlds.platform.internal.ConsensusRound; import com.swirlds.platform.internal.EventImpl; diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/TestIntake.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/TestIntake.java index 7623fb13cb51..b672f8fb3418 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/TestIntake.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/TestIntake.java @@ -18,12 +18,12 @@ import static org.mockito.Mockito.mock; +import com.swirlds.base.time.Time; import com.swirlds.common.config.ConsensusConfig; import com.swirlds.common.config.singleton.ConfigurationHolder; import com.swirlds.common.system.NodeId; import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.time.OSTime; -import com.swirlds.common.time.Time; import com.swirlds.platform.Consensus; import com.swirlds.platform.ConsensusImpl; import com.swirlds.platform.components.EventIntake; diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/simulated/SimpleSimulatedGossip.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/simulated/SimpleSimulatedGossip.java index 9179c80441a3..48fe77936f00 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/simulated/SimpleSimulatedGossip.java +++ 
b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/simulated/SimpleSimulatedGossip.java @@ -16,9 +16,9 @@ package com.swirlds.platform.test.simulated; +import com.swirlds.base.time.Time; import com.swirlds.common.io.SelfSerializable; import com.swirlds.common.system.NodeId; -import com.swirlds.common.time.Time; import com.swirlds.platform.test.simulated.config.NetworkConfig; import com.swirlds.platform.test.simulated.config.NodeConfig; import edu.umd.cs.findbugs.annotations.NonNull; diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/simulated/SimulatedEventCreationNode.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/simulated/SimulatedEventCreationNode.java index b00cdfc96de2..f9973635a009 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/simulated/SimulatedEventCreationNode.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/simulated/SimulatedEventCreationNode.java @@ -18,6 +18,7 @@ import static com.swirlds.common.threading.manager.AdHocThreadManager.getStaticThreadManager; +import com.swirlds.base.time.Time; import com.swirlds.common.crypto.Cryptography; import com.swirlds.common.crypto.Hash; import com.swirlds.common.crypto.SerializableHashable; @@ -28,7 +29,6 @@ import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.system.transaction.internal.ConsensusTransactionImpl; import com.swirlds.common.test.RandomUtils; -import com.swirlds.common.time.Time; import com.swirlds.common.utility.CommonUtils; import com.swirlds.platform.components.CriticalQuorum; import com.swirlds.platform.components.CriticalQuorumImpl; diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/DelayableIntakeQueue.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/DelayableIntakeQueue.java index 5b55111d0164..e92d33729686 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/DelayableIntakeQueue.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/DelayableIntakeQueue.java @@ -16,8 +16,8 @@ package com.swirlds.platform.test.chatter.network; +import com.swirlds.base.time.Time; import com.swirlds.common.system.NodeId; -import com.swirlds.common.time.Time; import com.swirlds.platform.gossip.chatter.protocol.ChatterCore; import com.swirlds.platform.gossip.chatter.protocol.messages.ChatterEvent; import com.swirlds.platform.test.chatter.network.framework.AbstractSimulatedEventPipeline; diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/TimedEventCreator.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/TimedEventCreator.java index afb9b1a067a1..bc52219d41e4 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/TimedEventCreator.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/TimedEventCreator.java @@ -16,7 +16,7 @@ package 
com.swirlds.platform.test.chatter.network; -import com.swirlds.common.time.Time; +import com.swirlds.base.time.Time; import com.swirlds.platform.test.chatter.network.framework.SimulatedChatterEvent; import com.swirlds.platform.test.chatter.network.framework.SimulatedEventCreator; import com.swirlds.platform.test.simulated.config.NodeConfig; diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/framework/ChatterInstance.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/framework/ChatterInstance.java index c830554fa3f2..325bfebdbf0a 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/framework/ChatterInstance.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/network/framework/ChatterInstance.java @@ -16,10 +16,10 @@ package com.swirlds.platform.test.chatter.network.framework; +import com.swirlds.base.time.Time; import com.swirlds.common.io.SelfSerializable; import com.swirlds.common.metrics.noop.NoOpMetrics; import com.swirlds.common.system.NodeId; -import com.swirlds.common.time.Time; import com.swirlds.platform.gossip.chatter.config.ChatterConfig; import com.swirlds.platform.gossip.chatter.protocol.ChatterCore; import com.swirlds.platform.gossip.chatter.protocol.PeerMessageException; From abdd1d06316d11c1b015c4204893200dc5d2acb0 Mon Sep 17 00:00:00 2001 From: Hendrik Ebbers Date: Wed, 14 Jun 2023 11:16:59 +0200 Subject: [PATCH 27/70] update of Lock api (#7077) Signed-off-by: Hendrik Ebbers --- .../threading/locks/AutoClosableLock.java | 7 +++++- .../locks/AutoClosableResourceLock.java | 7 +++++- .../common/threading/locks/IndexLock.java | 11 +++++++--- .../swirlds/common/threading/locks/Locks.java | 6 ++++- .../locks/internal/AcquiredOnTry.java | 5 +++-- .../locks/internal/AcquiredResource.java | 9 +++++--- .../threading/locks/internal/AutoLock.java | 8 ++++++- .../locks/internal/AutoNoOpLock.java | 9 +++++++- .../locks/internal/DefaultIndexLock.java | 13 +++++++---- .../locks/internal/NotAcquiredResource.java | 6 +++-- .../locks/internal/ResourceLock.java | 13 ++++++++--- .../locks/internal/package-info.java | 22 +++++++++++++++++++ .../locks/locked/LockedResource.java | 5 ++++- 13 files changed, 98 insertions(+), 23 deletions(-) create mode 100644 platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/package-info.java diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/AutoClosableLock.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/AutoClosableLock.java index 403fffeb53f8..c86d5be55657 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/AutoClosableLock.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/AutoClosableLock.java @@ -18,6 +18,7 @@ import com.swirlds.common.threading.locks.locked.Locked; import com.swirlds.common.threading.locks.locked.MaybeLocked; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Condition; @@ -32,11 +33,13 @@ public interface AutoClosableLock { * * @return an instance used to release the lock */ + @NonNull Locked lock(); /** * Same as {@link #lock()}, but can unblock if interrupted */ + @NonNull Locked lockInterruptibly() 
throws InterruptedException; /** @@ -50,7 +53,8 @@ public interface AutoClosableLock { /** * {@link #tryLock()} but with a timeout */ - MaybeLocked tryLock(long time, TimeUnit unit) throws InterruptedException; + @NonNull + MaybeLocked tryLock(long time, @NonNull TimeUnit unit) throws InterruptedException; /** * Returns a new {@link Condition} instance that is bound to this @@ -63,5 +67,6 @@ public interface AutoClosableLock { * * @return A new {@link Condition} instance for this {@code Lock} instance */ + @NonNull Condition newCondition(); } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/AutoClosableResourceLock.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/AutoClosableResourceLock.java index fef297d8c275..b1f4f903716b 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/AutoClosableResourceLock.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/AutoClosableResourceLock.java @@ -18,6 +18,7 @@ import com.swirlds.common.threading.locks.locked.LockedResource; import com.swirlds.common.threading.locks.locked.MaybeLockedResource; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.concurrent.TimeUnit; /** @@ -32,23 +33,27 @@ public interface AutoClosableResourceLock extends AutoClosableLock { * {@inheritDoc} */ @Override + @NonNull LockedResource lock(); /** * {@inheritDoc} */ @Override + @NonNull LockedResource lockInterruptibly() throws InterruptedException; /** * {@inheritDoc} */ @Override + @NonNull MaybeLockedResource tryLock(); /** * {@inheritDoc} */ @Override - MaybeLockedResource tryLock(final long time, final TimeUnit unit) throws InterruptedException; + @NonNull + MaybeLockedResource tryLock(final long time, @NonNull final TimeUnit unit) throws InterruptedException; } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/IndexLock.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/IndexLock.java index a4227cccfb07..762c3570f0ac 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/IndexLock.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/IndexLock.java @@ -17,6 +17,8 @@ package com.swirlds.common.threading.locks; import com.swirlds.common.threading.locks.locked.Locked; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; /** *

@@ -50,7 +52,7 @@ public interface IndexLock { * @param object * the object to lock, can be null */ - void lock(final Object object); + void lock(@Nullable final Object object); /** * Unlock on a given index value. @@ -67,7 +69,7 @@ public interface IndexLock { * @param object * the object to unlock, can be null */ - void unlock(final Object object); + void unlock(@Nullable final Object object); /** * Acquire a lock and return an autocloseable object that will release the lock. @@ -76,6 +78,7 @@ public interface IndexLock { * the index to lock * @return an object that will unlock the lock once it is closed */ + @NonNull Locked autoLock(final long index); /** @@ -86,7 +89,8 @@ public interface IndexLock { * the object to lock, can be null * @return an object that will unlock the lock once it is closed */ - Locked autoLock(final Object object); + @NonNull + Locked autoLock(@Nullable final Object object); /** * Lock every index. This is expensive, use with caution. @@ -104,5 +108,6 @@ public interface IndexLock { * * @return an object that will unlock the lock once it is closed */ + @NonNull Locked autoFullLock(); } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/Locks.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/Locks.java index cdd3bcc57496..d393a1c16ee6 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/Locks.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/Locks.java @@ -19,6 +19,7 @@ import com.swirlds.common.threading.locks.internal.AutoLock; import com.swirlds.common.threading.locks.internal.DefaultIndexLock; import com.swirlds.common.threading.locks.internal.ResourceLock; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.concurrent.locks.ReentrantLock; /** @@ -34,6 +35,7 @@ public interface Locks { * * indexes at the cost of additional memory overhead. * @return a new lock for index values. 
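 * <p>
 * A usage sketch only (not introduced by this change; the capacity and index value are arbitrary, and it assumes the
 * returned {@code Locked} handle releases the lock when closed, per the try-with-resources design of these interfaces):
 * <pre>{@code
 * final IndexLock indexLock = Locks.createIndexLock(1024);
 * try (final Locked ignored = indexLock.autoLock(42L)) {
 *     // work that must not run concurrently for the same index
 * }
 * }</pre>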
*/ + @NonNull static IndexLock createIndexLock(final int parallelism) { return new DefaultIndexLock(parallelism); } @@ -43,6 +45,7 @@ static IndexLock createIndexLock(final int parallelism) { * * @return the lock */ + @NonNull static AutoClosableLock createAutoLock() { return new AutoLock(); } @@ -57,7 +60,8 @@ static AutoClosableLock createAutoLock() { * type of the resource * @return the lock */ - static AutoClosableResourceLock createResourceLock(final T resource) { + @NonNull + static AutoClosableResourceLock createResourceLock(@NonNull final T resource) { return new ResourceLock<>(new ReentrantLock(), resource); } } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AcquiredOnTry.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AcquiredOnTry.java index e3a0e10da568..c88521be24f8 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AcquiredOnTry.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AcquiredOnTry.java @@ -18,14 +18,15 @@ import com.swirlds.common.AutoCloseableNonThrowing; import com.swirlds.common.threading.locks.locked.MaybeLocked; +import edu.umd.cs.findbugs.annotations.NonNull; /** * Returned when a lock has been acquired on a try */ -public class AcquiredOnTry implements MaybeLocked { +public final class AcquiredOnTry implements MaybeLocked { private final AutoCloseableNonThrowing close; - public AcquiredOnTry(final AutoCloseableNonThrowing close) { + public AcquiredOnTry(@NonNull final AutoCloseableNonThrowing close) { this.close = close; } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AcquiredResource.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AcquiredResource.java index 838334fceabe..420298ee8918 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AcquiredResource.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AcquiredResource.java @@ -18,6 +18,8 @@ import com.swirlds.common.AutoCloseableNonThrowing; import com.swirlds.common.threading.locks.locked.MaybeLockedResource; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; /** * An instance which is returned by the {@link ResourceLock} when the lock is acquired. 
Provides access to the locked @@ -26,11 +28,11 @@ * @param * the type of resource */ -public class AcquiredResource implements MaybeLockedResource { +public final class AcquiredResource implements MaybeLockedResource { private final AutoCloseableNonThrowing unlock; private T resource; - public AcquiredResource(final AutoCloseableNonThrowing unlock, final T resource) { + public AcquiredResource(@NonNull final AutoCloseableNonThrowing unlock, @Nullable final T resource) { this.unlock = unlock; this.resource = resource; } @@ -39,6 +41,7 @@ public AcquiredResource(final AutoCloseableNonThrowing unlock, final T resource) * {@inheritDoc} */ @Override + @Nullable public T getResource() { return resource; } @@ -47,7 +50,7 @@ public T getResource() { * {@inheritDoc} */ @Override - public void setResource(final T resource) { + public void setResource(@Nullable final T resource) { this.resource = resource; } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AutoLock.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AutoLock.java index 10696aa0e7fa..01c9b8f202b6 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AutoLock.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AutoLock.java @@ -19,6 +19,7 @@ import com.swirlds.common.threading.locks.AutoClosableLock; import com.swirlds.common.threading.locks.locked.Locked; import com.swirlds.common.threading.locks.locked.MaybeLocked; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; @@ -27,7 +28,7 @@ /** * A standard lock that provides the {@link AutoCloseable} semantics. Lock is reentrant. 
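 * <p>
 * Illustrative usage only, not something introduced by this change; it assumes the {@code Locked} handle
 * unlocks when closed in try-with-resources, as the AutoCloseable semantics above describe:
 * <pre>{@code
 * final AutoClosableLock lock = Locks.createAutoLock();
 * try (final Locked ignored = lock.lock()) {
 *     // critical section runs while the lock is held
 * }
 * }</pre>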
*/ -public class AutoLock implements AutoClosableLock { +public final class AutoLock implements AutoClosableLock { private final Lock lock = new ReentrantLock(); @@ -35,6 +36,7 @@ public class AutoLock implements AutoClosableLock { * {@inheritDoc} */ @Override + @NonNull public Locked lock() { lock.lock(); return lock::unlock; @@ -44,6 +46,7 @@ public Locked lock() { * {@inheritDoc} */ @Override + @NonNull public Locked lockInterruptibly() throws InterruptedException { lock.lockInterruptibly(); return lock::unlock; @@ -53,6 +56,7 @@ public Locked lockInterruptibly() throws InterruptedException { * {@inheritDoc} */ @Override + @NonNull public MaybeLocked tryLock() { final boolean locked = lock.tryLock(); return new MaybeLocked() { @@ -74,6 +78,7 @@ public void close() { * {@inheritDoc} */ @Override + @NonNull public MaybeLocked tryLock(final long time, final TimeUnit unit) throws InterruptedException { final boolean locked = lock.tryLock(time, unit); return new MaybeLocked() { @@ -95,6 +100,7 @@ public void close() { * {@inheritDoc} */ @Override + @NonNull public Condition newCondition() { return lock.newCondition(); } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AutoNoOpLock.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AutoNoOpLock.java index 50ab3cca19c7..db8cf03e5651 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AutoNoOpLock.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/AutoNoOpLock.java @@ -19,6 +19,7 @@ import com.swirlds.common.threading.locks.AutoClosableLock; import com.swirlds.common.threading.locks.locked.Locked; import com.swirlds.common.threading.locks.locked.MaybeLocked; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Condition; @@ -55,6 +56,7 @@ private AutoNoOpLock() {} * * @return an instance of a no-op auto-lock */ + @NonNull public static AutoClosableLock getInstance() { return instance; } @@ -63,6 +65,7 @@ public static AutoClosableLock getInstance() { * {@inheritDoc} */ @Override + @NonNull public Locked lock() { return locked; } @@ -71,6 +74,7 @@ public Locked lock() { * {@inheritDoc} */ @Override + @NonNull public Locked lockInterruptibly() { return locked; } @@ -79,6 +83,7 @@ public Locked lockInterruptibly() { * {@inheritDoc} */ @Override + @NonNull public MaybeLocked tryLock() { return maybeLocked; } @@ -87,7 +92,8 @@ public MaybeLocked tryLock() { * {@inheritDoc} */ @Override - public MaybeLocked tryLock(final long time, final TimeUnit unit) { + @NonNull + public MaybeLocked tryLock(final long time, @NonNull final TimeUnit unit) { return maybeLocked; } @@ -98,6 +104,7 @@ public MaybeLocked tryLock(final long time, final TimeUnit unit) { * if called */ @Override + @NonNull public Condition newCondition() { throw new UnsupportedOperationException(); } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/DefaultIndexLock.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/DefaultIndexLock.java index 330906629b0b..9d163de309e0 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/DefaultIndexLock.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/DefaultIndexLock.java @@ -18,13 +18,15 @@ import 
com.swirlds.common.threading.locks.IndexLock; import com.swirlds.common.threading.locks.locked.Locked; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; /** * Default implementation of {@link IndexLock} */ -public class DefaultIndexLock implements IndexLock { +public final class DefaultIndexLock implements IndexLock { private final int parallelism; private final Lock[] locks; @@ -58,7 +60,7 @@ public void lock(final long index) { * {@inheritDoc} */ @Override - public void lock(final Object object) { + public void lock(@Nullable final Object object) { final int hash = object == null ? 0 : object.hashCode(); lock(hash); } @@ -75,7 +77,7 @@ public void unlock(final long index) { * {@inheritDoc} */ @Override - public void unlock(final Object object) { + public void unlock(@Nullable final Object object) { final int hash = object == null ? 0 : object.hashCode(); unlock(hash); } @@ -84,6 +86,7 @@ public void unlock(final Object object) { * {@inheritDoc} */ @Override + @NonNull public Locked autoLock(final long index) { lock(index); return () -> unlock(index); @@ -93,7 +96,8 @@ public Locked autoLock(final long index) { * {@inheritDoc} */ @Override - public Locked autoLock(final Object object) { + @NonNull + public Locked autoLock(@Nullable final Object object) { final int hash = object == null ? 0 : object.hashCode(); return autoLock(hash); } @@ -122,6 +126,7 @@ public void fullyUnlock() { * {@inheritDoc} */ @Override + @NonNull public Locked autoFullLock() { fullyLock(); return this::fullyUnlock; diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/NotAcquiredResource.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/NotAcquiredResource.java index 90f73ceab9e0..2eb9bd9b7410 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/NotAcquiredResource.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/NotAcquiredResource.java @@ -17,11 +17,12 @@ package com.swirlds.common.threading.locks.internal; import com.swirlds.common.threading.locks.locked.MaybeLockedResource; +import edu.umd.cs.findbugs.annotations.Nullable; /** * Return an instance of this when a {@link ResourceLock} has not been acquired */ -public class NotAcquiredResource implements MaybeLockedResource { +public final class NotAcquiredResource implements MaybeLockedResource { /** * {@inheritDoc} @@ -35,6 +36,7 @@ public void close() { * {@inheritDoc} */ @Override + @Nullable public T getResource() { throw new IllegalStateException("Cannot get resource if the lock is not obtained"); } @@ -43,7 +45,7 @@ public T getResource() { * {@inheritDoc} */ @Override - public void setResource(T resource) { + public void setResource(@Nullable T resource) { throw new IllegalStateException("Cannot set resource if the lock is not obtained"); } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/ResourceLock.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/ResourceLock.java index 53229cb328e6..299835e3a578 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/ResourceLock.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/ResourceLock.java @@ -20,6 +20,8 @@ import 
com.swirlds.common.threading.locks.AutoClosableResourceLock; import com.swirlds.common.threading.locks.locked.LockedResource; import com.swirlds.common.threading.locks.locked.MaybeLockedResource; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; @@ -30,7 +32,7 @@ * @param * the type of resource */ -public class ResourceLock implements AutoClosableResourceLock { +public final class ResourceLock implements AutoClosableResourceLock { private final Lock lock; @@ -38,7 +40,7 @@ public class ResourceLock implements AutoClosableResourceLock { private final MaybeLockedResource notAcquired; - public ResourceLock(final Lock lock, final T resource) { + public ResourceLock(@NonNull final Lock lock, @Nullable final T resource) { this.lock = lock; acquired = new AcquiredResource<>(lock::unlock, resource); notAcquired = new NotAcquiredResource<>(); @@ -48,6 +50,7 @@ public ResourceLock(final Lock lock, final T resource) { * {@inheritDoc} */ @Override + @NonNull public LockedResource lock() { lock.lock(); return acquired; @@ -57,6 +60,7 @@ public LockedResource lock() { * {@inheritDoc} */ @Override + @NonNull public LockedResource lockInterruptibly() throws InterruptedException { lock.lockInterruptibly(); return acquired; @@ -66,6 +70,7 @@ public LockedResource lockInterruptibly() throws InterruptedException { * {@inheritDoc} */ @Override + @NonNull public MaybeLockedResource tryLock() { if (lock.tryLock()) { return acquired; @@ -77,7 +82,8 @@ public MaybeLockedResource tryLock() { * {@inheritDoc} */ @Override - public MaybeLockedResource tryLock(final long time, final TimeUnit unit) throws InterruptedException { + @NonNull + public MaybeLockedResource tryLock(final long time, @NonNull final TimeUnit unit) throws InterruptedException { if (lock.tryLock(time, unit)) { return acquired; } @@ -88,6 +94,7 @@ public MaybeLockedResource tryLock(final long time, final TimeUnit unit) thro * {@inheritDoc} */ @Override + @NonNull public Condition newCondition() { return lock.newCondition(); } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/package-info.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/package-info.java new file mode 100644 index 000000000000..294428cd58bd --- /dev/null +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/internal/package-info.java @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +/** + * This package contains the private / internal api for locks. The package is not exported and should not be used + * outside of the {@link com.swirlds.common.threading.locks} package. 
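 * <p>
 * As a sketch (not part of this change set, and eliding generics shown elsewhere in the diff), these internal
 * implementations are reached only through the public {@code Locks} factory, for example:
 * <pre>{@code
 * final AutoClosableResourceLock<List<String>> lock = Locks.createResourceLock(new ArrayList<>());
 * try (final LockedResource<List<String>> locked = lock.lock()) {
 *     locked.getResource().add("guarded update");
 * }
 * }</pre>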
+ */ +package com.swirlds.common.threading.locks.internal; diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/locked/LockedResource.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/locked/LockedResource.java index 165101d5523f..e25ecc713d5b 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/locked/LockedResource.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/locks/locked/LockedResource.java @@ -16,6 +16,8 @@ package com.swirlds.common.threading.locks.locked; +import edu.umd.cs.findbugs.annotations.Nullable; + /** * Provides access to the resource that has been locked * @@ -26,6 +28,7 @@ public interface LockedResource extends Locked { /** * @return the locked resource, may be null */ + @Nullable T getResource(); /** @@ -34,5 +37,5 @@ public interface LockedResource extends Locked { * @param resource * the object to set */ - void setResource(T resource); + void setResource(@Nullable T resource); } From d274fe22610f0a841eb204a2dc6c1fba07a94dc6 Mon Sep 17 00:00:00 2001 From: Quan Nguyen Date: Wed, 14 Jun 2023 05:41:27 -0400 Subject: [PATCH 28/70] Remove obsolete tests (#6999) Signed-off-by: Quan Nguyen --- .../main/resources/FCM-Basic-3500-22m.json | 67 ---------- .../src/main/resources/FCM-Basic-500-60m.json | 57 --------- .../main/resources/FCM-Basic-500k-60m.json | 57 --------- .../resources/FCM-BasicStress-2.5k-13m.json | 117 ------------------ .../main/resources/FCMFCQ-Basic-1-12m.json | 92 -------------- 5 files changed, 390 deletions(-) delete mode 100644 platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-Basic-3500-22m.json delete mode 100644 platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-Basic-500-60m.json delete mode 100644 platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-Basic-500k-60m.json delete mode 100644 platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-BasicStress-2.5k-13m.json delete mode 100644 platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCMFCQ-Basic-1-12m.json diff --git a/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-Basic-3500-22m.json b/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-Basic-3500-22m.json deleted file mode 100644 index 88fae1ef1f26..000000000000 --- a/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-Basic-3500-22m.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "payloadConfig": { - "appendSig": true, - "insertSeq": true, - "variedSize": false, - "_comment": "payload type TYPE_RANDOM_BYTES for random bytes", - "type": "TYPE_RANDOM_BYTES", - "payloadByteSize": 100, - "maxByteSize": 100, - "distribution": { - "sizeDistribution": [ - 100 - ], - "ratioDistribution": [ - 100 - ], - "typeDistribution": [ - "TYPE_FCM_TEST" - ] - } - }, - "submitConfig": { - "_comment0": [ - "0, BYTES_PER_SECOND_PER_NODE", - "1, TRANS_PER_SECOND_PER_NODE", - "2, EVENTS_PER_SECOND_WHOLE_NETWORK", - "hard to control and achieve", - "3, ROUNDS_PER_SECOND_WHOLE_NETWORK", - "4, TRANS_PER_EVENT_WHOLE_NETWORK", - "more like low bound", - "5, C2C_LATENCY" - ], - "systemMetric": "TRANS_PER_SECOND_PER_NODE", - "metricThreshold": 3500.0, - "_comment1": "pause after submit defined amount of transactions", - "pauseAfter": 0, - "pauseSeconds": 30, - "allowRunSubmit": true, - "C2CDelayThreshold": 6 - }, - "fcmConfig": { - "sequentialTest": 
true, - "_comment": "if sequentialTest is true, generate payload according to sequentialType, sequentialAmount, sequentialSize, this override typeDistribution defined in payloadConfig", - "sequentials": [ - { - "sequentialType": "TYPE_FCM_CREATE", - "sequentialAmount": 750000, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_TRANSFER", - "sequentialAmount": 700000, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_TRANSFER", - "sequentialAmount": 700000, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_TRANSFER", - "sequentialAmount": 700000, - "sequentialSize": 100 - } - ] - } -} diff --git a/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-Basic-500-60m.json b/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-Basic-500-60m.json deleted file mode 100644 index 7286420ba1ac..000000000000 --- a/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-Basic-500-60m.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "payloadConfig": { - "appendSig": true, - "insertSeq": true, - "variedSize": false, - "_comment": "payload type TYPE_RANDOM_BYTES for random bytes", - "type": "TYPE_RANDOM_BYTES", - "payloadByteSize": 100, - "maxByteSize": 100, - "distribution": { - "sizeDistribution": [ - 100 - ], - "ratioDistribution": [ - 100 - ], - "typeDistribution": [ - "TYPE_FCM_TEST" - ] - } - }, - "submitConfig": { - "_comment0": [ - "0, BYTES_PER_SECOND_PER_NODE", - "1, TRANS_PER_SECOND_PER_NODE", - "2, EVENTS_PER_SECOND_WHOLE_NETWORK", - "hard to control and achieve", - "3, ROUNDS_PER_SECOND_WHOLE_NETWORK", - "4, TRANS_PER_EVENT_WHOLE_NETWORK", - "more like low bound", - "5, C2C_LATENCY" - ], - "systemMetric": "TRANS_PER_SECOND_PER_NODE", - "metricThreshold": 500.0, - "_comment1": "pause after submit defined amount of transactions", - "pauseAfter": 0, - "pauseSeconds": 30, - "allowRunSubmit": true, - "C2CDelayThreshold": 6 - }, - "fcmConfig": { - "sequentialTest": true, - "_comment": "if sequentialTest is true, generate payload according to sequentialType, sequentialAmoutn, sequentialSize, this override typeDistribution defined in payloadConfig", - "sequentials": [ - { - "sequentialType": "TYPE_FCM_CREATE", - "sequentialAmount": 25, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_TRANSFER", - "sequentialAmount": 1000000, - "sequentialSize": 100 - } - ] - } -} diff --git a/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-Basic-500k-60m.json b/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-Basic-500k-60m.json deleted file mode 100644 index 7286420ba1ac..000000000000 --- a/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-Basic-500k-60m.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "payloadConfig": { - "appendSig": true, - "insertSeq": true, - "variedSize": false, - "_comment": "payload type TYPE_RANDOM_BYTES for random bytes", - "type": "TYPE_RANDOM_BYTES", - "payloadByteSize": 100, - "maxByteSize": 100, - "distribution": { - "sizeDistribution": [ - 100 - ], - "ratioDistribution": [ - 100 - ], - "typeDistribution": [ - "TYPE_FCM_TEST" - ] - } - }, - "submitConfig": { - "_comment0": [ - "0, BYTES_PER_SECOND_PER_NODE", - "1, TRANS_PER_SECOND_PER_NODE", - "2, EVENTS_PER_SECOND_WHOLE_NETWORK", - "hard to control and achieve", - "3, ROUNDS_PER_SECOND_WHOLE_NETWORK", - "4, TRANS_PER_EVENT_WHOLE_NETWORK", - "more like low bound", - "5, C2C_LATENCY" - ], - "systemMetric": "TRANS_PER_SECOND_PER_NODE", - "metricThreshold": 
500.0, - "_comment1": "pause after submit defined amount of transactions", - "pauseAfter": 0, - "pauseSeconds": 30, - "allowRunSubmit": true, - "C2CDelayThreshold": 6 - }, - "fcmConfig": { - "sequentialTest": true, - "_comment": "if sequentialTest is true, generate payload according to sequentialType, sequentialAmoutn, sequentialSize, this override typeDistribution defined in payloadConfig", - "sequentials": [ - { - "sequentialType": "TYPE_FCM_CREATE", - "sequentialAmount": 25, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_TRANSFER", - "sequentialAmount": 1000000, - "sequentialSize": 100 - } - ] - } -} diff --git a/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-BasicStress-2.5k-13m.json b/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-BasicStress-2.5k-13m.json deleted file mode 100644 index d264c834f02f..000000000000 --- a/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCM-BasicStress-2.5k-13m.json +++ /dev/null @@ -1,117 +0,0 @@ -{ - "payloadConfig": { - "appendSig": true, - "insertSeq": true, - "variedSize": false, - "_comment": "payload type TYPE_RANDOM_BYTES for random bytes", - "type": "TYPE_RANDOM_BYTES", - "payloadByteSize": 100, - "maxByteSize": 100, - "distribution": { - "sizeDistribution": [ - 100 - ], - "ratioDistribution": [ - 100 - ], - "typeDistribution": [ - "TYPE_FCM_TEST" - ] - } - }, - "submitConfig": { - "_comment0": [ - "0, BYTES_PER_SECOND_PER_NODE", - "1, TRANS_PER_SECOND_PER_NODE", - "2, EVENTS_PER_SECOND_WHOLE_NETWORK", - "hard to control and achieve", - "3, ROUNDS_PER_SECOND_WHOLE_NETWORK", - "4, TRANS_PER_EVENT_WHOLE_NETWORK", - "more like low bound", - "5, C2C_LATENCY" - ], - "systemMetric": "TRANS_PER_SECOND_PER_NODE", - "metricThreshold": 2500.0, - "_comment1": "pause after submit defined amount of transactions", - "pauseAfter": 0, - "pauseSeconds": 30, - "allowRunSubmit": true, - "C2CDelayThreshold": 6 - }, - "fcmConfig": { - "sequentialTest": true, - "_comment": "if sequentialTest is true, generate payload according to sequentialType, sequentialAmount, sequentialSize, this override typeDistribution defined in payloadConfig", - "sequentials": [ - { - "sequentialType": "TYPE_FCM_CREATE", - "sequentialAmount": 250000, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_TRANSFER", - "sequentialAmount": 150000, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_CREATE", - "sequentialAmount": 10000, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_TRANSFER", - "sequentialAmount": 150000, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_DELETE", - "sequentialAmount": 20000, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_TRANSFER", - "sequentialAmount": 150000, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_CREATE", - "sequentialAmount": 30000, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_TRANSFER", - "sequentialAmount": 150000, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_DELETE", - "sequentialAmount": 30000, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_TRANSFER", - "sequentialAmount": 150000, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_CREATE", - "sequentialAmount": 20000, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_TRANSFER", - "sequentialAmount": 150000, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_DELETE", - "sequentialAmount": 10000, - "sequentialSize": 100 - }, - { - 
"sequentialType": "TYPE_FCM_TRANSFER", - "sequentialAmount": 150000, - "sequentialSize": 100 - } - ] - } -} diff --git a/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCMFCQ-Basic-1-12m.json b/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCMFCQ-Basic-1-12m.json deleted file mode 100644 index 43a3ef778be0..000000000000 --- a/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCMFCQ-Basic-1-12m.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "payloadConfig": { - "appendSig": true, - "insertSeq": true, - "variedSize": false, - "_comment": "payload type TYPE_RANDOM_BYTES for random bytes", - "type": "TYPE_RANDOM_BYTES", - "payloadByteSize": 100, - "maxByteSize": 100, - "distribution": { - "sizeDistribution": [ - 100 - ], - "ratioDistribution": [ - 100 - ], - "typeDistribution": [ - "TYPE_FCM_TEST" - ] - } - }, - "submitConfig": { - "_comment0": [ - "0, BYTES_PER_SECOND_PER_NODE", - "1, TRANS_PER_SECOND_PER_NODE", - "2, EVENTS_PER_SECOND_WHOLE_NETWORK", - "hard to control and achieve", - "3, ROUNDS_PER_SECOND_WHOLE_NETWORK", - "4, TRANS_PER_EVENT_WHOLE_NETWORK", - "more like low bound", - "5, C2C_LATENCY" - ], - "systemMetric": "TRANS_PER_SECOND_PER_NODE", - "metricThreshold": 1.0, - "_comment1": "pause after submit defined amount of transactions", - "pauseAfter": 0, - "pauseSeconds": 30, - "allowRunSubmit": true, - "C2CDelayThreshold": 6 - }, - "fcmConfig": { - "sequentialTest": true, - "_comment": "if sequentialTest is true, generate payload according to sequentialType, sequentialAmount, sequentialSize, this override typeDistribution defined in payloadConfig", - "sequentials": [ - { - "sequentialType": "TYPE_FCM_CREATE_FCQ", - "sequentialAmount": 100, - "sequentialSize": 100 - }, - { - "sequentialType": "SAVE_EXPECTED_MAP", - "sequentialAmount": 1, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_UPDATE_FCQ", - "sequentialAmount": 100, - "sequentialSize": 100 - }, - { - "sequentialType": "SAVE_EXPECTED_MAP", - "sequentialAmount": 1, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_TRANSFER_FCQ", - "sequentialAmount": 100, - "sequentialSize": 100 - }, - { - "sequentialType": "SAVE_EXPECTED_MAP", - "sequentialAmount": 1, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_DELETE_FCQ_NODE", - "sequentialAmount": 100, - "sequentialSize": 100 - }, - { - "sequentialType": "SAVE_EXPECTED_MAP", - "sequentialAmount": 1, - "sequentialSize": 100 - }, - { - "sequentialType": "TYPE_FCM_DELETE_FCQ", - "sequentialAmount": 100, - "sequentialSize": 100 - } - ] - } -} From 2e9b67b6f796a7264835a486f0b3fb0c18d7240e Mon Sep 17 00:00:00 2001 From: Kim Rader Date: Wed, 14 Jun 2023 06:42:47 -0700 Subject: [PATCH 29/70] CryptoLiveHash handlers (#7071) Signed-off-by: Kim Rader --- .../MonoTransactionDispatcherTest.java | 49 +++++++++-- .../handlers/CryptoAddLiveHashHandler.java | 13 ++- .../handlers/CryptoDeleteLiveHashHandler.java | 13 ++- .../handlers/CryptoGetLiveHashHandler.java | 7 +- .../CryptoAddLiveHashHandlerTest.java | 57 +++++++++++++ .../CryptoDeleteLiveHashHandlerTest.java | 57 +++++++++++++ .../CryptoGetLiveHashHandlerTest.java | 85 +++++++++++++++++++ 7 files changed, 264 insertions(+), 17 deletions(-) create mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoAddLiveHashHandlerTest.java create mode 100644 
hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteLiveHashHandlerTest.java create mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetLiveHashHandlerTest.java diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java index 720d2b5e717e..f55cfbf32bb5 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java @@ -22,6 +22,7 @@ import static com.hedera.node.app.workflows.dispatcher.MonoTransactionDispatcher.TYPE_NOT_SUPPORTED; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.mockito.ArgumentMatchers.any; import static org.mockito.BDDMockito.given; import static org.mockito.Mock.Strictness.LENIENT; @@ -547,7 +548,7 @@ void dispatchesTokenGrantKycAsExpected() { .build(); given(handleContext.body()).willReturn(txnBody); - dispatcher.dispatchHandle(handleContext); + assertDoesNotThrow(() -> dispatcher.dispatchHandle(handleContext)); } @Test @@ -557,7 +558,7 @@ void dispatchesTokenRevokeKycAsExpected() { .build(); given(handleContext.body()).willReturn(txnBody); - dispatcher.dispatchHandle(handleContext); + assertDoesNotThrow(() -> dispatcher.dispatchHandle(handleContext)); } @Test @@ -591,7 +592,7 @@ void dispatchesTokenFreezeAsExpected() { .build(); given(handleContext.body()).willReturn(txnBody); - dispatcher.dispatchHandle(handleContext); + assertDoesNotThrow(() -> dispatcher.dispatchHandle(handleContext)); } @Test @@ -601,7 +602,7 @@ void dispatchesTokenUnfreezeAsExpected() { .build(); given(handleContext.body()).willReturn(txnBody); - dispatcher.dispatchHandle(handleContext); + assertDoesNotThrow(() -> dispatcher.dispatchHandle(handleContext)); } @Test @@ -611,7 +612,7 @@ void dispatchesTokenFeeScheduleUpdateAsExpected() { .build(); given(handleContext.body()).willReturn(txnBody); - dispatcher.dispatchHandle(handleContext); + assertDoesNotThrow(() -> dispatcher.dispatchHandle(handleContext)); } @Test @@ -621,7 +622,7 @@ void dispatchesTokenPauseAsExpected() { .build(); given(handleContext.body()).willReturn(txnBody); - dispatcher.dispatchHandle(handleContext); + assertDoesNotThrow(() -> dispatcher.dispatchHandle(handleContext)); } @Test @@ -631,7 +632,7 @@ void dispatchesTokenUnpauseAsExpected() { .build(); given(handleContext.body()).willReturn(txnBody); - dispatcher.dispatchHandle(handleContext); + assertDoesNotThrow(() -> dispatcher.dispatchHandle(handleContext)); } @Test @@ -695,7 +696,7 @@ void dispatchesCryptoUpdateAsExpected() { .build(); given(handleContext.body()).willReturn(txnBody); - dispatcher.dispatchHandle(handleContext); + assertDoesNotThrow(() -> dispatcher.dispatchHandle(handleContext)); } @Test @@ -706,7 +707,7 @@ void dispatchesCryptoDeleteAsExpected() { given(handleContext.body()).willReturn(txnBody); - dispatcher.dispatchHandle(handleContext); + assertDoesNotThrow(() -> dispatcher.dispatchHandle(handleContext)); } @Test @@ -759,6 +760,36 @@ void dispatchesNetworkUncheckedSubmitAsExpected() { 
verifyNoInteractions(txnCtx); } + @Test + void dispatchesCryptoAddLiveHashAsExpected() { + final var txnBody = TransactionBody.newBuilder() + .transactionID(TransactionID.newBuilder()) + .cryptoAddLiveHash(CryptoAddLiveHashTransactionBody.DEFAULT) + .build(); + given(handleContext.body()).willReturn(txnBody); + + assertThatThrownBy(() -> dispatcher.dispatchHandle(handleContext)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining(TYPE_NOT_SUPPORTED); + + verifyNoInteractions(txnCtx); + } + + @Test + void dispatchesCryptoDeleteLiveHashAsExpected() { + final var txnBody = TransactionBody.newBuilder() + .transactionID(TransactionID.newBuilder()) + .cryptoDeleteLiveHash(CryptoDeleteLiveHashTransactionBody.DEFAULT) + .build(); + given(handleContext.body()).willReturn(txnBody); + + assertThatThrownBy(() -> dispatcher.dispatchHandle(handleContext)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining(TYPE_NOT_SUPPORTED); + + verifyNoInteractions(txnCtx); + } + @Test void doesntCommitWhenUsageLimitsExceeded() { final var txnBody = TransactionBody.newBuilder() diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoAddLiveHashHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoAddLiveHashHandler.java index 4a08c6e4565a..f3a7f0eb16d8 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoAddLiveHashHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoAddLiveHashHandler.java @@ -19,8 +19,10 @@ import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.HederaFunctionality; +import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.PreHandleContext; import com.hedera.node.app.spi.workflows.TransactionHandler; import edu.umd.cs.findbugs.annotations.NonNull; @@ -30,6 +32,8 @@ /** * This class contains all workflow-related functionality regarding {@link * HederaFunctionality#CRYPTO_ADD_LIVE_HASH}. + * + * This transaction type is not currently supported. It is reserved for future use. 
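 * <p>
 * A behavioral sketch only (hypothetical caller, not part of this change): pre-handle rejects the transaction outright.
 * <pre>{@code
 * try {
 *     new CryptoAddLiveHashHandler().preHandle(preHandleContext);
 * } catch (final PreCheckException e) {
 *     // expected: the exception carries ResponseCodeEnum.NOT_SUPPORTED
 * }
 * }</pre>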
*/ @Singleton public class CryptoAddLiveHashHandler implements TransactionHandler { @@ -39,13 +43,16 @@ public CryptoAddLiveHashHandler() { } @Override - public void preHandle(@NonNull final PreHandleContext context) { + public void preHandle(@NonNull final PreHandleContext context) throws PreCheckException { requireNonNull(context); - throw new UnsupportedOperationException("Not implemented"); + throw new PreCheckException(ResponseCodeEnum.NOT_SUPPORTED); } @Override public void handle(@NonNull final HandleContext context) throws HandleException { - throw new UnsupportedOperationException("Not implemented"); + // this will never actually get called + // because preHandle will throw a PreCheckException + // before we get here + throw new HandleException(ResponseCodeEnum.NOT_SUPPORTED); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoDeleteLiveHashHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoDeleteLiveHashHandler.java index 1d044d5e82fc..bd381f5bffcb 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoDeleteLiveHashHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoDeleteLiveHashHandler.java @@ -19,8 +19,10 @@ import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.HederaFunctionality; +import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.PreHandleContext; import com.hedera.node.app.spi.workflows.TransactionHandler; import edu.umd.cs.findbugs.annotations.NonNull; @@ -30,6 +32,8 @@ /** * This class contains all workflow-related functionality regarding {@link * HederaFunctionality#CRYPTO_DELETE_LIVE_HASH}. + * + * This transaction type is not currently supported. It is reserved for future use. 
*/ @Singleton public class CryptoDeleteLiveHashHandler implements TransactionHandler { @@ -39,13 +43,16 @@ public CryptoDeleteLiveHashHandler() { } @Override - public void preHandle(@NonNull final PreHandleContext context) { + public void preHandle(@NonNull final PreHandleContext context) throws PreCheckException { requireNonNull(context); - throw new UnsupportedOperationException("Not implemented"); + throw new PreCheckException(ResponseCodeEnum.NOT_SUPPORTED); } @Override public void handle(@NonNull final HandleContext context) throws HandleException { - throw new UnsupportedOperationException("Not implemented"); + // this will never actually get called + // because preHandle will throw a PreCheckException + // before we get here + throw new HandleException(ResponseCodeEnum.NOT_SUPPORTED); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetLiveHashHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetLiveHashHandler.java index 4ef1f9238ec6..149cd799c1f2 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetLiveHashHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetLiveHashHandler.java @@ -16,6 +16,7 @@ package com.hedera.node.app.service.token.impl.handlers; +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.HederaFunctionality; @@ -34,6 +35,8 @@ /** * This class contains all workflow-related functionality regarding {@link * HederaFunctionality#CRYPTO_GET_LIVE_HASH}. + * + * This transaction type is not currently supported. It is reserved for future use. */ @Singleton public class CryptoGetLiveHashHandler extends FreeQueryHandler { @@ -58,13 +61,13 @@ public Response createEmptyResponse(@NonNull final ResponseHeader header) { @Override public void validate(@NonNull final QueryContext context) throws PreCheckException { requireNonNull(context); - throw new UnsupportedOperationException("Not implemented"); + throw new PreCheckException(NOT_SUPPORTED); } @Override public Response findResponse(@NonNull final QueryContext context, @NonNull final ResponseHeader header) { requireNonNull(context); requireNonNull(header); - throw new UnsupportedOperationException("Not implemented"); + throw new UnsupportedOperationException(NOT_SUPPORTED.toString()); } } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoAddLiveHashHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoAddLiveHashHandlerTest.java new file mode 100644 index 000000000000..98c233cd9089 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoAddLiveHashHandlerTest.java @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mock.Strictness.LENIENT; + +import com.hedera.node.app.service.token.impl.handlers.CryptoAddLiveHashHandler; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.app.spi.workflows.PreHandleContext; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class CryptoAddLiveHashHandlerTest { + @Mock(strictness = LENIENT) + private PreHandleContext preHandleContext; + + @Mock(strictness = LENIENT) + private HandleContext handleContext; + + private final CryptoAddLiveHashHandler subject = new CryptoAddLiveHashHandler(); + + @Test + void preHandleThrowsUnsupported() { + assertThatThrownBy(() -> subject.preHandle(preHandleContext)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(NOT_SUPPORTED)); + } + + @Test + void handleThrowsUnsupported() { + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(NOT_SUPPORTED)); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteLiveHashHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteLiveHashHandlerTest.java new file mode 100644 index 000000000000..37b85d859f3b --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteLiveHashHandlerTest.java @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mock.Strictness.LENIENT; + +import com.hedera.node.app.service.token.impl.handlers.CryptoDeleteLiveHashHandler; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.app.spi.workflows.PreHandleContext; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class CryptoDeleteLiveHashHandlerTest { + @Mock(strictness = LENIENT) + private PreHandleContext preHandleContext; + + @Mock(strictness = LENIENT) + private HandleContext handleContext; + + private final CryptoDeleteLiveHashHandler subject = new CryptoDeleteLiveHashHandler(); + + @Test + void preHandleThrowsUnsupported() { + assertThatThrownBy(() -> subject.preHandle(preHandleContext)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(NOT_SUPPORTED)); + } + + @Test + void handleThrowsUnsupported() { + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(NOT_SUPPORTED)); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetLiveHashHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetLiveHashHandlerTest.java new file mode 100644 index 000000000000..4b82ee4fce15 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetLiveHashHandlerTest.java @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.hedera.hapi.node.base.QueryHeader; +import com.hedera.hapi.node.base.ResponseHeader; +import com.hedera.hapi.node.token.CryptoGetLiveHashQuery; +import com.hedera.hapi.node.token.CryptoGetLiveHashResponse; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.node.app.service.token.impl.handlers.CryptoGetLiveHashHandler; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.app.spi.workflows.QueryContext; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class CryptoGetLiveHashHandlerTest { + @Mock + private QueryContext context; + + private CryptoGetLiveHashHandler subject; + + @BeforeEach + void setUp() { + subject = new CryptoGetLiveHashHandler(); + } + + @Test + void extractsHeader() { + final var data = CryptoGetLiveHashQuery.newBuilder() + .header(QueryHeader.newBuilder().build()) + .build(); + final var query = Query.newBuilder().cryptoGetLiveHash(data).build(); + final var header = subject.extractHeader(query); + final var op = query.cryptoGetLiveHashOrThrow(); + assertThat(op.header()).isEqualTo(header); + } + + @Test + void createsEmptyResponse() { + final var responseHeader = ResponseHeader.newBuilder().build(); + final var response = subject.createEmptyResponse(responseHeader); + final var expectedResponse = Response.newBuilder() + .cryptoGetLiveHash(CryptoGetLiveHashResponse.newBuilder().header(responseHeader)) + .build(); + assertThat(expectedResponse).isEqualTo(response); + } + + @Test + void validateThrowsPreCheck() { + assertThatThrownBy(() -> subject.validate(context)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(NOT_SUPPORTED)); + } + + @Test + void findResponseThrowsUnsupported() { + final var responseHeader = ResponseHeader.newBuilder().build(); + assertThatThrownBy(() -> subject.findResponse(context, responseHeader)) + .isInstanceOf(UnsupportedOperationException.class); + } +} From 173502c56db38b862fd1f7d44e1b795e47be9623 Mon Sep 17 00:00:00 2001 From: Cody Littley <56973212+cody-littley@users.noreply.github.com> Date: Wed, 14 Jun 2023 09:44:10 -0500 Subject: [PATCH 30/70] Fix flaky test. (#7065) Signed-off-by: Cody Littley --- .../java/com/swirlds/platform/recovery/RecoveryTestUtils.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/RecoveryTestUtils.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/RecoveryTestUtils.java index 8416108038f6..1df2e4027a99 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/RecoveryTestUtils.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/recovery/RecoveryTestUtils.java @@ -217,7 +217,9 @@ public static void writeRandomEventStream( // Each event will be serialized twice. Once when it is hashed, and once when it is written to disk. 
assertEventuallyTrue( - () -> writeCount.get() == events.size() * 2, Duration.ofSeconds(1), "event not serialized fast enough"); + () -> writeCount.get() == events.size() * 2, + Duration.ofSeconds(10), + "event not serialized fast enough"); eventEventStreamManager.stop(); } From c0054c0a5a70e02a15a82fec07563d6889b38d42 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20Brandst=C3=A4tter?= Date: Wed, 14 Jun 2023 18:29:15 +0200 Subject: [PATCH 31/70] #6449 Introduce EventConfig (#7048) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Timo Brandstätter --- .../swirlds/common/config/BasicConfig.java | 15 --- .../swirlds/common/config/EventConfig.java | 67 ++++++++++++ .../config/export/ConfigExportTest.java | 2 - .../java/com/swirlds/platform/Browser.java | 4 +- .../swirlds/platform/PlatformConstructor.java | 3 - .../swirlds/platform/SettingConstants.java | 29 +++-- .../java/com/swirlds/platform/Settings.java | 103 ------------------ .../swirlds/platform/SettingsProvider.java | 19 ---- .../platform/StaticSettingsProvider.java | 21 ---- .../com/swirlds/platform/SwirldsPlatform.java | 15 +-- .../platform/components/EventTaskCreator.java | 20 ++-- .../platform/config/ConfigMappings.java | 12 +- .../eventhandling/ConsensusRoundHandler.java | 9 +- .../platform/gossip/AbstractGossip.java | 6 +- .../platform/gossip/sync/SyncGossip.java | 4 +- .../platform/gossip/sync/SyncManagerImpl.java | 17 +-- .../com/swirlds/platform/SettingsTest.java | 33 ------ .../com/swirlds/platform/SyncManagerTest.java | 21 ++-- .../com/swirlds/platform/TestSettings.java | 19 ---- .../ConsensusRoundHandlerTests.java | 23 ++-- .../swirlds/platform/test/TestSettings.java | 25 ----- .../test/components/EventTaskCreatorTest.java | 75 +++++++++---- .../test/eventflow/EventFlowTests.java | 3 +- 23 files changed, 221 insertions(+), 324 deletions(-) create mode 100644 platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/EventConfig.java diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java index ab5ed1f58668..70688860d488 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java @@ -33,11 +33,6 @@ * show the user all statistics, including those with category "internal"? * @param verboseStatistics * show expand statistics values, inlcude mean, min, max, stdDev - * @param requireStateLoad - * if set to true, the platform will fail to start if it fails to load a state from disk - * @param signedStateFreq - * hash and sign a state every signedStateFreq rounds. 1 means that a state will be signed every round, 2 means - * every other round, and so on. 
If the value is 0 or less, no states will be signed * @param maxEventQueueForCons * max events that can be put in the forCons queue (q2) in ConsensusRoundHandler (0 for infinity) * @param throttleTransactionQueueSize @@ -103,8 +98,6 @@ * transaction exceeds this limit then the event will contain the single transaction only * @param maxTransactionCountPerEvent * the maximum number of transactions that a single event may contain - * @param emergencyStateFileName - * The CSV file name of the emergency state recovery file * @param eventIntakeQueueSize * The size of the event intake queue, * {@link com.swirlds.common.threading.framework.config.QueueThreadConfiguration#UNLIMITED_CAPACITY} for @@ -112,9 +105,6 @@ * TCP connections, but leaving it unbounded can cause out of memory errors, even with the * {@link #eventIntakeQueueThrottleSize()}, because syncs that started before the throttle engages can grow the * queue to very large sizes on larger networks. - * @param checkSignedStateFromDisk - * If true, the platform will recalculate the hash of the signed state and check it against the written hash. It - * will also verify that the signatures are valid. * @param randomEventProbability * The probability that after a sync, a node will create an event with a random other parent. The probability is * is 1 in X, where X is the value of randomEventProbability. A value of 0 means that a node will not create any @@ -197,8 +187,6 @@ public record BasicConfig( @ConfigProperty(defaultValue = "32") int numCryptoThreads, @ConfigProperty(defaultValue = "false") boolean showInternalStats, @ConfigProperty(defaultValue = "false") boolean verboseStatistics, - @ConfigProperty(defaultValue = "false") boolean requireStateLoad, - @ConfigProperty(defaultValue = "1") int signedStateFreq, @ConfigProperty(defaultValue = "10000") int maxEventQueueForCons, @ConfigProperty(defaultValue = "100000") int throttleTransactionQueueSize, @ConfigProperty(defaultValue = "false") boolean throttle7, @@ -228,10 +216,7 @@ public record BasicConfig( @ConfigProperty(value = "loadKeysFromPfxFiles", defaultValue = "true") boolean loadKeysFromPfxFiles, @ConfigProperty(value = "maxTransactionBytesPerEvent", defaultValue = "245760") int maxTransactionBytesPerEvent, @ConfigProperty(value = "maxTransactionCountPerEvent", defaultValue = "245760") int maxTransactionCountPerEvent, - @ConfigProperty(value = "emergencyStateFileName", defaultValue = "emergencyRecovery.csv") - String emergencyStateFileName, @ConfigProperty(value = "eventIntakeQueueSize", defaultValue = "10000") int eventIntakeQueueSize, - @ConfigProperty(value = "checkSignedStateFromDisk", defaultValue = "false") boolean checkSignedStateFromDisk, @ConfigProperty(value = "randomEventProbability", defaultValue = "0") int randomEventProbability, @ConfigProperty(value = "rescueChildlessInverseProbability", defaultValue = "10") int rescueChildlessInverseProbability, diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/EventConfig.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/EventConfig.java new file mode 100644 index 000000000000..62937d0c412d --- /dev/null +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/EventConfig.java @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.common.config; + +import com.swirlds.common.threading.framework.config.QueueThreadConfiguration; +import com.swirlds.config.api.ConfigData; +import com.swirlds.config.api.ConfigProperty; + +/** + * Configuration for event handling inside the platform. + *

+ * @param maxEventQueueForCons max events that can be put in the forCons queue (q2) in ConsensusRoundHandler
+ * (0 for infinity)
+ * @param eventIntakeQueueThrottleSize The value for the event intake queue at which the node should stop syncing
+ * @param eventIntakeQueueSize The size of the event intake queue, {@link QueueThreadConfiguration#UNLIMITED_CAPACITY}
+ * for unbounded. It is best that this queue is large, but not unbounded. Filling it up can
+ * cause sync threads to drop TCP connections, but leaving it unbounded can cause out
+ * of memory errors, even with the {@link #eventIntakeQueueThrottleSize}, because syncs
+ * that started before the throttle engages can grow the queue to very large sizes on
+ * larger networks.
+ * @param randomEventProbability The probability that after a sync, a node will create an event with a random other
+ * parent. The probability is 1 in X, where X is the value of randomEventProbability.
+ * A value of 0 means that a node will not create any random events.
+ *

+ * This feature is used to get consensus on events with no descendants which are created + * by nodes who go offline. + * @param staleEventPreventionThreshold A setting used to prevent a node from generating events that will probably + * become stale. This value is multiplied by the address book size and compared to + * the number of events received in a sync. + * If ({@code numEventsReceived > staleEventPreventionThreshold * addressBookSize}) + * then we will not create an event for that sync, to reduce the probability of + * creating an event that will become stale. + * @param rescueChildlessInverseProbability The probability that we will create a child for a childless event. The + * probability is 1 / X, where X is the value of + * rescueChildlessInverseProbability. A value of 0 means that a node will not + * create any children for childless events. + * @param eventStreamQueueCapacity capacity of the blockingQueue from which we take events and write to EventStream files + * @param eventsLogPeriod period of generating eventStream file + * @param eventsLogDir eventStream files will be generated in this directory. + * @param enableEventStreaming enable stream event to server. + */ +@ConfigData("event") +public record EventConfig( + @ConfigProperty(defaultValue = "10000") int maxEventQueueForCons, + @ConfigProperty(defaultValue = "1000") int eventIntakeQueueThrottleSize, + @ConfigProperty(defaultValue = "10000") int eventIntakeQueueSize, + @ConfigProperty(defaultValue = "0") int randomEventProbability, + @ConfigProperty(defaultValue = "5") int staleEventPreventionThreshold, + @ConfigProperty(defaultValue = "10") int rescueChildlessInverseProbability, + @ConfigProperty(defaultValue = "500") int eventStreamQueueCapacity, + @ConfigProperty(defaultValue = "60") long eventsLogPeriod, + @ConfigProperty(defaultValue = "./eventstreams") String eventsLogDir, + @ConfigProperty(defaultValue = "false") boolean enableEventStreaming) {} diff --git a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/config/export/ConfigExportTest.java b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/config/export/ConfigExportTest.java index ba53dfa36b0b..c6f5cdbb7dba 100644 --- a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/config/export/ConfigExportTest.java +++ b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/config/export/ConfigExportTest.java @@ -62,7 +62,6 @@ void testPrint() throws IOException { // Verify properties in file are listed assertContains(regexForLine("verifyEventSigs", "false", true), lines); assertContains(regexForLine("doUpnp", "false", true), lines); - assertContains(regexForLine("checkSignedStateFromDisk", "true", true), lines); assertContains(regexForLine("showInternalStats", "true", true), lines); assertContains(regexForLine("csvFileName", "PlatformTesting", true), lines); assertContains(regexForLine("useLoopbackIp", "false", true), lines); @@ -75,7 +74,6 @@ void testPrint() throws IOException { // Verify properties not in file are listed (spot check only) assertContains(regexForLine("state.signedStateDisk", "3", true), lines); assertContains(regexForLine("numConnections", "40", true), lines); - assertContains(regexForLine("eventIntakeQueueSize", "10000", true), lines); assertContains(regexForLine("verboseStatistics", "false", true), lines); } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java index 
c26d60f92b48..f7eb4f3e79a5 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java @@ -37,6 +37,7 @@ import com.swirlds.common.StartupTime; import com.swirlds.common.config.BasicConfig; import com.swirlds.common.config.ConsensusConfig; +import com.swirlds.common.config.EventConfig; import com.swirlds.common.config.OSHealthCheckConfig; import com.swirlds.common.config.StateConfig; import com.swirlds.common.config.WiringConfig; @@ -232,7 +233,8 @@ private Browser(@NonNull final Set localNodesToStart) throws IOException .withConfigDataType(PreconsensusEventStreamConfig.class) .withConfigDataType(SyncConfig.class) .withConfigDataType(UptimeConfig.class) - .withConfigDataType(RecycleBinConfig.class); + .withConfigDataType(RecycleBinConfig.class) + .withConfigDataType(EventConfig.class); // Assume all locally run instances provide the same configuration definitions to the configuration builder. if (appMains.size() > 0) { diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/PlatformConstructor.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/PlatformConstructor.java index 8bec91c2be34..ad017987af11 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/PlatformConstructor.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/PlatformConstructor.java @@ -194,7 +194,6 @@ static SwirldStateManager swirldStateManager( * * @param threadManager responsible for creating and managing threads * @param selfId this node's id - * @param settingsProvider a static settings provider * @param swirldStateManager the instance of {@link SwirldStateManager} * @param consensusHandlingMetrics the class that records stats relating to {@link SwirldStateManager} * @param eventStreamManager the instance that streams consensus events to disk @@ -209,7 +208,6 @@ static ConsensusRoundHandler consensusHandler( @NonNull final PlatformContext platformContext, @NonNull final ThreadManager threadManager, @NonNull final NodeId selfId, - @NonNull final SettingsProvider settingsProvider, @NonNull final SwirldStateManager swirldStateManager, @NonNull final ConsensusHandlingMetrics consensusHandlingMetrics, @NonNull final EventStreamManager eventStreamManager, @@ -223,7 +221,6 @@ static ConsensusRoundHandler consensusHandler( platformContext, threadManager, selfId, - settingsProvider, swirldStateManager, consensusHandlingMetrics, eventStreamManager, diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java index a4f59c8f2e12..c256ad4b37e2 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java @@ -58,8 +58,6 @@ public final class SettingConstants { static final boolean VERIFY_EVENT_SIGS_DEFAULT_VALUE = true; static final boolean SHOW_INTERNAL_STATS_DEFAULT_VALUE = false; static final boolean VERBOSE_STATISTICS_DEFAULT_VALUE = false; - static final boolean REQUIRE_STATE_LOAD_DEFAULT_VALUE = false; - static final boolean THROTTLE_7_DEFAULT_VALUE = false; static final int DELAY_SHUFFLE_DEFAULT_VALUE = 200; static final int CALLER_SKIPS_BEFORE_SLEEP_DEFAULT_VALUE = 30; static final int SLEEP_CALLER_SKIPS_DEFAULT_VALUE 
= 50; @@ -74,11 +72,9 @@ public final class SettingConstants { static final boolean LOAD_KEYS_FROM_PFX_FILES_DEFAULT_VALUE = true; static final int MAX_TRANSACTION_BYTES_PER_EVENT_DEFAULT_VALUE = 245760; static final int MAX_TRANSACTION_COUNT_PER_EVENT_DEFAULT_VALUE = 245760; - static final boolean TRANS_THROTTLE_DEFAULT_VALUE = false; static final String CSV_OUTPUT_FOLDER_DEFAULT_VALUE = ""; static final boolean DISABLE_METRICS_OUTPUT_DEFAULT_VALUE = false; static final String CSV_FILE_NAME_DEFAULT_VALUE = ""; - static final String EMERGENCY_STATE_FILE_NAME_DEFAULT_VALUE = "emergencyRecovery.yaml"; static final int CSV_WRITE_FREQUENCY_DEFAULT_VALUE = 3000; static final boolean CSV_APPEND_DEFAULT_VALUE = false; static final boolean PROMETHEUS_ENDPOINT_ENABLED_DEFAULT_VALUE = false; @@ -98,9 +94,6 @@ public final class SettingConstants { static final String THREAD_DUMP_LOG_DIR_DEFAULT_VALUE = "data/threadDump"; static final int JVM_PAUSE_DETECTOR_SLEEP_MS_DEFAULT_VALUE = 1000; static final int JVM_PAUSE_REPORT_MS_DEFAULT_VALUE = 1000; - static final boolean ENABLE_STATE_RECOVERY_DEFAULT_VALUE = false; - static final String PLAYBACK_STREAM_FILE_DIRECTORY_DEFAULT_VALUE = ""; - static final String PLAYBACK_END_TIME_STAMP_DEFAULT_VALUE = ""; static final boolean GOSSIP_WITH_DIFFERENT_VERSIONS_DEFAULT_VALUE = false; static final Set REMOVED_SETTINGS = Set.of( @@ -193,7 +186,27 @@ public final class SettingConstants { "state.requireStateLoad", "state.emergencyStateFileName", "state.checkSignedStateFromDisk", - "signedStateFreq"); + "signedStateFreq", + "maxEventQueueForCons", + "eventIntakeQueueThrottleSize", + "eventIntakeQueueSize", + "randomEventProbability", + "staleEventPreventionThreshold", + "rescueChildlessInverseProbability", + "eventStreamQueueCapacity", + "eventsLogPeriod", + "eventsLogDir", + "enableEventStreaming", + "event.maxEventQueueForCons", + "event.eventIntakeQueueThrottleSize", + "event.eventIntakeQueueSize", + "event.randomEventProbability", + "event.staleEventPreventionThreshold", + "event.rescueChildlessInverseProbability", + "event.eventStreamQueueCapacity", + "event.eventsLogPeriod", + "event.eventsLogDir", + "event.enableEventStreaming"); private SettingConstants() {} } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java index d3464937aad0..b0ed8237a2c4 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java @@ -34,12 +34,6 @@ import static com.swirlds.platform.SettingConstants.DELAY_SHUFFLE_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.DISABLE_METRICS_OUTPUT_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.DO_UPNP_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.ENABLE_EVENT_STREAMING_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.EVENTS_LOG_DIR_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.EVENTS_LOG_PERIOD_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.EVENT_INTAKE_QUEUE_SIZE_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.EVENT_INTAKE_QUEUE_THROTTLE_SIZE_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.EVENT_STREAM_QUEUE_CAPACITY_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.FREEZE_SECONDS_AFTER_STARTUP_DEFAULT_VALUE; import 
static com.swirlds.platform.SettingConstants.GOSSIP_WITH_DIFFERENT_VERSIONS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.HALF_LIFE_DEFAULT_VALUE; @@ -50,7 +44,6 @@ import static com.swirlds.platform.SettingConstants.LOG4J2_CONFIG_FILE; import static com.swirlds.platform.SettingConstants.LOG_STACK_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.MAX_ADDRESS_SIZE_ALLOWED_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.MAX_EVENT_QUEUE_FOR_CONS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.MAX_INCOMING_SYNCS_INC_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.MAX_OUTGOING_SYNCS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.MAX_TRANSACTION_BYTES_PER_EVENT_DEFAULT_VALUE; @@ -60,16 +53,13 @@ import static com.swirlds.platform.SettingConstants.PROMETHEUS_ENDPOINT_ENABLED_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.PROMETHEUS_ENDPOINT_MAX_BACKLOG_ALLOWED_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.PROMETHEUS_ENDPOINT_PORT_NUMBER_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.RANDOM_EVENT_PROBABILITY_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.REMOVED_SETTINGS; -import static com.swirlds.platform.SettingConstants.RESCUE_CHILDLESS_INVERSE_PROBABILITY_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.SAVED_STRING; import static com.swirlds.platform.SettingConstants.SETTINGS_TXT; import static com.swirlds.platform.SettingConstants.SHOW_INTERNAL_STATS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.SLEEP_CALLER_SKIPS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.SLEEP_HEARTBEAT_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.SOCKET_IP_TOS_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.STALE_EVENT_PREVENTION_THRESHOLD_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.STATS_BUFFER_SIZE_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.STATS_RECENT_SECONDS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.STATS_SKIP_SECONDS_DEFAULT_VALUE; @@ -90,7 +80,6 @@ import com.swirlds.common.internal.SettingsCommon; import com.swirlds.common.settings.SettingsException; -import com.swirlds.common.threading.framework.config.QueueThreadConfiguration; import com.swirlds.common.utility.CommonUtils; import com.swirlds.common.utility.PlatformVersion; import com.swirlds.config.api.Configuration; @@ -172,8 +161,6 @@ public class Settings { private boolean showInternalStats = SHOW_INTERNAL_STATS_DEFAULT_VALUE; /** show expand statistics values, inlcude mean, min, max, stdDev */ private boolean verboseStatistics = VERBOSE_STATISTICS_DEFAULT_VALUE; - /** max events that can be put in the forCons queue (q2) in ConsensusRoundHandler (0 for infinity) */ - private int maxEventQueueForCons = MAX_EVENT_QUEUE_FOR_CONS_DEFAULT_VALUE; /** * Stop accepting new non-system transactions into the 4 transaction queues if any of them have more than this * many. 
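For readers following this migration, a minimal sketch (not part of the patch) of how a component picks up these values after the change: the per-event settings removed from Settings here are read from the new EventConfig record through the platform configuration, using the same getConfigData accessor this commit uses in SwirldsPlatform and AbstractGossip. The PlatformContext variable below is assumed to be whatever context the caller already holds.

    // Sketch only; assumes a PlatformContext named `platformContext` is in scope,
    // as it is in the call sites touched by this commit.
    final com.swirlds.common.config.EventConfig eventConfig =
            platformContext.getConfiguration().getConfigData(com.swirlds.common.config.EventConfig.class);

    // replaces the removed Settings.getInstance().getEventIntakeQueueThrottleSize()
    final int throttleSize = eventConfig.eventIntakeQueueThrottleSize();

    // replaces the removed Settings.getInstance().getMaxEventQueueForCons()
    final int maxEventQueueForCons = eventConfig.maxEventQueueForCons();
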
@@ -301,50 +288,8 @@ public class Settings { /** Backlog of the Prometheus endpoint (= number of incoming TCP connections the system will queue) **/ private int prometheusEndpointMaxBacklogAllowed = PROMETHEUS_ENDPOINT_MAX_BACKLOG_ALLOWED_DEFAULT_VALUE; - /** The value for the event intake queue at which the node should stop syncing */ - private int eventIntakeQueueThrottleSize = EVENT_INTAKE_QUEUE_THROTTLE_SIZE_DEFAULT_VALUE; - /** - * The size of the event intake queue, {@link QueueThreadConfiguration#UNLIMITED_CAPACITY} for unbounded. It is best - * that this queue is large, but not unbounded. Filling it up can cause sync threads to drop TCP connections, but - * leaving it unbounded can cause out of memory errors, even with the {@link #eventIntakeQueueThrottleSize}, because - * syncs that started before the throttle engages can grow the queue to very large sizes on larger networks. - */ - private int eventIntakeQueueSize = EVENT_INTAKE_QUEUE_SIZE_DEFAULT_VALUE; - /** - * The probability that after a sync, a node will create an event with a random other parent. The probability is is - * 1 in X, where X is the value of randomEventProbability. A value of 0 means that a node will not create any random - * events. - *

- * This feature is used to get consensus on events with no descendants which are created by nodes who go offline. - */ - private int randomEventProbability = RANDOM_EVENT_PROBABILITY_DEFAULT_VALUE; - /** - * A setting used to prevent a node from generating events that will probably become stale. This value is multiplied - * by the address book size and compared to the number of events received in a sync. If ( numEventsReceived > - * staleEventPreventionThreshold * addressBookSize ) then we will not create an event for that sync, to reduce the - * probability of creating an event that will become stale. - */ - private int staleEventPreventionThreshold = STALE_EVENT_PREVENTION_THRESHOLD_DEFAULT_VALUE; - /** - * The probability that we will create a child for a childless event. The probability is 1 / X, where X is the value - * of rescueChildlessInverseProbability. A value of 0 means that a node will not create any children for childless - * events. - */ - private int rescueChildlessInverseProbability = RESCUE_CHILDLESS_INVERSE_PROBABILITY_DEFAULT_VALUE; - - /////////////////////////////////////////// - // Setting for stream event - /** enable stream event to server */ - private boolean enableEventStreaming = ENABLE_EVENT_STREAMING_DEFAULT_VALUE; - /** capacity of the blockingQueue from which we take events and write to EventStream files */ - private int eventStreamQueueCapacity = EVENT_STREAM_QUEUE_CAPACITY_DEFAULT_VALUE; - /** period of generating eventStream file */ - private long eventsLogPeriod = EVENTS_LOG_PERIOD_DEFAULT_VALUE; - /////////////////////////////////////////// // Setting for thread dump - /** eventStream files will be generated in this directory */ - private String eventsLogDir = EVENTS_LOG_DIR_DEFAULT_VALUE; /** period of generating thread dump file in the unit of milliseconds */ private long threadDumpPeriodMs = THREAD_DUMP_PERIOD_MS_DEFAULT_VALUE; @@ -717,10 +662,6 @@ public boolean isVerboseStatistics() { return verboseStatistics; } - public int getMaxEventQueueForCons() { - return maxEventQueueForCons; - } - public int getThrottleTransactionQueueSize() { return throttleTransactionQueueSize; } @@ -889,50 +830,6 @@ public int getPrometheusEndpointMaxBacklogAllowed() { return prometheusEndpointMaxBacklogAllowed; } - public int getEventIntakeQueueThrottleSize() { - return eventIntakeQueueThrottleSize; - } - - public void setEventIntakeQueueThrottleSize(final int eventIntakeQueueThrottleSize) { - this.eventIntakeQueueThrottleSize = eventIntakeQueueThrottleSize; - } - - public int getEventIntakeQueueSize() { - return eventIntakeQueueSize; - } - - public int getRandomEventProbability() { - return randomEventProbability; - } - - public int getStaleEventPreventionThreshold() { - return staleEventPreventionThreshold; - } - - public void setStaleEventPreventionThreshold(final int staleEventPreventionThreshold) { - this.staleEventPreventionThreshold = staleEventPreventionThreshold; - } - - public int getRescueChildlessInverseProbability() { - return rescueChildlessInverseProbability; - } - - public boolean isEnableEventStreaming() { - return enableEventStreaming; - } - - public int getEventStreamQueueCapacity() { - return eventStreamQueueCapacity; - } - - public long getEventsLogPeriod() { - return eventsLogPeriod; - } - - public String getEventsLogDir() { - return eventsLogDir; - } - public long getThreadDumpPeriodMs() { return threadDumpPeriodMs; } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingsProvider.java 
b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingsProvider.java index dfea89af5612..967f27607b11 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingsProvider.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingsProvider.java @@ -24,25 +24,6 @@ */ @Deprecated(forRemoval = true) public interface SettingsProvider { - /** - * Returns the inverse of a probability that we will create a child for a childless event - */ - int getRescueChildlessInverseProbability(); - - /** - * The probability that after a sync, a node will create an event with a random other parent. The probability is - * is 1 in X, where X is the value of randomEventProbability. A value of 0 means that a node will not create any - * random events. - * - * This feature is used to get consensus on events with no descendants which are created by nodes who go offline. - */ - int getRandomEventProbability(); - - /** - * @see Settings#maxEventQueueForCons - */ - int getMaxEventQueueForCons(); - /** * @see Settings#transactionMaxBytes */ diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/StaticSettingsProvider.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/StaticSettingsProvider.java index 7e7fbeb3fb11..0134d4aba6bd 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/StaticSettingsProvider.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/StaticSettingsProvider.java @@ -31,27 +31,6 @@ public static StaticSettingsProvider getSingleton() { private StaticSettingsProvider() {} - /** - * {@inheritDoc} - */ - @Override - public int getRescueChildlessInverseProbability() { - return settings.getRescueChildlessInverseProbability(); - } - - /** - * {@inheritDoc} - */ - @Override - public int getRandomEventProbability() { - return settings.getRandomEventProbability(); - } - - @Override - public int getMaxEventQueueForCons() { - return settings.getMaxEventQueueForCons(); - } - @Override public int getTransactionMaxBytes() { return settings.getTransactionMaxBytes(); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java index 103f61baff9f..c344c81cc38d 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java @@ -30,6 +30,7 @@ import com.swirlds.base.time.Time; import com.swirlds.common.config.BasicConfig; import com.swirlds.common.config.ConsensusConfig; +import com.swirlds.common.config.EventConfig; import com.swirlds.common.config.StateConfig; import com.swirlds.common.context.PlatformContext; import com.swirlds.common.crypto.Hash; @@ -408,7 +409,7 @@ public class SwirldsPlatform implements Platform, Startable { // FUTURE WORK remove this when there are no more ShutdownRequestedTriggers being dispatched components.add(new Shutdown()); - final Settings settings = Settings.getInstance(); + final EventConfig eventConfig = platformContext.getConfiguration().getConfigData(EventConfig.class); final Address address = getSelfAddress(); final String eventStreamManagerName; @@ -425,10 +426,10 @@ public class SwirldsPlatform implements Platform, Startable { getSelfId(), this, eventStreamManagerName, - settings.isEnableEventStreaming(), - 
settings.getEventsLogDir(), - settings.getEventsLogPeriod(), - settings.getEventStreamQueueCapacity(), + eventConfig.enableEventStreaming(), + eventConfig.eventsLogDir(), + eventConfig.eventsLogPeriod(), + eventConfig.eventStreamQueueCapacity(), this::isLastEventBeforeRestart); if (loadedSignedState.isNotNull()) { @@ -499,7 +500,6 @@ public class SwirldsPlatform implements Platform, Startable { platformContext, threadManager, selfId, - PlatformConstructor.settingsProvider(), swirldStateManager, new ConsensusHandlingMetrics(metrics, time), eventStreamManager, @@ -558,6 +558,7 @@ public class SwirldsPlatform implements Platform, Startable { shadowGraph); final EventCreator eventCreator = buildEventCreator(eventIntake); + final Settings settings = Settings.getInstance(); final List validators = new ArrayList<>(); // it is very important to discard ancient events, otherwise the deduplication will not work, since it @@ -590,7 +591,7 @@ public class SwirldsPlatform implements Platform, Startable { // which the handler lambda sidesteps (since the lambda is not invoked // until after all things have been constructed). .setHandler(e -> getGossip().getEventIntakeLambda().accept(e)) - .setCapacity(settings.getEventIntakeQueueSize()) + .setCapacity(eventConfig.eventIntakeQueueSize()) .setLogAfterPauseDuration(platformContext .getConfiguration() .getConfigData(ThreadConfig.class) diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventTaskCreator.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventTaskCreator.java index acfbd4fb9b9e..71fd73a06f7c 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventTaskCreator.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventTaskCreator.java @@ -20,11 +20,11 @@ import static com.swirlds.logging.LogMarker.STALE_EVENTS; import static com.swirlds.logging.LogMarker.SYNC; +import com.swirlds.common.config.EventConfig; import com.swirlds.common.system.NodeId; import com.swirlds.common.system.address.Address; import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.threading.framework.QueueThread; -import com.swirlds.platform.SettingsProvider; import com.swirlds.platform.event.CreateEventTask; import com.swirlds.platform.event.EventIntakeTask; import com.swirlds.platform.event.GossipEvent; @@ -64,8 +64,8 @@ public class EventTaskCreator { /** A {@link QueueThread} that handles event intake */ private final BlockingQueue eventIntakeQueue; - /** provides access to settings */ - private final SettingsProvider settings; + /** provides access to configuration */ + private final EventConfig config; /** supplies the Random object */ private final Supplier random; @@ -87,8 +87,8 @@ public class EventTaskCreator { * tracks metrics * @param eventIntakeQueue * the queue add tasks to - * @param settings - * provides access to settings + * @param config + * provides access to configuration * @param syncManager * decides if an event should be created * @param random @@ -100,7 +100,7 @@ public EventTaskCreator( final NodeId selfId, final EventIntakeMetrics eventIntakeMetrics, final BlockingQueue eventIntakeQueue, - final SettingsProvider settings, + final EventConfig config, final SyncManager syncManager, final Supplier random) { this.eventMapper = eventMapper; @@ -109,7 +109,7 @@ public EventTaskCreator( this.addressBook = addressBook.copy(); this.addressBook.seal(); this.eventIntakeQueue = 
eventIntakeQueue; - this.settings = settings; + this.config = config; this.syncManager = syncManager; this.random = random; } @@ -142,7 +142,7 @@ public void syncDone(final SyncResult result) { private void randomEvent() { final Random r = random.get(); // maybe create an event with a random other parent - if (settings.getRandomEventProbability() > 0 && r.nextInt(settings.getRandomEventProbability()) == 0) { + if (config.randomEventProbability() > 0 && r.nextInt(config.randomEventProbability()) == 0) { int randomOtherIdIndex = r.nextInt(addressBook.getSize()); final NodeId randomOtherId = addressBook.getNodeId(randomOtherIdIndex); // we don't want to create an event with selfId==otherId @@ -160,7 +160,7 @@ private void randomEvent() { * This functionality may be deprecated in future. */ public void rescueChildlessEvents() { - if (settings.getRescueChildlessInverseProbability() <= 0) { + if (config.rescueChildlessInverseProbability() <= 0) { return; } @@ -183,7 +183,7 @@ public void rescueChildlessEvents() { // Decide, with probability = 1 / Settings.rescueChildlessInverseProbability, to create an other-child // for a childless event. - if (random.get().nextInt(settings.getRescueChildlessInverseProbability()) == 0) { + if (random.get().nextInt(config.rescueChildlessInverseProbability()) == 0) { logger.info(STALE_EVENTS.getMarker(), "Creating child for childless event {}", event::toShortString); createEvent(event.getCreatorId()); eventIntakeMetrics.rescuedEvent(); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/ConfigMappings.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/ConfigMappings.java index 5aee62d889bf..b362f57c1025 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/ConfigMappings.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/ConfigMappings.java @@ -39,7 +39,17 @@ private ConfigMappings() {} new ConfigMapping("state.signedStateFreq", "signedStateFreq"), new ConfigMapping("state.requireStateLoad", "requireStateLoad"), new ConfigMapping("state.emergencyStateFileName", "emergencyStateFileName"), - new ConfigMapping("state.checkSignedStateFromDisk", "checkSignedStateFromDisk")); + new ConfigMapping("state.checkSignedStateFromDisk", "checkSignedStateFromDisk"), + new ConfigMapping("event.maxEventQueueForCons", "maxEventQueueForCons"), + new ConfigMapping("event.eventIntakeQueueThrottleSize", "eventIntakeQueueThrottleSize"), + new ConfigMapping("event.eventIntakeQueueSize", "eventIntakeQueueSize"), + new ConfigMapping("event.randomEventProbability", "randomEventProbability"), + new ConfigMapping("event.staleEventPreventionThreshold", "staleEventPreventionThreshold"), + new ConfigMapping("event.rescueChildlessInverseProbability", "rescueChildlessInverseProbability"), + new ConfigMapping("event.eventStreamQueueCapacity", "eventStreamQueueCapacity"), + new ConfigMapping("event.eventsLogPeriod", "eventsLogPeriod"), + new ConfigMapping("event.eventsLogDir", "eventsLogDir"), + new ConfigMapping("event.enableEventStreaming", "enableEventStreaming")); /** * Add all known aliases to the provided config source diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/eventhandling/ConsensusRoundHandler.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/eventhandling/ConsensusRoundHandler.java index 6a7f40a03e02..7d2acd2c82b0 100644 --- 
a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/eventhandling/ConsensusRoundHandler.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/eventhandling/ConsensusRoundHandler.java @@ -25,6 +25,7 @@ import com.swirlds.base.function.CheckedConsumer; import com.swirlds.base.state.Startable; import com.swirlds.common.config.ConsensusConfig; +import com.swirlds.common.config.EventConfig; import com.swirlds.common.config.StateConfig; import com.swirlds.common.config.singleton.ConfigurationHolder; import com.swirlds.common.context.PlatformContext; @@ -43,7 +44,6 @@ import com.swirlds.common.threading.framework.config.QueueThreadMetricsConfiguration; import com.swirlds.common.threading.manager.ThreadManager; import com.swirlds.common.utility.Clearable; -import com.swirlds.platform.SettingsProvider; import com.swirlds.platform.components.common.output.RoundAppliedToStateConsumer; import com.swirlds.platform.config.ThreadConfig; import com.swirlds.platform.internal.ConsensusRound; @@ -85,7 +85,6 @@ public class ConsensusRoundHandler implements ConsensusRoundObserver, Clearable, /** Stores consensus events and round generations that need to be saved in state */ private final SignedStateEventsAndGenerations eventsAndGenerations; - private final SettingsProvider settings; private final ConsensusHandlingMetrics consensusHandlingMetrics; /** The queue thread that stores consensus rounds and feeds them to this class for handling. */ @@ -153,7 +152,6 @@ public class ConsensusRoundHandler implements ConsensusRoundObserver, Clearable, * @param platformContext contains various platform utilities * @param threadManager responsible for creating and managing threads * @param selfId the id of this node - * @param settings a provider of static settings * @param swirldStateManager the swirld state manager to send events to * @param consensusHandlingMetrics statistics updated by {@link ConsensusRoundHandler} * @param eventStreamManager the event stream manager to send consensus events to @@ -167,7 +165,6 @@ public ConsensusRoundHandler( @NonNull final PlatformContext platformContext, @NonNull final ThreadManager threadManager, @NonNull final NodeId selfId, - @NonNull final SettingsProvider settings, @NonNull final SwirldStateManager swirldStateManager, @NonNull final ConsensusHandlingMetrics consensusHandlingMetrics, @NonNull final EventStreamManager eventStreamManager, @@ -180,7 +177,6 @@ public ConsensusRoundHandler( this.platformContext = Objects.requireNonNull(platformContext); this.roundAppliedToStateConsumer = roundAppliedToStateConsumer; Objects.requireNonNull(selfId, "selfId must not be null"); - this.settings = settings; this.swirldStateManager = swirldStateManager; this.consensusHandlingMetrics = consensusHandlingMetrics; this.eventStreamManager = eventStreamManager; @@ -188,11 +184,12 @@ public ConsensusRoundHandler( this.softwareVersion = softwareVersion; this.enterFreezePeriod = enterFreezePeriod; + final EventConfig eventConfig = platformContext.getConfiguration().getConfigData(EventConfig.class); final ConsensusConfig consensusConfig = platformContext.getConfiguration().getConfigData(ConsensusConfig.class); eventsAndGenerations = new SignedStateEventsAndGenerations(consensusConfig); - final ConsensusQueue queue = new ConsensusQueue(consensusHandlingMetrics, settings.getMaxEventQueueForCons()); + final ConsensusQueue queue = new ConsensusQueue(consensusHandlingMetrics, eventConfig.maxEventQueueForCons()); queueThread = new 
QueueThreadConfiguration(threadManager) .setNodeId(selfId) .setHandler(this::applyConsensusRoundToState) diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/AbstractGossip.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/AbstractGossip.java index 321a69e315e3..37b219931053 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/AbstractGossip.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/AbstractGossip.java @@ -21,6 +21,7 @@ import com.swirlds.base.state.LifecyclePhase; import com.swirlds.base.state.Startable; +import com.swirlds.common.config.EventConfig; import com.swirlds.common.context.PlatformContext; import com.swirlds.common.crypto.config.CryptoConfig; import com.swirlds.common.merkle.synchronization.config.ReconnectConfig; @@ -202,7 +203,8 @@ protected AbstractGossip( List.of(swirldStateManager.getTransactionPool(), startUpEventFrozenManager, freezeManager)), criticalQuorum, addressBook, - fallenBehindManager); + fallenBehindManager, + platformContext.getConfiguration().getConfigData(EventConfig.class)); eventTaskCreator = new EventTaskCreator( eventMapper, @@ -210,7 +212,7 @@ protected AbstractGossip( selfId, eventIntakeMetrics, intakeQueue, - StaticSettingsProvider.getSingleton(), + platformContext.getConfiguration().getConfigData(EventConfig.class), syncManager, ThreadLocalRandom::current); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncGossip.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncGossip.java index 126bfcf03ec6..3b6f3b7ea667 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncGossip.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncGossip.java @@ -22,6 +22,7 @@ import com.swirlds.base.state.LifecyclePhase; import com.swirlds.base.time.Time; import com.swirlds.common.config.BasicConfig; +import com.swirlds.common.config.EventConfig; import com.swirlds.common.context.PlatformContext; import com.swirlds.common.merkle.synchronization.config.ReconnectConfig; import com.swirlds.common.notification.NotificationEngine; @@ -171,6 +172,7 @@ public SyncGossip( loadReconnectState, clearAllPipelinesForReconnect); + final EventConfig eventConfig = platformContext.getConfiguration().getConfigData(EventConfig.class); this.eventIntakeLambda = Objects.requireNonNull(eventIntakeLambda); syncConfig = platformContext.getConfiguration().getConfigData(SyncConfig.class); @@ -213,7 +215,7 @@ public SyncGossip( } final PeerAgnosticSyncChecks peerAgnosticSyncChecks = new PeerAgnosticSyncChecks(List.of( - () -> !gossipHalted.get(), () -> intakeQueue.size() < settings.getEventIntakeQueueThrottleSize())); + () -> !gossipHalted.get(), () -> intakeQueue.size() < eventConfig.eventIntakeQueueThrottleSize())); final ReconnectConfig reconnectConfig = platformContext.getConfiguration().getConfigData(ReconnectConfig.class); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncManagerImpl.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncManagerImpl.java index ca489c0fb328..23ebeea73630 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncManagerImpl.java +++ 
b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/sync/SyncManagerImpl.java @@ -20,12 +20,12 @@ import static com.swirlds.logging.LogMarker.FREEZE; import static com.swirlds.logging.LogMarker.SYNC; +import com.swirlds.common.config.EventConfig; import com.swirlds.common.metrics.FunctionGauge; import com.swirlds.common.metrics.Metrics; import com.swirlds.common.system.EventCreationRuleResponse; import com.swirlds.common.system.NodeId; import com.swirlds.common.system.address.AddressBook; -import com.swirlds.platform.Settings; import com.swirlds.platform.components.CriticalQuorum; import com.swirlds.platform.components.EventCreationRules; import com.swirlds.platform.event.EventIntakeTask; @@ -55,7 +55,7 @@ public class SyncManagerImpl implements SyncManager, FallenBehindManager { */ private static final int MAXIMUM_NEIGHBORS_TO_QUERY = 10; - private final Settings settings = Settings.getInstance(); + private final EventConfig eventConfig; /** the event intake queue */ private final BlockingQueue intakeQueue; @@ -94,7 +94,8 @@ public SyncManagerImpl( @NonNull final EventCreationRules eventCreationRules, @NonNull final CriticalQuorum criticalQuorum, @NonNull final AddressBook addressBook, - @NonNull final FallenBehindManager fallenBehindManager) { + @NonNull final FallenBehindManager fallenBehindManager, + @NonNull final EventConfig eventConfig) { this.intakeQueue = Objects.requireNonNull(intakeQueue); this.connectionGraph = Objects.requireNonNull(connectionGraph); @@ -105,6 +106,7 @@ public SyncManagerImpl( this.addressBook = Objects.requireNonNull(addressBook); this.fallenBehindManager = Objects.requireNonNull(fallenBehindManager); + this.eventConfig = Objects.requireNonNull(eventConfig); metrics.getOrCreate( new FunctionGauge.Config<>(INTERNAL_CATEGORY, "hasFallenBehind", Object.class, this::hasFallenBehind) @@ -129,7 +131,7 @@ public boolean shouldAcceptSync() { // we shouldn't sync if the event intake queue is too big final int intakeQueueSize = intakeQueue.size(); - if (intakeQueueSize > settings.getEventIntakeQueueThrottleSize()) { + if (intakeQueueSize > eventConfig.eventIntakeQueueThrottleSize()) { logger.debug( SYNC.getMarker(), "don't accept sync because event intake queue is too big, size: {}", @@ -152,7 +154,7 @@ public boolean shouldInitiateSync() { } // we shouldn't sync if the event intake queue is too big - return intakeQueue.size() <= settings.getEventIntakeQueueThrottleSize(); + return intakeQueue.size() <= eventConfig.eventIntakeQueueThrottleSize(); } /** @@ -243,8 +245,9 @@ public boolean shouldCreateEvent(final SyncResult info) { } // check 4: staleEventPrevention - if (settings.getStaleEventPreventionThreshold() > 0 - && info.getEventsRead() > settings.getStaleEventPreventionThreshold() * addressBook.getSize()) { + final int staleEventPreventionThreshold = eventConfig.staleEventPreventionThreshold(); + if (staleEventPreventionThreshold > 0 + && info.getEventsRead() > staleEventPreventionThreshold * addressBook.getSize()) { // if we read too many events during this sync, we skip creating an event to reduce the probability of // having a stale event return false; diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java index 1fd9babb92c3..881f542a5316 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java +++ 
b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java @@ -28,12 +28,6 @@ import static com.swirlds.platform.SettingConstants.DEADLOCK_CHECK_PERIOD_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.DELAY_SHUFFLE_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.DO_UPNP_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.ENABLE_EVENT_STREAMING_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.EVENTS_LOG_DIR_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.EVENTS_LOG_PERIOD_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.EVENT_INTAKE_QUEUE_SIZE_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.EVENT_INTAKE_QUEUE_THROTTLE_SIZE_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.EVENT_STREAM_QUEUE_CAPACITY_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.FREEZE_SECONDS_AFTER_STARTUP_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.GOSSIP_WITH_DIFFERENT_VERSIONS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.HALF_LIFE_DEFAULT_VALUE; @@ -44,21 +38,17 @@ import static com.swirlds.platform.SettingConstants.LOG4J2_CONFIG_FILE; import static com.swirlds.platform.SettingConstants.LOG_STACK_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.MAX_ADDRESS_SIZE_ALLOWED_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.MAX_EVENT_QUEUE_FOR_CONS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.MAX_INCOMING_SYNCS_INC_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.MAX_OUTGOING_SYNCS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.MAX_TRANSACTION_BYTES_PER_EVENT_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.MAX_TRANSACTION_COUNT_PER_EVENT_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.NUM_CONNECTIONS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.NUM_CRYPTO_THREADS_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.RANDOM_EVENT_PROBABILITY_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.RESCUE_CHILDLESS_INVERSE_PROBABILITY_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.SETTINGS_TXT; import static com.swirlds.platform.SettingConstants.SHOW_INTERNAL_STATS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.SLEEP_CALLER_SKIPS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.SLEEP_HEARTBEAT_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.SOCKET_IP_TOS_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.STALE_EVENT_PREVENTION_THRESHOLD_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.STATS_SKIP_SECONDS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.TCP_NO_DELAY_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.THREAD_DUMP_LOG_DIR_DEFAULT_VALUE; @@ -234,7 +224,6 @@ public void checkGetDefaultSettings() { Assertions.assertEquals(NUM_CRYPTO_THREADS_DEFAULT_VALUE, settings.getNumCryptoThreads()); Assertions.assertEquals(SHOW_INTERNAL_STATS_DEFAULT_VALUE, settings.isShowInternalStats()); Assertions.assertEquals(VERBOSE_STATISTICS_DEFAULT_VALUE, settings.isVerboseStatistics()); - Assertions.assertEquals(MAX_EVENT_QUEUE_FOR_CONS_DEFAULT_VALUE, settings.getMaxEventQueueForCons()); Assertions.assertEquals( THROTTLE_TRANSACTION_QUEUE_SIZE_DEFAULT_VALUE, 
settings.getThrottleTransactionQueueSize()); Assertions.assertEquals(NUM_CONNECTIONS_DEFAULT_VALUE, settings.getNumConnections()); @@ -274,18 +263,6 @@ public void checkGetDefaultSettings() { Assertions.assertEquals(CSV_FILE_NAME_DEFAULT_VALUE, settings.getCsvFileName()); Assertions.assertEquals(CSV_WRITE_FREQUENCY_DEFAULT_VALUE, settings.getCsvWriteFrequency()); Assertions.assertEquals(CSV_APPEND_DEFAULT_VALUE, settings.isCsvAppend()); - Assertions.assertEquals( - EVENT_INTAKE_QUEUE_THROTTLE_SIZE_DEFAULT_VALUE, settings.getEventIntakeQueueThrottleSize()); - Assertions.assertEquals(EVENT_INTAKE_QUEUE_SIZE_DEFAULT_VALUE, settings.getEventIntakeQueueSize()); - Assertions.assertEquals(RANDOM_EVENT_PROBABILITY_DEFAULT_VALUE, settings.getRandomEventProbability()); - Assertions.assertEquals( - STALE_EVENT_PREVENTION_THRESHOLD_DEFAULT_VALUE, settings.getStaleEventPreventionThreshold()); - Assertions.assertEquals( - RESCUE_CHILDLESS_INVERSE_PROBABILITY_DEFAULT_VALUE, settings.getRescueChildlessInverseProbability()); - Assertions.assertEquals(ENABLE_EVENT_STREAMING_DEFAULT_VALUE, settings.isEnableEventStreaming()); - Assertions.assertEquals(EVENT_STREAM_QUEUE_CAPACITY_DEFAULT_VALUE, settings.getEventStreamQueueCapacity()); - Assertions.assertEquals(EVENTS_LOG_PERIOD_DEFAULT_VALUE, settings.getEventsLogPeriod()); - Assertions.assertEquals(EVENTS_LOG_DIR_DEFAULT_VALUE, settings.getEventsLogDir()); Assertions.assertEquals(THREAD_DUMP_PERIOD_MS_DEFAULT_VALUE, settings.getThreadDumpPeriodMs()); Assertions.assertEquals(THREAD_DUMP_LOG_DIR_DEFAULT_VALUE, settings.getThreadDumpLogDir()); Assertions.assertEquals(JVM_PAUSE_DETECTOR_SLEEP_MS_DEFAULT_VALUE, settings.getJVMPauseDetectorSleepMs()); @@ -330,7 +307,6 @@ public void checkGetLoadedSettings() throws IOException { Assertions.assertEquals(16, settings.getNumCryptoThreads()); Assertions.assertTrue(settings.isShowInternalStats()); Assertions.assertTrue(settings.isVerboseStatistics()); - Assertions.assertEquals(600, settings.getMaxEventQueueForCons()); Assertions.assertEquals(200000, settings.getThrottleTransactionQueueSize()); Assertions.assertEquals(50, settings.getNumConnections()); Assertions.assertEquals(3, settings.getMaxOutgoingSyncs()); @@ -363,15 +339,6 @@ public void checkGetLoadedSettings() throws IOException { Assertions.assertEquals("csvFile", settings.getCsvFileName()); Assertions.assertEquals(4000, settings.getCsvWriteFrequency()); Assertions.assertTrue(settings.isCsvAppend()); - Assertions.assertEquals(2000, settings.getEventIntakeQueueThrottleSize()); - Assertions.assertEquals(15000, settings.getEventIntakeQueueSize()); - Assertions.assertEquals(1, settings.getRandomEventProbability()); - Assertions.assertEquals(10, settings.getStaleEventPreventionThreshold()); - Assertions.assertEquals(15, settings.getRescueChildlessInverseProbability()); - Assertions.assertTrue(settings.isEnableEventStreaming()); - Assertions.assertEquals(1000, settings.getEventStreamQueueCapacity()); - Assertions.assertEquals(70, settings.getEventsLogPeriod()); - Assertions.assertEquals("badEventsStream", settings.getEventsLogDir()); Assertions.assertEquals(1, settings.getThreadDumpPeriodMs()); Assertions.assertEquals("badData/badThreadDump", settings.getThreadDumpLogDir()); Assertions.assertEquals(2000, settings.getJVMPauseDetectorSleepMs()); diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SyncManagerTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SyncManagerTest.java index fe0e22cfc7c9..444f7b1c4c6c 
100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SyncManagerTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SyncManagerTest.java @@ -31,12 +31,14 @@ import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; +import com.swirlds.common.config.EventConfig; import com.swirlds.common.merkle.synchronization.config.ReconnectConfig; import com.swirlds.common.metrics.noop.NoOpMetrics; import com.swirlds.common.system.EventCreationRuleResponse; import com.swirlds.common.system.NodeId; import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.system.events.BaseEvent; +import com.swirlds.config.api.Configuration; import com.swirlds.platform.components.CriticalQuorum; import com.swirlds.platform.components.EventCreationRules; import com.swirlds.platform.eventhandling.EventTransactionPool; @@ -73,6 +75,7 @@ private static class SyncManagerTestData { public SyncManagerImpl syncManager; public CriticalQuorum criticalQuorum; public DummyEventQueue eventQueue; + public Configuration configuration; public SyncManagerTestData() { this(spy(SwirldStateManager.class)); @@ -113,10 +116,13 @@ public EventCreationRuleResponse shouldCreateEvent(BaseEvent selfParent, BaseEve return null; } }; - final ReconnectConfig config = new TestConfigBuilder() + configuration = new TestConfigBuilder() .withValue("reconnect.fallenBehindThreshold", "0.25") - .getOrCreateConfig() - .getConfigData(ReconnectConfig.class); + .withValue("event.eventIntakeQueueThrottleSize", "100") + .withValue("event.staleEventPreventionThreshold", "10") + .getOrCreateConfig(); + final ReconnectConfig reconnectConfig = configuration.getConfigData(ReconnectConfig.class); + final EventConfig eventConfig = configuration.getConfigData(EventConfig.class); eventQueue = new DummyEventQueue(hashgraph); syncManager = new SyncManagerImpl( new NoOpMetrics(), @@ -126,15 +132,15 @@ public EventCreationRuleResponse shouldCreateEvent(BaseEvent selfParent, BaseEve new EventCreationRules(List.of(startUpEventFrozenManager, freezeManager)), criticalQuorum, hashgraph.getAddressBook(), - new FallenBehindManagerImpl(addressBook, selfId, connectionGraph, () -> {}, () -> {}, config)); + new FallenBehindManagerImpl( + addressBook, selfId, connectionGraph, () -> {}, () -> {}, reconnectConfig), + eventConfig); } } protected void resetTestSettings() { - Settings.getInstance().setEventIntakeQueueThrottleSize(100); Settings.getInstance().setMaxIncomingSyncsInc(10); Settings.getInstance().setMaxOutgoingSyncs(10); - Settings.getInstance().setStaleEventPreventionThreshold(10); } /** @@ -393,6 +399,7 @@ void shouldCreateEventLargeRead() { final AddressBook addressBook = test.hashgraph.getAddressBook(); final NodeId ID = addressBook.getNodeId(0); final NodeId OTHER_ID = addressBook.getNodeId(1); + final EventConfig config = test.configuration.getConfigData(EventConfig.class); // If events read is too large then do not create an event test.hashgraph.isInCriticalQuorum.put(ID, true); @@ -400,7 +407,7 @@ void shouldCreateEventLargeRead() { test.syncManager.shouldCreateEvent( OTHER_ID, false, - Settings.getInstance().getStaleEventPreventionThreshold() + config.staleEventPreventionThreshold() * test.hashgraph.getAddressBook().getSize() + 1, 0), diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/TestSettings.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/TestSettings.java index 916aec9b5d4e..a3600c6ff982 100644 --- 
a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/TestSettings.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/TestSettings.java @@ -20,30 +20,11 @@ import java.util.concurrent.atomic.AtomicLong; public class TestSettings implements SettingsProvider { - public final AtomicInteger rescueChildlessInverseProbability = new AtomicInteger(0); - public final AtomicInteger randomEventProbability = new AtomicInteger(0); - public final AtomicInteger maxEventQueueForCons = new AtomicInteger(0); public final AtomicInteger transactionMaxBytes = new AtomicInteger(0); - public final AtomicInteger signedStateFreq = new AtomicInteger(0); public final AtomicLong delayShuffle = new AtomicLong(0); public final AtomicInteger ipTos = new AtomicInteger(-1); public final AtomicInteger throttleTransactionQueueSize = new AtomicInteger(100_000); - @Override - public int getRescueChildlessInverseProbability() { - return rescueChildlessInverseProbability.get(); - } - - @Override - public int getRandomEventProbability() { - return randomEventProbability.get(); - } - - @Override - public int getMaxEventQueueForCons() { - return maxEventQueueForCons.get(); - } - @Override public int getTransactionMaxBytes() { return transactionMaxBytes.get(); diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/eventhandling/ConsensusRoundHandlerTests.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/eventhandling/ConsensusRoundHandlerTests.java index e6e16449dce2..29920644f04a 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/eventhandling/ConsensusRoundHandlerTests.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/eventhandling/ConsensusRoundHandlerTests.java @@ -29,6 +29,7 @@ import com.swirlds.common.context.PlatformContext; import com.swirlds.common.stream.EventStreamManager; +import com.swirlds.common.system.BasicSoftwareVersion; import com.swirlds.common.system.SwirldState; import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.test.RandomAddressBookGenerator; @@ -36,6 +37,7 @@ import com.swirlds.common.threading.framework.QueueThread; import com.swirlds.common.threading.framework.Stoppable; import com.swirlds.common.threading.utility.ThrowingRunnable; +import com.swirlds.config.api.Configuration; import com.swirlds.platform.internal.ConsensusRound; import com.swirlds.platform.internal.EventImpl; import com.swirlds.platform.metrics.SwirldStateMetrics; @@ -44,6 +46,7 @@ import com.swirlds.platform.state.SwirldStateManagerImpl; import com.swirlds.platform.state.signed.ReservedSignedState; import com.swirlds.test.framework.TestQualifierTags; +import com.swirlds.test.framework.config.TestConfigBuilder; import com.swirlds.test.framework.context.TestPlatformContextBuilder; import java.time.Duration; import java.time.temporal.ChronoUnit; @@ -116,7 +119,6 @@ void queueNotDrainedOnReconnect() { platformContext, getStaticThreadManager(), selfId, - settingsProvider, swirldStateManager, consensusHandlingMetrics, eventStreamManager, @@ -124,7 +126,7 @@ void queueNotDrainedOnReconnect() { e -> {}, () -> {}, (round) -> {}, - null); + new BasicSoftwareVersion(1)); final int numRounds = 500; final ConsensusRound round = mock(ConsensusRound.class); @@ -186,12 +188,17 @@ private void initConsensusHandler(final SwirldState swirldState) { final State state = new State(); state.setSwirldState(swirldState); - 
when(settingsProvider.getMaxEventQueueForCons()).thenReturn(500); - final AddressBook addressBook = new RandomAddressBookGenerator().build(); + final Configuration configuration = new TestConfigBuilder() + .withValue("event.maxEventQueueForCons", 500) + .getOrCreateConfig(); + final PlatformContext platformContext = TestPlatformContextBuilder.create() + .withConfiguration(configuration) + .build(); + final SwirldStateManager swirldStateManager = new SwirldStateManagerImpl( - TestPlatformContextBuilder.create().build(), + platformContext, addressBook, selfId, preConsensusSystemTransactionManager, @@ -201,14 +208,10 @@ private void initConsensusHandler(final SwirldState swirldState) { () -> false, state); - final PlatformContext platformContext = - TestPlatformContextBuilder.create().build(); - consensusRoundHandler = new ConsensusRoundHandler( platformContext, getStaticThreadManager(), selfId, - settingsProvider, swirldStateManager, consensusHandlingMetrics, eventStreamManager, @@ -216,7 +219,7 @@ private void initConsensusHandler(final SwirldState swirldState) { e -> {}, () -> {}, (round) -> {}, - null); + new BasicSoftwareVersion(1)); consensusRoundHandler.start(); } } diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/TestSettings.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/TestSettings.java index ac4a6afb473f..7e3ddbe9ef09 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/TestSettings.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/TestSettings.java @@ -17,39 +17,14 @@ package com.swirlds.platform.test; import com.swirlds.platform.SettingsProvider; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; public class TestSettings implements SettingsProvider { - public final AtomicInteger rescueChildlessInverseProbability = new AtomicInteger(0); - public final AtomicInteger randomEventProbability = new AtomicInteger(0); - public final AtomicReference throttle7Threshold = new AtomicReference<>(0.0); - public final AtomicReference throttle7Extra = new AtomicReference<>(0.0); - public final AtomicInteger throttle7MaxBytes = new AtomicInteger(0); - public final AtomicBoolean throttle7Enabled = new AtomicBoolean(false); - public final AtomicInteger maxEventQueueForCons = new AtomicInteger(0); public final AtomicInteger transactionMaxBytes = new AtomicInteger(0); - public final AtomicInteger signedStateFreq = new AtomicInteger(0); public final AtomicInteger throttleTransactionQueueSize = new AtomicInteger(100_000); public final AtomicLong delayShuffle = new AtomicLong(0); - @Override - public int getRescueChildlessInverseProbability() { - return rescueChildlessInverseProbability.get(); - } - - @Override - public int getRandomEventProbability() { - return randomEventProbability.get(); - } - - @Override - public int getMaxEventQueueForCons() { - return maxEventQueueForCons.get(); - } - @Override public int getTransactionMaxBytes() { return transactionMaxBytes.get(); diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskCreatorTest.java 
b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskCreatorTest.java index fbb7e7d00e81..48862a6ecf99 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskCreatorTest.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskCreatorTest.java @@ -26,13 +26,13 @@ import static org.mockito.Mockito.verifyNoInteractions; import static org.mockito.Mockito.when; +import com.swirlds.common.config.EventConfig; import com.swirlds.common.system.NodeId; import com.swirlds.common.system.address.Address; import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.system.events.BaseEventHashedData; import com.swirlds.common.system.events.BaseEventUnhashedData; import com.swirlds.common.test.RandomAddressBookGenerator; -import com.swirlds.platform.SettingsProvider; import com.swirlds.platform.components.EventMapper; import com.swirlds.platform.components.EventTaskCreator; import com.swirlds.platform.event.CreateEventTask; @@ -44,9 +44,10 @@ import com.swirlds.platform.metrics.EventIntakeMetrics; import com.swirlds.test.framework.TestComponentTags; import com.swirlds.test.framework.TestTypeTags; +import com.swirlds.test.framework.config.TestConfigBuilder; import java.util.Random; import java.util.concurrent.BlockingQueue; -import org.junit.jupiter.api.BeforeEach; +import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; @@ -60,22 +61,22 @@ class EventTaskCreatorTest { NodeId selfId; EventIntakeMetrics eventIntakeMetrics; BlockingQueue eventQueueThread; - SettingsProvider setting; SyncManager syncManager; EventTaskCreator taskCreator; Random random; - @BeforeEach - void newMocks() { + private void init() { + final var config = new TestConfigBuilder().getOrCreateConfig().getConfigData(EventConfig.class); + init(config); + } + + private void init(final EventConfig config) { eventMapper = mock(EventMapper.class); - addressBook = mock(AddressBook.class); + addressBook = prepareAddressBook(); address = mock(Address.class); - when(addressBook.getAddress(any())).thenReturn(address); - when(addressBook.copy()).thenReturn(addressBook); selfId = new NodeId(1); eventIntakeMetrics = mock(EventIntakeMetrics.class); eventQueueThread = mock(BlockingQueue.class); - setting = mock(SettingsProvider.class); syncManager = mock(SyncManager.class); random = new MockRandom(); taskCreator = new EventTaskCreator( @@ -84,16 +85,39 @@ void newMocks() { selfId, eventIntakeMetrics, eventQueueThread, - setting, + config, syncManager, () -> random); } + private AddressBook prepareAddressBook() { + // this is a work around instead of refactoring the whole unit test file. + // the implementation of rescue children now iterates over the addresses in the address book. 
+ return new RandomAddressBookGenerator().setSize(5).build(); + } + + @NonNull + private EventConfig configRandomEventProbability() { + return new TestConfigBuilder() + .withValue("event.randomEventProbability", 1) + .getOrCreateConfig() + .getConfigData(EventConfig.class); + } + + @NonNull + private EventConfig configRescueChildlessInverseProbability(final int value) { + return new TestConfigBuilder() + .withValue("event.rescueChildlessInverseProbability", value) + .getOrCreateConfig() + .getConfigData(EventConfig.class); + } + @Test @Tag(TestTypeTags.FUNCTIONAL) @Tag(TestComponentTags.PLATFORM) @DisplayName("test createEvent()") void testCreateEvent() throws InterruptedException { + init(); final NodeId otherId = new NodeId(7); // regular call @@ -116,6 +140,7 @@ void testCreateEvent() throws InterruptedException { @Tag(TestComponentTags.PLATFORM) @DisplayName("test addEvent()") void testAddEvent() throws InterruptedException { + init(); GossipEvent task = new GossipEvent(mock(BaseEventHashedData.class), mock(BaseEventUnhashedData.class)); taskCreator.addEvent(task); verify(eventQueueThread).put(task); @@ -126,15 +151,10 @@ void testAddEvent() throws InterruptedException { @Tag(TestComponentTags.PLATFORM) @DisplayName("test addEvent()") void testEventRescue() throws InterruptedException { - when(setting.getRescueChildlessInverseProbability()).thenReturn(5); - when(addressBook.getSize()).thenReturn(5); - // this is a work around instead of refactoring the whole unit test file. - // the implementation of rescue children now iterates over the addresses in the address book. - final AddressBook newAddressBook = - new RandomAddressBookGenerator().setSize(5).build(); - when(addressBook.iterator()).thenReturn(newAddressBook.iterator()); + init(configRescueChildlessInverseProbability(5)); + EventImpl eventToRescue = mock(EventImpl.class); - when(eventToRescue.getCreatorId()).thenReturn(newAddressBook.getNodeId(2)); + when(eventToRescue.getCreatorId()).thenReturn(addressBook.getNodeId(2)); when(eventMapper.getMostRecentEvent(eventToRescue.getCreatorId())).thenReturn(eventToRescue); taskCreator.rescueChildlessEvents(); @@ -145,12 +165,21 @@ void testEventRescue() throws InterruptedException { eventToRescue.getCreatorId(), captor.getValue().getOtherId(), "otherId should match the senderId of the rescued event"); + } - reset(eventQueueThread); + @Test + @Tag(TestTypeTags.FUNCTIONAL) + @Tag(TestComponentTags.PLATFORM) + @DisplayName("test addEvent()") + void testEventNotRescue() throws InterruptedException { + init(configRescueChildlessInverseProbability(0)); + + final EventImpl eventToRescue = mock(EventImpl.class); + when(eventToRescue.getCreatorId()).thenReturn(addressBook.getNodeId(2)); + when(eventMapper.getMostRecentEvent(eventToRescue.getCreatorId())).thenReturn(eventToRescue); - // test with feature off - when(setting.getRescueChildlessInverseProbability()).thenReturn(0); taskCreator.rescueChildlessEvents(); + verify(eventQueueThread, times(0)).put(any()); } @@ -159,6 +188,7 @@ void testEventRescue() throws InterruptedException { @Tag(TestComponentTags.PLATFORM) @DisplayName("test syncDone()") void testSyncDone_ShouldNotCreateEvent() { + init(); when(syncManager.shouldCreateEvent(any())).thenReturn(false); taskCreator.syncDone(mock(SyncResult.class)); @@ -171,6 +201,7 @@ void testSyncDone_ShouldNotCreateEvent() { @Tag(TestComponentTags.PLATFORM) @DisplayName("test syncDone()") void testSyncDone_ShouldCreateEvent() throws InterruptedException { + init(); 
when(syncManager.shouldCreateEvent(any())).thenReturn(true); SyncResult syncResult = mock(SyncResult.class); @@ -186,8 +217,8 @@ void testSyncDone_ShouldCreateEvent() throws InterruptedException { @Tag(TestComponentTags.PLATFORM) @DisplayName("test syncDone randomEvent") void testSyncDoneRandomEvent() throws InterruptedException { + init(configRandomEventProbability()); when(syncManager.shouldCreateEvent(any())).thenReturn(true); - when(setting.getRandomEventProbability()).thenReturn(1); SyncResult syncResult = mock(SyncResult.class); when(syncResult.getOtherId()).thenReturn(new NodeId(2)); diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowTests.java index 564b6892f489..869b9e2bd4d7 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/eventflow/EventFlowTests.java @@ -575,7 +575,6 @@ protected void init( when(settingsProvider.getTransactionMaxBytes()).thenReturn(TX_MAX_BYTES); when(settingsProvider.getDelayShuffle()).thenReturn(SHUFFLE_DELAY_MS); when(settingsProvider.getThrottleTransactionQueueSize()).thenReturn(THROTTLE_TRANSACTION_QUEUE_SIZE); - when(settingsProvider.getMaxEventQueueForCons()).thenReturn(500); when(settingsProvider.getMaxTransactionBytesPerEvent()).thenReturn(2048); final ConsensusHandlingMetrics consStats = mock(ConsensusHandlingMetrics.class); @@ -637,7 +636,6 @@ protected void init( platformContext, getStaticThreadManager(), selfId, - settingsProvider, swirldStateManager, consStats, eventStreamManager, @@ -655,6 +653,7 @@ private Configuration prepareConfig() { private Configuration prepareConfig(int signedStateFreq) { return new TestConfigBuilder() .withValue("state.signedStateFreq", signedStateFreq) + .withValue("event.maxEventQueueForCons", 500) .getOrCreateConfig(); } From 10643f96d8ddfc29c51ac1d91546f844a2351145 Mon Sep 17 00:00:00 2001 From: Hendrik Ebbers Date: Thu, 15 Jun 2023 09:03:22 +0200 Subject: [PATCH 32/70] ArgumentUtils#throwArgNull replaced by Objects#requireNonNull (for base modules) (#7045) Signed-off-by: Hendrik Ebbers --- .../java/com/swirlds/base/ArgumentUtils.java | 4 ++- .../base/function/CheckedConsumer.java | 4 +-- .../base/function/CheckedFunction.java | 4 +-- .../com/swirlds/base/state/Lifecycle.java | 4 +-- .../sources/LegacyFileConfigSource.java | 4 +-- .../config/sources/MappedConfigSource.java | 5 ++- .../metrics/extensions/CountPerSecond.java | 4 +-- .../QueueThreadMetricsConfiguration.java | 5 ++- .../swirlds/common/utility/CommonUtils.java | 2 +- .../impl/internal/ConfigDataFactory.java | 34 +++++++++---------- .../impl/internal/ConfigDataService.java | 6 ++-- .../impl/internal/ConfigNumberUtils.java | 11 +++--- .../internal/ConfigPropertiesService.java | 4 +-- .../impl/internal/ConfigSourceService.java | 4 +-- .../internal/ConfigValidationService.java | 9 ++--- .../internal/ConfigurationBuilderImpl.java | 10 +++--- .../impl/internal/ConfigurationImpl.java | 14 ++++---- .../impl/internal/ConstraintValidator.java | 12 +++---- .../impl/internal/ConverterService.java | 13 ++++--- 19 files changed, 75 insertions(+), 78 deletions(-) diff --git a/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/ArgumentUtils.java 
b/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/ArgumentUtils.java index abd31f98e271..bf3f07efd205 100644 --- a/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/ArgumentUtils.java +++ b/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/ArgumentUtils.java @@ -40,8 +40,10 @@ private ArgumentUtils() throws IllegalAccessException { * * @param argument the argument to check * @param argumentName the name of the argument + * @deprecated use {@link Objects#requireNonNull(Object, String)} instead */ @NonNull + @Deprecated(forRemoval = true) public static T throwArgNull(@Nullable final T argument, @NonNull final String argumentName) throws NullPointerException { if (argument == null) { @@ -52,7 +54,7 @@ public static T throwArgNull(@Nullable final T argument, @NonNull final Stri /** * Throw an {@link IllegalArgumentException} if the supplied {@code String} is blank. Throw an - * {@link NullPointerException} if the supplied {@code String} is null (see {@link #throwArgNull(Object, String)}). + * {@link NullPointerException} if the supplied {@code String} is {@code null}. * * @param argument the argument checked * @param argumentName the name of the argument diff --git a/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/function/CheckedConsumer.java b/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/function/CheckedConsumer.java index 8fd613e98479..a14bbd910e74 100644 --- a/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/function/CheckedConsumer.java +++ b/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/function/CheckedConsumer.java @@ -16,9 +16,9 @@ package com.swirlds.base.function; -import com.swirlds.base.ArgumentUtils; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Objects; import java.util.function.Consumer; /** @@ -48,7 +48,7 @@ public interface CheckedConsumer { */ @NonNull static CheckedConsumer of(@NonNull final Consumer consumer) { - ArgumentUtils.throwArgNull(consumer, "consumer"); + Objects.requireNonNull(consumer, "consumer must not be null"); return consumer::accept; } } diff --git a/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/function/CheckedFunction.java b/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/function/CheckedFunction.java index d666ece7349b..32a3adc1e956 100644 --- a/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/function/CheckedFunction.java +++ b/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/function/CheckedFunction.java @@ -16,9 +16,9 @@ package com.swirlds.base.function; -import com.swirlds.base.ArgumentUtils; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Objects; import java.util.function.Function; /** @@ -52,7 +52,7 @@ public interface CheckedFunction { */ @NonNull static CheckedFunction of(@NonNull final Function function) { - ArgumentUtils.throwArgNull(function, "function"); + Objects.requireNonNull(function, "function must not be null"); return function::apply; } } diff --git a/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/state/Lifecycle.java b/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/state/Lifecycle.java index 681e98e5b6f5..d92b31ed08a7 100644 --- a/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/state/Lifecycle.java +++ b/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/state/Lifecycle.java @@ -16,9 +16,9 @@ package com.swirlds.base.state; -import com.swirlds.base.ArgumentUtils; 
import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Objects; /** * An object with a well-defined start/stop lifecycle. @@ -51,7 +51,7 @@ default void throwIfNotInPhase(@NonNull final LifecyclePhase phase) { * @throws LifecycleException if the object is not in the expected phase */ default void throwIfNotInPhase(@NonNull final LifecyclePhase phase, @Nullable final String errorMessage) { - ArgumentUtils.throwArgNull(phase, "phase"); + Objects.requireNonNull(phase, "phase must not be null"); final LifecyclePhase currentPhase = getLifecyclePhase(); if (currentPhase != phase) { throw new LifecycleException( diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/sources/LegacyFileConfigSource.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/sources/LegacyFileConfigSource.java index bb1f0fa000bd..31adf394f131 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/sources/LegacyFileConfigSource.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/sources/LegacyFileConfigSource.java @@ -16,7 +16,6 @@ package com.swirlds.common.config.sources; -import com.swirlds.base.ArgumentUtils; import com.swirlds.common.utility.CommonUtils; import java.io.File; import java.io.IOException; @@ -28,6 +27,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.Objects; import java.util.stream.Stream; /** @@ -77,7 +77,7 @@ public LegacyFileConfigSource(final Path filePath) throws IOException { * @throws IOException if the file can not be loaded or parsed */ public LegacyFileConfigSource(final Path filePath, final int ordinal) throws IOException { - this.filePath = ArgumentUtils.throwArgNull(filePath, "filePath"); + this.filePath = Objects.requireNonNull(filePath, "filePath must not be null"); this.ordinal = ordinal; this.internalProperties = Collections.unmodifiableMap(loadSettings(filePath.toFile())); } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/sources/MappedConfigSource.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/sources/MappedConfigSource.java index 8ed020216352..4890afb1e0a7 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/sources/MappedConfigSource.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/sources/MappedConfigSource.java @@ -18,7 +18,6 @@ import static com.swirlds.logging.LogMarker.CONFIG; -import com.swirlds.base.ArgumentUtils; import com.swirlds.config.api.source.ConfigSource; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Collections; @@ -73,7 +72,7 @@ public class MappedConfigSource extends AbstractConfigSource { * @param wrappedSource the wrapped config */ public MappedConfigSource(@NonNull final ConfigSource wrappedSource) { - this.wrappedSource = ArgumentUtils.throwArgNull(wrappedSource, "wrappedSource"); + this.wrappedSource = Objects.requireNonNull(wrappedSource, "wrappedSource must not be null"); configMappings = new ConcurrentLinkedQueue<>(); properties = new HashMap<>(); } @@ -94,7 +93,7 @@ public void addMapping(@NonNull final String mappedName, @NonNull final String o * @param configMapping defined mapping */ public void addMapping(@NonNull final ConfigMapping configMapping) { - ArgumentUtils.throwArgNull(configMapping, "configMapping"); + Objects.requireNonNull(configMapping, "configMapping must not be null"); if 
(configMappings.stream() .map(ConfigMapping::mappedName) diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/CountPerSecond.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/CountPerSecond.java index d4bbdde13d8b..38900bb7ced0 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/CountPerSecond.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/CountPerSecond.java @@ -16,7 +16,6 @@ package com.swirlds.common.metrics.extensions; -import static com.swirlds.base.ArgumentUtils.throwArgNull; import static com.swirlds.common.metrics.FloatFormats.FORMAT_10_2; import static com.swirlds.common.utility.CommonUtils.throwArgBlank; @@ -28,6 +27,7 @@ import com.swirlds.common.time.IntegerEpochTime; import com.swirlds.common.time.OSTime; import com.swirlds.common.utility.Units; +import java.util.Objects; /** * Platform-implementation of {@link CountPerSecond}. The granularity of this metric is a millisecond. This metric needs @@ -164,7 +164,7 @@ private Config( this.category = throwArgBlank(category, "category"); this.name = throwArgBlank(name, "name"); this.description = throwArgBlank(description, "description"); - this.unit = throwArgNull(unit, "unit"); + this.unit = Objects.requireNonNull(unit, "unit must not be null"); this.format = throwArgBlank(format, "format"); } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/config/QueueThreadMetricsConfiguration.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/config/QueueThreadMetricsConfiguration.java index f89bcaf8a365..f83873cebe21 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/config/QueueThreadMetricsConfiguration.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/config/QueueThreadMetricsConfiguration.java @@ -16,12 +16,11 @@ package com.swirlds.common.threading.framework.config; -import static com.swirlds.base.ArgumentUtils.throwArgNull; - import com.swirlds.base.time.Time; import com.swirlds.common.metrics.Metrics; import com.swirlds.common.time.OSTime; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; /** * Configuration for the metrics that will be applied to a queue thread @@ -47,7 +46,7 @@ public class QueueThreadMetricsConfiguration { * The metrics system that will hold metrics */ public QueueThreadMetricsConfiguration(@NonNull final Metrics metrics) { - this.metrics = throwArgNull(metrics, "metrics"); + this.metrics = Objects.requireNonNull(metrics, "metrics must not be null"); } /** diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/utility/CommonUtils.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/utility/CommonUtils.java index c822bb7d1957..10adfa545558 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/utility/CommonUtils.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/utility/CommonUtils.java @@ -239,7 +239,7 @@ public static byte[] unhex(final String string) { * * @param arg the argument checked * @param argName the name of the argument - * @deprecated use {@link com.swirlds.base.ArgumentUtils#throwArgNull(Object, String)} instead + * @deprecated use {@link java.util.Objects#requireNonNull(Object, String)} instead */ @Deprecated(forRemoval = true) public static T throwArgNull(final T arg, final String 
argName) { diff --git a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigDataFactory.java b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigDataFactory.java index 3b7116329acd..f8d7f05064de 100644 --- a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigDataFactory.java +++ b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigDataFactory.java @@ -16,8 +16,6 @@ package com.swirlds.config.impl.internal; -import static com.swirlds.base.ArgumentUtils.throwArgNull; - import com.swirlds.common.config.reflection.ConfigReflectionUtils; import com.swirlds.config.api.ConfigData; import com.swirlds.config.api.ConfigProperty; @@ -52,15 +50,15 @@ class ConfigDataFactory { private final ConverterService converterService; ConfigDataFactory(@NonNull final Configuration configuration, @NonNull final ConverterService converterService) { - this.configuration = throwArgNull(configuration, "configuration"); - this.converterService = throwArgNull(converterService, "converterService"); + this.configuration = Objects.requireNonNull(configuration, "configuration must not be null"); + this.converterService = Objects.requireNonNull(converterService, "converterService must not be null"); } @SuppressWarnings("unchecked") @NonNull T createConfigInstance(@NonNull final Class type) throws InvocationTargetException, InstantiationException, IllegalAccessException { - throwArgNull(type, "type"); + Objects.requireNonNull(type, "type must not be null"); if (!type.isAnnotationPresent(ConfigData.class)) { throw new IllegalArgumentException("Can not create config instance for '" + type + "' since " @@ -90,7 +88,7 @@ T createConfigInstance(@NonNull final Class type) @Nullable private Object getValueForRecordComponent( @NonNull final String namePrefix, @NonNull final RecordComponent component) { - throwArgNull(component, "component"); + Objects.requireNonNull(component, "component must not be null"); final String name = createPropertyName(namePrefix, component); final Class valueType = component.getType(); if (hasDefaultValue(component)) { @@ -117,8 +115,8 @@ private Object getValueForRecordComponent( } private static boolean isGenericType(@NonNull final RecordComponent component, @NonNull final Class type) { - throwArgNull(component, "component"); - throwArgNull(type, "type"); + Objects.requireNonNull(component, "component must not be null"); + Objects.requireNonNull(type, "type must not be null"); final ParameterizedType stringSetType = (ParameterizedType) component.getGenericType(); return Objects.equals(type, stringSetType.getRawType()); } @@ -134,7 +132,7 @@ private static Class getGenericSetType(@NonNull final RecordComponent com @SuppressWarnings("unchecked") @NonNull private static Class getGenericListType(@NonNull final RecordComponent component) { - throwArgNull(component, "component"); + Objects.requireNonNull(component, "component must not be null"); if (!isGenericType(component, List.class)) { throw new IllegalArgumentException("Only List interface is supported"); } @@ -148,7 +146,7 @@ private static Class getGenericListType(@NonNull final RecordComponent co @Nullable private Set getDefaultValueSet(@NonNull final RecordComponent component) { - throwArgNull(component, "component"); + Objects.requireNonNull(component, "component must not be null"); final Class type = getGenericSetType(component); final String rawValue = getRawValue(component); if 
(Objects.equals(ConfigProperty.NULL_DEFAULT_VALUE, rawValue)) { @@ -162,7 +160,7 @@ private Set getDefaultValueSet(@NonNull final RecordComponent component) @SuppressWarnings("unchecked") @Nullable private List getDefaultValues(@NonNull final RecordComponent component) { - throwArgNull(component, "component"); + Objects.requireNonNull(component, "component must not be null"); final Class type = getGenericListType(component); final String rawValue = getRawValue(component); if (Objects.equals(ConfigProperty.NULL_DEFAULT_VALUE, rawValue)) { @@ -184,7 +182,7 @@ private String getRawValue(@NonNull final RecordComponent component) { @NonNull private static String getNamePrefix(@NonNull final Class type) { - throwArgNull(type, "type"); + Objects.requireNonNull(type, "type must not be null"); return Optional.ofNullable(type.getAnnotation(ConfigData.class)) .map(ConfigData::value) .orElse(""); @@ -193,7 +191,7 @@ private static String getNamePrefix(@NonNull final Class t @SuppressWarnings("unchecked") @Nullable private T getDefaultValue(@NonNull final RecordComponent component) { - throwArgNull(component, "component"); + Objects.requireNonNull(component, "component must not be null"); final String rawValue = getRawValue(component); if (Objects.equals(ConfigProperty.NULL_DEFAULT_VALUE, rawValue)) { return null; @@ -203,14 +201,14 @@ private T getDefaultValue(@NonNull final RecordComponent component) { @NonNull private static Optional getRawDefaultValue(@NonNull final RecordComponent component) { - throwArgNull(component, "component"); + Objects.requireNonNull(component, "component must not be null"); return Optional.ofNullable(component.getAnnotation(ConfigProperty.class)) .map(ConfigProperty::defaultValue) .filter(defaultValue -> !Objects.equals(ConfigProperty.UNDEFINED_DEFAULT_VALUE, defaultValue)); } private static boolean hasDefaultValue(@NonNull final RecordComponent component) { - throwArgNull(component, "component"); + Objects.requireNonNull(component, "component must not be null"); return Optional.ofNullable(component.getAnnotation(ConfigProperty.class)) .map(propertyAnnotation -> !Objects.equals(ConfigProperty.UNDEFINED_DEFAULT_VALUE, propertyAnnotation.defaultValue())) @@ -219,7 +217,7 @@ private static boolean hasDefaultValue(@NonNull final RecordComponent component) @NonNull private static String createPropertyName(@NonNull final String prefix, @NonNull final RecordComponent component) { - throwArgNull(component, "component"); + Objects.requireNonNull(component, "component must not be null"); return Optional.ofNullable(component.getAnnotation(ConfigProperty.class)) .map(propertyAnnotation -> { if (!propertyAnnotation.value().isBlank()) { @@ -233,8 +231,8 @@ private static String createPropertyName(@NonNull final String prefix, @NonNull @NonNull private static String createPropertyName(@NonNull final String prefix, @NonNull final String name) { - throwArgNull(prefix, "prefix"); - throwArgNull(name, "name"); + Objects.requireNonNull(prefix, "prefix must not be null"); + Objects.requireNonNull(name, "name must not be null"); if (prefix.isBlank()) { return name; } diff --git a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigDataService.java b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigDataService.java index f0f7f77be287..733330c3a862 100644 --- a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigDataService.java +++ 
b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigDataService.java @@ -16,12 +16,12 @@ package com.swirlds.config.impl.internal; -import com.swirlds.base.ArgumentUtils; import com.swirlds.config.api.Configuration; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Collection; import java.util.HashMap; import java.util.Map; +import java.util.Objects; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; @@ -64,7 +64,7 @@ class ConfigDataService implements ConfigLifecycle { * @param generic type of the config data object */ void addConfigDataType(@NonNull final Class type) { - ArgumentUtils.throwArgNull(type, "type"); + Objects.requireNonNull(type, "type must not be null"); throwIfInitialized(); registeredTypes.add(type); } @@ -105,7 +105,7 @@ public boolean isInitialized() { */ @NonNull T getConfigData(@NonNull final Class type) { - ArgumentUtils.throwArgNull(type, "type"); + Objects.requireNonNull(type, "type must not be null"); throwIfNotInitialized(); if (!configDataCache.containsKey(type)) { throw new IllegalArgumentException("No config data record available of type '" + type + "'"); diff --git a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigNumberUtils.java b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigNumberUtils.java index 17808064ca0c..3afab2c5a903 100644 --- a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigNumberUtils.java +++ b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigNumberUtils.java @@ -16,7 +16,6 @@ package com.swirlds.config.impl.internal; -import com.swirlds.base.ArgumentUtils; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Objects; @@ -39,9 +38,9 @@ private ConfigNumberUtils() {} */ public static int compare( @NonNull final T value, @NonNull final Class valueType, @NonNull final Number number) { - ArgumentUtils.throwArgNull(value, "value"); - ArgumentUtils.throwArgNull(valueType, "valueType"); - ArgumentUtils.throwArgNull(number, "number"); + Objects.requireNonNull(value, "value must not be null"); + Objects.requireNonNull(valueType, "valueType must not be null"); + Objects.requireNonNull(number, "number must not be null"); if (Objects.equals(valueType, Integer.class) || Objects.equals(valueType, Integer.TYPE)) { return Integer.compare(value.intValue(), number.intValue()); @@ -72,7 +71,7 @@ public static int compare( * @throws IllegalArgumentException if the given value is not a valid number */ public static long getLongValue(@NonNull final Object value) { - ArgumentUtils.throwArgNull(value, "value"); + Objects.requireNonNull(value, "value must not be null"); if (value instanceof Number number) { return number.longValue(); } @@ -86,7 +85,7 @@ public static long getLongValue(@NonNull final Object value) { * @return true if the given class represents a number */ public static boolean isNumber(@NonNull final Class cls) { - ArgumentUtils.throwArgNull(cls, "cls"); + Objects.requireNonNull(cls, "cls must not be null"); if (Number.class.isAssignableFrom(cls)) { return true; } diff --git a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigPropertiesService.java b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigPropertiesService.java index 0e2fae5a59e5..232644d16ff8 100644 --- 
a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigPropertiesService.java +++ b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigPropertiesService.java @@ -16,13 +16,13 @@ package com.swirlds.config.impl.internal; -import com.swirlds.base.ArgumentUtils; import com.swirlds.common.utility.CommonUtils; import com.swirlds.config.api.source.ConfigSource; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.HashMap; import java.util.Map; +import java.util.Objects; import java.util.stream.Stream; /** @@ -46,7 +46,7 @@ class ConfigPropertiesService implements ConfigLifecycle { private boolean initialized = false; ConfigPropertiesService(@NonNull final ConfigSourceService configSourceService) { - this.configSourceService = ArgumentUtils.throwArgNull(configSourceService, "configSourceService"); + this.configSourceService = Objects.requireNonNull(configSourceService, "configSourceService must not be null"); internalProperties = new HashMap<>(); } diff --git a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigSourceService.java b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigSourceService.java index b8e7467628ac..6d4d490cec4a 100644 --- a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigSourceService.java +++ b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigSourceService.java @@ -16,11 +16,11 @@ package com.swirlds.config.impl.internal; -import com.swirlds.base.ArgumentUtils; import com.swirlds.config.api.source.ConfigSource; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Comparator; import java.util.List; +import java.util.Objects; import java.util.concurrent.CopyOnWriteArrayList; import java.util.stream.Stream; @@ -45,7 +45,7 @@ class ConfigSourceService implements ConfigLifecycle { void addConfigSource(@NonNull final ConfigSource configSource) { throwIfInitialized(); - ArgumentUtils.throwArgNull(configSource, "configSource"); + Objects.requireNonNull(configSource, "configSource must not be null"); sources.add(configSource); } diff --git a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigValidationService.java b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigValidationService.java index 4eb769e559f2..1d587c13c5b7 100644 --- a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigValidationService.java +++ b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigValidationService.java @@ -29,6 +29,7 @@ import com.swirlds.config.impl.validators.annotation.internal.PositiveConstraintsValidation; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.List; +import java.util.Objects; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.stream.Collectors; @@ -60,7 +61,7 @@ class ConfigValidationService implements ConfigLifecycle { void addValidator(@NonNull final ConfigValidator validator) { throwIfInitialized(); - ArgumentUtils.throwArgNull(validator, "validator"); + Objects.requireNonNull(validator, "validator must not be null"); validators.add(validator); } @@ -91,7 +92,7 @@ public boolean isInitialized() { void validate(@NonNull final Configuration configuration) { throwIfNotInitialized(); - 
ArgumentUtils.throwArgNull(configuration, "configuration"); + Objects.requireNonNull(configuration, "configuration must not be null"); final List violations = validators.stream().flatMap(v -> v.validate(configuration)).collect(Collectors.toList()); if (!violations.isEmpty()) { @@ -107,8 +108,8 @@ void addConstraint( @NonNull final ConfigPropertyConstraint validator) { throwIfInitialized(); ArgumentUtils.throwArgBlank(propertyName, "propertyName"); - ArgumentUtils.throwArgNull(valueType, "valueType"); - ArgumentUtils.throwArgNull(validator, "validator"); + Objects.requireNonNull(valueType, "valueType must not be null"); + Objects.requireNonNull(validator, "validator must not be null"); constraintValidator.addConstraint(propertyName, valueType, validator); } } diff --git a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigurationBuilderImpl.java b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigurationBuilderImpl.java index 1ad7ca49bac8..9a19034070cf 100644 --- a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigurationBuilderImpl.java +++ b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigurationBuilderImpl.java @@ -16,7 +16,6 @@ package com.swirlds.config.impl.internal; -import com.swirlds.base.ArgumentUtils; import com.swirlds.common.threading.locks.AutoClosableLock; import com.swirlds.common.threading.locks.Locks; import com.swirlds.common.threading.locks.locked.Locked; @@ -27,6 +26,7 @@ import com.swirlds.config.api.validation.ConfigValidator; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Arrays; +import java.util.Objects; import java.util.concurrent.atomic.AtomicBoolean; /** @@ -113,7 +113,7 @@ public ConfigurationBuilder withSources(@NonNull final ConfigSource... configSou } private void addConfigSource(@NonNull final ConfigSource configSource) { - ArgumentUtils.throwArgNull(configSource, "configSource"); + Objects.requireNonNull(configSource, "configSource must not be null"); if (initialized.get()) { throw new IllegalStateException("ConfigSource can not be added to initialized config"); } @@ -135,7 +135,7 @@ public ConfigurationBuilder withConverters(@NonNull final ConfigConverter... } private void addConverter(@NonNull final ConfigConverter converter) { - ArgumentUtils.throwArgNull(converter, "converter"); + Objects.requireNonNull(converter, "converter must not be null"); if (initialized.get()) { throw new IllegalStateException("Converters can not be added to initialized config"); } @@ -157,7 +157,7 @@ public ConfigurationBuilder withValidators(@NonNull final ConfigValidator... 
val } private void addValidator(@NonNull final ConfigValidator validator) { - ArgumentUtils.throwArgNull(validator, "validator"); + Objects.requireNonNull(validator, "validator must not be null"); if (initialized.get()) { throw new IllegalStateException("ConfigValidator can not be added to initialized config"); } @@ -179,7 +179,7 @@ public ConfigurationBuilder withConfigDataTypes(@NonNull final Class void addConfigDataType(@NonNull final Class type) { - ArgumentUtils.throwArgNull(type, "type"); + Objects.requireNonNull(type, "type must not be null"); if (initialized.get()) { throw new IllegalStateException("ConfigDataType can not be added to initialized config"); } diff --git a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigurationImpl.java b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigurationImpl.java index ab12e53aa99f..d1b1a365a2c9 100644 --- a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigurationImpl.java +++ b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConfigurationImpl.java @@ -44,9 +44,9 @@ class ConfigurationImpl implements Configuration, ConfigLifecycle { @NonNull final ConfigPropertiesService propertiesService, @NonNull final ConverterService converterService, @NonNull final ConfigValidationService validationService) { - this.propertiesService = ArgumentUtils.throwArgNull(propertiesService, "propertiesService"); - this.converterService = ArgumentUtils.throwArgNull(converterService, "converterService"); - this.validationService = ArgumentUtils.throwArgNull(validationService, "validationService"); + this.propertiesService = Objects.requireNonNull(propertiesService, "propertiesService must not be null"); + this.converterService = Objects.requireNonNull(converterService, "converterService must not be null"); + this.validationService = Objects.requireNonNull(validationService, "validationService must not be null"); this.configDataService = new ConfigDataService(this, converterService); } @@ -67,7 +67,7 @@ public boolean exists(@NonNull final String propertyName) { @Override public T getValue(@NonNull final String propertyName, @NonNull final Class propertyType) { ArgumentUtils.throwArgBlank(propertyName, "propertyName"); - ArgumentUtils.throwArgNull(propertyType, "propertyType"); + Objects.requireNonNull(propertyType, "propertyType must not be null"); final String rawValue = getValue(propertyName); if (Objects.equals(propertyType, String.class)) { return (T) rawValue; @@ -80,7 +80,7 @@ public T getValue(@NonNull final String propertyName, @NonNull final Class T getValue( @NonNull final String propertyName, @NonNull final Class propertyType, @Nullable final T defaultValue) { ArgumentUtils.throwArgBlank(propertyName, "propertyName"); - ArgumentUtils.throwArgNull(propertyType, "propertyType"); + Objects.requireNonNull(propertyType, "propertyType must not be null"); if (!exists(propertyName)) { return defaultValue; } @@ -112,7 +112,7 @@ public List getValues(@NonNull final String propertyName, @Nullable fina @Override public List getValues(@NonNull final String propertyName, @NonNull final Class propertyType) { ArgumentUtils.throwArgBlank(propertyName, "propertyName"); - ArgumentUtils.throwArgNull(propertyType, "propertyType"); + Objects.requireNonNull(propertyType, "propertyType must not be null"); final List values = getValues(propertyName); if (values == null) { return null; @@ -129,7 +129,7 @@ public List getValues( @NonNull 
final Class propertyType, @Nullable final List defaultValue) { ArgumentUtils.throwArgBlank(propertyName, "propertyName"); - ArgumentUtils.throwArgNull(propertyType, "propertyType"); + Objects.requireNonNull(propertyType, "propertyType must not be null"); if (!exists(propertyName)) { return defaultValue; } diff --git a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConstraintValidator.java b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConstraintValidator.java index fd742d519bba..695f49fab16d 100644 --- a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConstraintValidator.java +++ b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConstraintValidator.java @@ -37,7 +37,7 @@ final class ConstraintValidator implements ConfigValidator { private final Queue constraintData; ConstraintValidator(@NonNull final ConverterService converterService) { - this.converterService = ArgumentUtils.throwArgNull(converterService, "converterService"); + this.converterService = Objects.requireNonNull(converterService, "converterService must not be null"); this.constraintData = new ConcurrentLinkedQueue<>(); } @@ -45,7 +45,7 @@ final class ConstraintValidator implements ConfigValidator { @NonNull @Override public Stream validate(@NonNull final Configuration configuration) { - ArgumentUtils.throwArgNull(configuration, "configuration"); + Objects.requireNonNull(configuration, "configuration must not be null"); return constraintData.stream() .map(d -> { final PropertyMetadata propertyMetadata = @@ -61,8 +61,8 @@ private PropertyMetadata createMetadata( @NonNull final Class valueType, @NonNull final Configuration configuration) { ArgumentUtils.throwArgBlank(propertyName, "propertyName"); - ArgumentUtils.throwArgNull(valueType, "valueType"); - ArgumentUtils.throwArgNull(configuration, "configuration"); + Objects.requireNonNull(valueType, "valueType must not be null"); + Objects.requireNonNull(configuration, "configuration must not be null"); if (configuration.exists(propertyName)) { final ConfigConverter converter = converterService.getConverterForType(valueType); return new PropertyMetadataImpl<>( @@ -78,8 +78,8 @@ void addConstraint( @NonNull final Class valueType, @NonNull final ConfigPropertyConstraint validator) { ArgumentUtils.throwArgBlank(propertyName, "propertyName"); - ArgumentUtils.throwArgNull(valueType, "valueType"); - ArgumentUtils.throwArgNull(validator, "validator"); + Objects.requireNonNull(valueType, "valueType must not be null"); + Objects.requireNonNull(validator, "validator must not be null"); constraintData.add(new ConfigPropertyConstraintData<>(propertyName, valueType, validator)); } diff --git a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConverterService.java b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConverterService.java index f4d08de4b2de..8ecc75689c04 100644 --- a/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConverterService.java +++ b/platform-sdk/swirlds-config-impl/src/main/java/com/swirlds/config/impl/internal/ConverterService.java @@ -16,7 +16,6 @@ package com.swirlds.config.impl.internal; -import com.swirlds.base.ArgumentUtils; import com.swirlds.config.api.converter.ConfigConverter; import com.swirlds.config.impl.converters.BigDecimalConverter; import com.swirlds.config.impl.converters.BigIntegerConverter; @@ -101,7 +100,7 @@ class ConverterService 
implements ConfigLifecycle { @NonNull private > Class getConverterType(@NonNull final Class converterClass) { - ArgumentUtils.throwArgNull(converterClass, "converterClass"); + Objects.requireNonNull(converterClass, "converterClass must not be null"); return Arrays.stream(converterClass.getGenericInterfaces()) .filter(ParameterizedType.class::isInstance) .map(ParameterizedType.class::cast) @@ -121,7 +120,7 @@ private > Class getConverterType(@NonNull fin @Nullable T convert(@Nullable final String value, @NonNull final Class targetClass) { throwIfNotInitialized(); - ArgumentUtils.throwArgNull(targetClass, "targetClass"); + Objects.requireNonNull(targetClass, "targetClass must not be null"); if (value == null) { return null; } @@ -141,15 +140,15 @@ T convert(@Nullable final String value, @NonNull final Class targetClass) void addConverter(@NonNull final ConfigConverter converter) { throwIfInitialized(); - ArgumentUtils.throwArgNull(converter, "converter"); + Objects.requireNonNull(converter, "converter must not be null"); final Class converterType = getConverterType(converter.getClass()); add(converterType, converter); } private void add(@NonNull final Class converterType, @NonNull final ConfigConverter converter) { throwIfInitialized(); - ArgumentUtils.throwArgNull(converterType, "converterType"); - ArgumentUtils.throwArgNull(converter, "converter"); + Objects.requireNonNull(converterType, "converterType must not be null"); + Objects.requireNonNull(converter, "converter must not be null"); if (converters.containsKey(converterType)) { throw new IllegalStateException("Converter for type '" + converterType + "' already registered"); @@ -208,7 +207,7 @@ public boolean isInitialized() { @Nullable ConfigConverter getConverterForType(@NonNull final Class valueType) { throwIfNotInitialized(); - ArgumentUtils.throwArgNull(valueType, "valueType"); + Objects.requireNonNull(valueType, "valueType must not be null"); return (ConfigConverter) converters.get(valueType); } } From 431b44fcf311bcca5a0dc78424ee215e721cb717 Mon Sep 17 00:00:00 2001 From: Hendrik Ebbers Date: Thu, 15 Jun 2023 14:18:51 +0200 Subject: [PATCH 33/70] portforwarding moved to com.swirlds.platform (#7093) Signed-off-by: Hendrik Ebbers --- .../src/main/java/com/swirlds/platform/Browser.java | 4 ++-- .../main/java/com/swirlds/platform/network/Network.java | 6 +++--- .../{p2p => platform}/portforwarding/PortForwarder.java | 4 ++-- .../{p2p => platform}/portforwarding/PortMapping.java | 6 +++--- .../portforwarding/PortMappingListener.java | 4 ++-- .../portforwarding/portmapper/MappingRefresher.java | 4 ++-- .../portmapper/PortMapperPortForwarder.java | 8 ++++---- .../portforwarding/portmapper/PortPair.java | 4 ++-- 8 files changed, 20 insertions(+), 20 deletions(-) rename platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/{p2p => platform}/portforwarding/PortForwarder.java (92%) rename platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/{p2p => platform}/portforwarding/PortMapping.java (88%) rename platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/{p2p => platform}/portforwarding/PortMappingListener.java (90%) rename platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/{p2p => platform}/portforwarding/portmapper/MappingRefresher.java (92%) rename platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/{p2p => platform}/portforwarding/portmapper/PortMapperPortForwarder.java (97%) rename platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/{p2p => 
platform}/portforwarding/portmapper/PortPair.java (90%) diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java index f7eb4f3e79a5..2f2050b70796 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java @@ -75,8 +75,6 @@ import com.swirlds.jasperdb.config.JasperDbConfig; import com.swirlds.logging.payloads.NodeAddressMismatchPayload; import com.swirlds.logging.payloads.NodeStartPayload; -import com.swirlds.p2p.portforwarding.PortForwarder; -import com.swirlds.p2p.portforwarding.PortMapping; import com.swirlds.platform.config.AddressBookConfig; import com.swirlds.platform.config.ConfigMappings; import com.swirlds.platform.config.ThreadConfig; @@ -98,6 +96,8 @@ import com.swirlds.platform.health.entropy.OSEntropyChecker; import com.swirlds.platform.health.filesystem.OSFileSystemChecker; import com.swirlds.platform.network.Network; +import com.swirlds.platform.portforwarding.PortForwarder; +import com.swirlds.platform.portforwarding.PortMapping; import com.swirlds.platform.reconnect.emergency.EmergencySignedStateValidator; import com.swirlds.platform.recovery.EmergencyRecoveryManager; import com.swirlds.platform.state.address.AddressBookInitializer; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/network/Network.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/network/Network.java index 4247dfdc5936..16c3f0c0037c 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/network/Network.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/network/Network.java @@ -21,9 +21,9 @@ import com.swirlds.common.threading.framework.config.ThreadConfiguration; import com.swirlds.common.threading.manager.ThreadManager; import com.swirlds.common.utility.CommonUtils; -import com.swirlds.p2p.portforwarding.PortMapping; -import com.swirlds.p2p.portforwarding.PortMappingListener; -import com.swirlds.p2p.portforwarding.portmapper.PortMapperPortForwarder; +import com.swirlds.platform.portforwarding.PortMapping; +import com.swirlds.platform.portforwarding.PortMappingListener; +import com.swirlds.platform.portforwarding.portmapper.PortMapperPortForwarder; import java.net.Inet4Address; import java.net.InetAddress; import java.net.NetworkInterface; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/PortForwarder.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/PortForwarder.java similarity index 92% rename from platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/PortForwarder.java rename to platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/PortForwarder.java index 37e7338429b3..0a397aad0016 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/PortForwarder.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/PortForwarder.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016-2023 Hedera Hashgraph, LLC + * Copyright (C) 2023 Hedera Hashgraph, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package com.swirlds.p2p.portforwarding; +package com.swirlds.platform.portforwarding; import java.util.List; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/PortMapping.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/PortMapping.java similarity index 88% rename from platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/PortMapping.java rename to platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/PortMapping.java index fbd17adf4d49..f4c09e638c11 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/PortMapping.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/PortMapping.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016-2023 Hedera Hashgraph, LLC + * Copyright (C) 2023 Hedera Hashgraph, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,9 +14,9 @@ * limitations under the License. */ -package com.swirlds.p2p.portforwarding; +package com.swirlds.platform.portforwarding; -import com.swirlds.p2p.portforwarding.PortForwarder.Protocol; +import com.swirlds.platform.portforwarding.PortForwarder.Protocol; public class PortMapping { private final String ip; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/PortMappingListener.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/PortMappingListener.java similarity index 90% rename from platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/PortMappingListener.java rename to platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/PortMappingListener.java index 2cb829c4f70d..54e96da5bc82 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/PortMappingListener.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/PortMappingListener.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016-2023 Hedera Hashgraph, LLC + * Copyright (C) 2023 Hedera Hashgraph, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.swirlds.p2p.portforwarding; +package com.swirlds.platform.portforwarding; public interface PortMappingListener { public void noForwardingDeviceFound(); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/portmapper/MappingRefresher.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/portmapper/MappingRefresher.java similarity index 92% rename from platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/portmapper/MappingRefresher.java rename to platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/portmapper/MappingRefresher.java index db8840509a6a..029388084812 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/portmapper/MappingRefresher.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/portmapper/MappingRefresher.java @@ -14,9 +14,9 @@ * limitations under the License. 
*/ -package com.swirlds.p2p.portforwarding.portmapper; +package com.swirlds.platform.portforwarding.portmapper; -import com.swirlds.p2p.portforwarding.PortForwarder; +import com.swirlds.platform.portforwarding.PortForwarder; public class MappingRefresher implements Runnable { private PortForwarder forwarder; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/portmapper/PortMapperPortForwarder.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/portmapper/PortMapperPortForwarder.java similarity index 97% rename from platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/portmapper/PortMapperPortForwarder.java rename to platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/portmapper/PortMapperPortForwarder.java index 21944140d3eb..c42411a28272 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/portmapper/PortMapperPortForwarder.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/portmapper/PortMapperPortForwarder.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.swirlds.p2p.portforwarding.portmapper; +package com.swirlds.platform.portforwarding.portmapper; import static com.swirlds.logging.LogMarker.PORT_FORWARDING; @@ -30,9 +30,9 @@ import com.offbynull.portmapper.mapper.PortType; import com.swirlds.common.threading.framework.config.ThreadConfiguration; import com.swirlds.common.threading.manager.ThreadManager; -import com.swirlds.p2p.portforwarding.PortForwarder; -import com.swirlds.p2p.portforwarding.PortMapping; -import com.swirlds.p2p.portforwarding.PortMappingListener; +import com.swirlds.platform.portforwarding.PortForwarder; +import com.swirlds.platform.portforwarding.PortMapping; +import com.swirlds.platform.portforwarding.PortMappingListener; import java.util.Iterator; import java.util.LinkedList; import java.util.List; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/portmapper/PortPair.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/portmapper/PortPair.java similarity index 90% rename from platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/portmapper/PortPair.java rename to platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/portmapper/PortPair.java index 39278c3eaf53..7a047709d66b 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/p2p/portforwarding/portmapper/PortPair.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/portforwarding/portmapper/PortPair.java @@ -14,10 +14,10 @@ * limitations under the License. */ -package com.swirlds.p2p.portforwarding.portmapper; +package com.swirlds.platform.portforwarding.portmapper; import com.offbynull.portmapper.mapper.MappedPort; -import com.swirlds.p2p.portforwarding.PortMapping; +import com.swirlds.platform.portforwarding.PortMapping; class PortPair { private final PortMapping specified; From b2e5f579b0363c5101b47ca729a4d7ea0bf1c8dd Mon Sep 17 00:00:00 2001 From: Cody Littley <56973212+cody-littley@users.noreply.github.com> Date: Thu, 15 Jun 2023 08:42:27 -0500 Subject: [PATCH 34/70] Fix test flake in random event test. 
(#7117) Signed-off-by: Cody Littley --- .../swirlds/platform/test/components/EventTaskCreatorTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskCreatorTest.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskCreatorTest.java index 48862a6ecf99..9f1a68f23685 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskCreatorTest.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskCreatorTest.java @@ -74,7 +74,7 @@ private void init(final EventConfig config) { eventMapper = mock(EventMapper.class); addressBook = prepareAddressBook(); address = mock(Address.class); - selfId = new NodeId(1); + selfId = addressBook.getNodeId(addressBook.getSize() - 1); eventIntakeMetrics = mock(EventIntakeMetrics.class); eventQueueThread = mock(BlockingQueue.class); syncManager = mock(SyncManager.class); From f5353562e6df690b638a159c87f2de854220fc48 Mon Sep 17 00:00:00 2001 From: Jendrik Johannes Date: Thu, 15 Jun 2023 16:07:11 +0200 Subject: [PATCH 35/70] Set all 'spotbugs.annotations' dependencies to 'requires static' (#7088) Signed-off-by: Jendrik Johannes --- hedera-node/cli-clients/src/main/java/module-info.java | 2 +- hedera-node/hapi-fees/src/main/java/module-info.java | 2 +- hedera-node/hapi-utils/src/main/java/module-info.java | 2 +- hedera-node/hapi/src/main/java/module-info.java | 2 +- hedera-node/hapi/src/testFixtures/java/module-info.java | 2 +- hedera-node/hedera-app-spi/build.gradle.kts | 1 + hedera-node/hedera-app-spi/src/main/java/module-info.java | 2 +- .../hedera-app-spi/src/testFixtures/java/module-info.java | 2 +- hedera-node/hedera-app/build.gradle.kts | 1 + hedera-node/hedera-app/src/main/java/module-info.java | 2 +- hedera-node/hedera-app/src/testFixtures/java/module-info.java | 2 +- hedera-node/hedera-config/build.gradle.kts | 1 + hedera-node/hedera-config/src/main/java/module-info.java | 2 +- .../hedera-config/src/testFixtures/java/module-info.java | 2 +- hedera-node/hedera-consensus-service-impl/build.gradle.kts | 1 + .../src/main/java/module-info.java | 2 +- .../hedera-consensus-service/src/main/java/module-info.java | 2 +- hedera-node/hedera-evm-impl/src/main/java/module-info.java | 2 +- hedera-node/hedera-evm/src/main/java/module-info.java | 2 +- hedera-node/hedera-file-service-impl/build.gradle.kts | 1 + .../hedera-file-service-impl/src/main/java/module-info.java | 2 +- hedera-node/hedera-file-service/src/main/java/module-info.java | 2 +- hedera-node/hedera-mono-service/build.gradle.kts | 1 + hedera-node/hedera-mono-service/src/main/java/module-info.java | 2 +- .../hedera-mono-service/src/testFixtures/java/module-info.java | 2 +- hedera-node/hedera-network-admin-service-impl/build.gradle.kts | 1 + .../src/main/java/module-info.java | 2 +- .../hedera-network-admin-service/src/main/java/module-info.java | 2 +- .../hedera-schedule-service-impl/src/main/java/module-info.java | 2 +- .../hedera-schedule-service/src/main/java/module-info.java | 2 +- .../src/main/java/module-info.java | 2 +- .../src/main/java/module-info.java | 2 +- hedera-node/hedera-token-service-impl/build.gradle.kts | 1 + .../hedera-token-service-impl/src/main/java/module-info.java | 2 +- hedera-node/hedera-token-service/src/main/java/module-info.java | 2 
+- .../hedera-util-service-impl/src/main/java/module-info.java | 2 +- hedera-node/hedera-util-service/src/main/java/module-info.java | 2 +- 37 files changed, 37 insertions(+), 29 deletions(-) diff --git a/hedera-node/cli-clients/src/main/java/module-info.java b/hedera-node/cli-clients/src/main/java/module-info.java index c9a8c503f1dc..8b1225c45c35 100644 --- a/hedera-node/cli-clients/src/main/java/module-info.java +++ b/hedera-node/cli-clients/src/main/java/module-info.java @@ -9,10 +9,10 @@ requires com.hedera.node.app.hapi.utils; requires com.hedera.node.app.service.mono; requires com.hedera.node.hapi; - requires com.github.spotbugs.annotations; requires com.google.common; requires com.google.protobuf; requires com.swirlds.config; requires com.swirlds.virtualmap; requires org.apache.commons.lang3; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hapi-fees/src/main/java/module-info.java b/hedera-node/hapi-fees/src/main/java/module-info.java index 2583437c7fd8..0c553c6e0001 100644 --- a/hedera-node/hapi-fees/src/main/java/module-info.java +++ b/hedera-node/hapi-fees/src/main/java/module-info.java @@ -21,7 +21,7 @@ requires transitive dagger; requires transitive javax.inject; requires com.fasterxml.jackson.databind; - requires com.github.spotbugs.annotations; requires org.apache.commons.lang3; requires org.apache.logging.log4j; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hapi-utils/src/main/java/module-info.java b/hedera-node/hapi-utils/src/main/java/module-info.java index 6174cc0c304e..22cd1ecedc4a 100644 --- a/hedera-node/hapi-utils/src/main/java/module-info.java +++ b/hedera-node/hapi-utils/src/main/java/module-info.java @@ -25,7 +25,6 @@ requires transitive org.apache.commons.lang3; requires com.hedera.node.app.service.evm; requires com.fasterxml.jackson.databind; - requires com.github.spotbugs.annotations; requires com.google.common; requires com.sun.jna; requires org.apache.commons.codec; @@ -34,4 +33,5 @@ requires org.bouncycastle.pkix; requires org.bouncycastle.provider; requires org.hyperledger.besu.secp256k1; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hapi/src/main/java/module-info.java b/hedera-node/hapi/src/main/java/module-info.java index efd1175dfe7f..9e7485435117 100644 --- a/hedera-node/hapi/src/main/java/module-info.java +++ b/hedera-node/hapi/src/main/java/module-info.java @@ -49,7 +49,7 @@ requires transitive com.google.protobuf; requires transitive grpc.stub; requires transitive io.grpc; - requires com.github.spotbugs.annotations; requires grpc.protobuf; requires org.antlr.antlr4.runtime; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hapi/src/testFixtures/java/module-info.java b/hedera-node/hapi/src/testFixtures/java/module-info.java index 6d31f39bdbed..c18ea6877fad 100644 --- a/hedera-node/hapi/src/testFixtures/java/module-info.java +++ b/hedera-node/hapi/src/testFixtures/java/module-info.java @@ -3,5 +3,5 @@ requires transitive com.hedera.pbj.runtime; requires com.hedera.node.hapi; - requires com.github.spotbugs.annotations; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-app-spi/build.gradle.kts b/hedera-node/hedera-app-spi/build.gradle.kts index 7e5ec9f5c578..a01339241138 100644 --- a/hedera-node/hedera-app-spi/build.gradle.kts +++ b/hedera-node/hedera-app-spi/build.gradle.kts @@ -29,5 +29,6 @@ dependencies { testImplementation(gav("org.junit.jupiter.params")) testImplementation(gav("org.mockito")) 
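The theme of PATCH 35 is that annotation-only dependencies such as the SpotBugs annotations are needed when compiling but not when running, so each module-info.java in the diff declares them with "requires static" and the Gradle test source sets pull them in through testCompileOnly. A minimal sketch of that pattern follows; the module name com.example.demo, the class Demo, and the greet method are illustrative only and do not come from this repository.

// module-info.java -- the annotations module is compile-only ("static"),
// so it may be absent from the runtime module path without causing a failure.
module com.example.demo {
    requires static com.github.spotbugs.annotations;
}

// Demo.java -- code inside the module can still reference the annotations,
// which are consumed by static-analysis tooling at build time.
import edu.umd.cs.findbugs.annotations.NonNull;

public final class Demo {
    public static String greet(@NonNull final String name) {
        return "Hello, " + name;
    }
}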
testImplementation(gav("org.mockito.junit.jupiter")) + testCompileOnly(gav("com.github.spotbugs.annotations")) } } diff --git a/hedera-node/hedera-app-spi/src/main/java/module-info.java b/hedera-node/hedera-app-spi/src/main/java/module-info.java index 248c02665ac9..0248834a2d25 100644 --- a/hedera-node/hedera-app-spi/src/main/java/module-info.java +++ b/hedera-node/hedera-app-spi/src/main/java/module-info.java @@ -3,7 +3,7 @@ requires transitive com.hedera.pbj.runtime; requires transitive com.swirlds.common; requires transitive com.swirlds.config; - requires com.github.spotbugs.annotations; + requires static com.github.spotbugs.annotations; exports com.hedera.node.app.spi; exports com.hedera.node.app.spi.state; diff --git a/hedera-node/hedera-app-spi/src/testFixtures/java/module-info.java b/hedera-node/hedera-app-spi/src/testFixtures/java/module-info.java index 08867ca0ab52..77a2164f2332 100644 --- a/hedera-node/hedera-app-spi/src/testFixtures/java/module-info.java +++ b/hedera-node/hedera-app-spi/src/testFixtures/java/module-info.java @@ -11,9 +11,9 @@ requires transitive org.apache.logging.log4j; requires transitive org.assertj.core; requires transitive org.junit.jupiter.api; - requires com.github.spotbugs.annotations; requires org.apache.logging.log4j.core; // Temporarily needed until FakePreHandleContext can be removed requires static com.hedera.node.app.service.token; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-app/build.gradle.kts b/hedera-node/hedera-app/build.gradle.kts index 5e2929368e09..5c053c7fef81 100644 --- a/hedera-node/hedera-app/build.gradle.kts +++ b/hedera-node/hedera-app/build.gradle.kts @@ -39,6 +39,7 @@ dependencies { testImplementation(gav("org.mockito.junit.jupiter")) testImplementation(gav("uk.org.webcompere.systemstubs.jupiter")) testImplementation(gav("uk.org.webcompere.systemstubs.core")) + testCompileOnly(gav("com.github.spotbugs.annotations")) itestImplementation(project(":hedera-node:node-app")) itestImplementation(project(":hedera-node:node-app-spi")) diff --git a/hedera-node/hedera-app/src/main/java/module-info.java b/hedera-node/hedera-app/src/main/java/module-info.java index aa6a4f485585..ba80b3131c6c 100644 --- a/hedera-node/hedera-app/src/main/java/module-info.java +++ b/hedera-node/hedera-app/src/main/java/module-info.java @@ -28,7 +28,6 @@ requires com.hedera.node.app.service.schedule; requires com.hedera.node.app.service.token; requires com.hedera.node.app.service.util; - requires com.github.spotbugs.annotations; requires com.google.common; requires com.google.protobuf; requires com.swirlds.fchashmap; @@ -44,6 +43,7 @@ requires org.hyperledger.besu.datatypes; requires org.hyperledger.besu.evm; requires org.slf4j; + requires static com.github.spotbugs.annotations; exports com.hedera.node.app to com.swirlds.platform; diff --git a/hedera-node/hedera-app/src/testFixtures/java/module-info.java b/hedera-node/hedera-app/src/testFixtures/java/module-info.java index 664bd6ad1728..920e4ea0a134 100644 --- a/hedera-node/hedera-app/src/testFixtures/java/module-info.java +++ b/hedera-node/hedera-app/src/testFixtures/java/module-info.java @@ -4,6 +4,6 @@ requires transitive com.hedera.node.app.spi; requires com.hedera.node.app.spi.test.fixtures; requires com.hedera.node.app; - requires com.github.spotbugs.annotations; requires com.swirlds.config; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-config/build.gradle.kts b/hedera-node/hedera-config/build.gradle.kts index 
c8f16f338bcd..c1d0745a39a2 100644 --- a/hedera-node/hedera-config/build.gradle.kts +++ b/hedera-node/hedera-config/build.gradle.kts @@ -29,5 +29,6 @@ dependencies { testImplementation(gav("org.junit.jupiter.params")) testImplementation(gav("org.mockito")) testImplementation(gav("org.mockito.junit.jupiter")) + testCompileOnly(gav("com.github.spotbugs.annotations")) } } diff --git a/hedera-node/hedera-config/src/main/java/module-info.java b/hedera-node/hedera-config/src/main/java/module-info.java index a3f0107243a1..8191defa2aeb 100644 --- a/hedera-node/hedera-config/src/main/java/module-info.java +++ b/hedera-node/hedera-config/src/main/java/module-info.java @@ -11,6 +11,6 @@ requires transitive com.hedera.node.hapi; requires transitive com.hedera.pbj.runtime; requires transitive com.swirlds.config; - requires com.github.spotbugs.annotations; requires com.swirlds.common; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-config/src/testFixtures/java/module-info.java b/hedera-node/hedera-config/src/testFixtures/java/module-info.java index 07981316bc94..59183ca94c13 100644 --- a/hedera-node/hedera-config/src/testFixtures/java/module-info.java +++ b/hedera-node/hedera-config/src/testFixtures/java/module-info.java @@ -3,7 +3,7 @@ requires transitive com.swirlds.config; requires com.hedera.node.config; - requires com.github.spotbugs.annotations; requires com.swirlds.common; requires com.swirlds.test.framework; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-consensus-service-impl/build.gradle.kts b/hedera-node/hedera-consensus-service-impl/build.gradle.kts index 8f02d3277d71..b479ce839ff9 100644 --- a/hedera-node/hedera-consensus-service-impl/build.gradle.kts +++ b/hedera-node/hedera-consensus-service-impl/build.gradle.kts @@ -34,6 +34,7 @@ dependencies { testImplementation(gav("org.junit.jupiter.api")) testImplementation(gav("org.mockito")) testImplementation(gav("org.mockito.junit.jupiter")) + testCompileOnly(gav("com.github.spotbugs.annotations")) testRuntimeOnly(gav("org.mockito.inline")) } } diff --git a/hedera-node/hedera-consensus-service-impl/src/main/java/module-info.java b/hedera-node/hedera-consensus-service-impl/src/main/java/module-info.java index 2801bc5b65eb..269fc76bd83d 100644 --- a/hedera-node/hedera-consensus-service-impl/src/main/java/module-info.java +++ b/hedera-node/hedera-consensus-service-impl/src/main/java/module-info.java @@ -9,8 +9,8 @@ requires transitive dagger; requires transitive javax.inject; requires com.hedera.node.config; - requires com.github.spotbugs.annotations; requires com.swirlds.config; + requires static com.github.spotbugs.annotations; provides com.hedera.node.app.service.consensus.ConsensusService with ConsensusServiceImpl; diff --git a/hedera-node/hedera-consensus-service/src/main/java/module-info.java b/hedera-node/hedera-consensus-service/src/main/java/module-info.java index 3bab0011456f..4e335e48e3bc 100644 --- a/hedera-node/hedera-consensus-service/src/main/java/module-info.java +++ b/hedera-node/hedera-consensus-service/src/main/java/module-info.java @@ -5,5 +5,5 @@ requires transitive com.hedera.node.app.spi; requires transitive com.hedera.node.hapi; - requires com.github.spotbugs.annotations; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-evm-impl/src/main/java/module-info.java b/hedera-node/hedera-evm-impl/src/main/java/module-info.java index 0b60d1db3d1e..bb3d6ae531d2 100644 --- 
a/hedera-node/hedera-evm-impl/src/main/java/module-info.java +++ b/hedera-node/hedera-evm-impl/src/main/java/module-info.java @@ -1,4 +1,4 @@ module com.hedera.node.app.service.evm.impl { requires transitive com.hedera.node.app.service.evm; - requires com.github.spotbugs.annotations; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-evm/src/main/java/module-info.java b/hedera-node/hedera-evm/src/main/java/module-info.java index ef6b4f90cd83..9c5d59fdde88 100644 --- a/hedera-node/hedera-evm/src/main/java/module-info.java +++ b/hedera-node/hedera-evm/src/main/java/module-info.java @@ -13,10 +13,10 @@ requires transitive org.hyperledger.besu.secp256k1; requires transitive tuweni.bytes; requires transitive tuweni.units; - requires com.github.spotbugs.annotations; requires com.google.common; requires com.sun.jna; requires org.bouncycastle.provider; + requires static com.github.spotbugs.annotations; exports com.hedera.node.app.service.evm.store.contracts.utils; exports com.hedera.node.app.service.evm.contracts.execution; diff --git a/hedera-node/hedera-file-service-impl/build.gradle.kts b/hedera-node/hedera-file-service-impl/build.gradle.kts index 7199ddd529d0..b4450b10d41b 100644 --- a/hedera-node/hedera-file-service-impl/build.gradle.kts +++ b/hedera-node/hedera-file-service-impl/build.gradle.kts @@ -32,6 +32,7 @@ dependencies { testImplementation(gav("org.mockito.junit.jupiter")) testImplementation(gav("com.google.protobuf")) testImplementation(gav("com.swirlds.common")) + testCompileOnly(gav("com.github.spotbugs.annotations")) testRuntimeOnly(gav("org.mockito.inline")) } } diff --git a/hedera-node/hedera-file-service-impl/src/main/java/module-info.java b/hedera-node/hedera-file-service-impl/src/main/java/module-info.java index 340c48c4b75b..46ab1c3bfb77 100644 --- a/hedera-node/hedera-file-service-impl/src/main/java/module-info.java +++ b/hedera-node/hedera-file-service-impl/src/main/java/module-info.java @@ -10,10 +10,10 @@ requires transitive dagger; requires transitive javax.inject; requires com.fasterxml.jackson.databind; - requires com.github.spotbugs.annotations; requires com.swirlds.config; requires org.apache.commons.lang3; requires org.apache.logging.log4j; + requires static com.github.spotbugs.annotations; provides com.hedera.node.app.service.file.FileService with FileServiceImpl; diff --git a/hedera-node/hedera-file-service/src/main/java/module-info.java b/hedera-node/hedera-file-service/src/main/java/module-info.java index ae360168be4a..a912cc866ee6 100644 --- a/hedera-node/hedera-file-service/src/main/java/module-info.java +++ b/hedera-node/hedera-file-service/src/main/java/module-info.java @@ -6,5 +6,5 @@ requires transitive com.hedera.node.app.spi; requires transitive com.hedera.node.hapi; requires transitive com.hedera.pbj.runtime; - requires com.github.spotbugs.annotations; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-mono-service/build.gradle.kts b/hedera-node/hedera-mono-service/build.gradle.kts index 6386b64060b6..8640f9535cdf 100644 --- a/hedera-node/hedera-mono-service/build.gradle.kts +++ b/hedera-node/hedera-mono-service/build.gradle.kts @@ -40,6 +40,7 @@ dependencies { testImplementation(gav("org.junitpioneer")) testImplementation(gav("org.mockito")) testImplementation(gav("org.mockito.junit.jupiter")) + testCompileOnly(gav("com.github.spotbugs.annotations")) jmhImplementation(project(":hedera-node:node-app-hapi-utils")) jmhImplementation(project(":hedera-node:node-app-spi")) diff --git 
a/hedera-node/hedera-mono-service/src/main/java/module-info.java b/hedera-node/hedera-mono-service/src/main/java/module-info.java index c6fd465f6e91..020547dd3c0e 100644 --- a/hedera-node/hedera-mono-service/src/main/java/module-info.java +++ b/hedera-node/hedera-mono-service/src/main/java/module-info.java @@ -287,7 +287,6 @@ requires transitive tuweni.bytes; requires transitive tuweni.units; requires com.fasterxml.jackson.core; - requires com.github.spotbugs.annotations; requires com.swirlds.base; requires com.swirlds.logging; requires com.swirlds.platform; @@ -299,4 +298,5 @@ requires org.bouncycastle.provider; requires org.eclipse.collections.impl; requires org.slf4j; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-mono-service/src/testFixtures/java/module-info.java b/hedera-node/hedera-mono-service/src/testFixtures/java/module-info.java index f1e5af6db675..a96df33f19ea 100644 --- a/hedera-node/hedera-mono-service/src/testFixtures/java/module-info.java +++ b/hedera-node/hedera-mono-service/src/testFixtures/java/module-info.java @@ -18,9 +18,9 @@ requires com.hedera.node.app.service.evm; requires com.hedera.node.app.service.mono; requires com.hedera.pbj.runtime; - requires com.github.spotbugs.annotations; requires com.google.common; requires net.i2p.crypto.eddsa; requires org.junit.jupiter.api; requires org.mockito; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-network-admin-service-impl/build.gradle.kts b/hedera-node/hedera-network-admin-service-impl/build.gradle.kts index f0b1b5803f19..0cbb9c92488b 100644 --- a/hedera-node/hedera-network-admin-service-impl/build.gradle.kts +++ b/hedera-node/hedera-network-admin-service-impl/build.gradle.kts @@ -32,6 +32,7 @@ dependencies { testImplementation(gav("org.junit.jupiter.api")) testImplementation(gav("org.mockito")) testImplementation(gav("org.mockito.junit.jupiter")) + testCompileOnly(gav("com.github.spotbugs.annotations")) testRuntimeOnly(gav("org.mockito.inline")) } } diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/module-info.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/module-info.java index 5a79d2195087..e15ba99dfbb7 100644 --- a/hedera-node/hedera-network-admin-service-impl/src/main/java/module-info.java +++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/module-info.java @@ -14,9 +14,9 @@ requires com.hedera.node.app.service.evm; requires com.hedera.node.app.service.token; requires com.hedera.node.config; - requires com.github.spotbugs.annotations; requires com.google.common; requires org.apache.logging.log4j; + requires static com.github.spotbugs.annotations; provides com.hedera.node.app.service.networkadmin.FreezeService with FreezeServiceImpl; diff --git a/hedera-node/hedera-network-admin-service/src/main/java/module-info.java b/hedera-node/hedera-network-admin-service/src/main/java/module-info.java index bdd9359629c7..68d11d332a84 100644 --- a/hedera-node/hedera-network-admin-service/src/main/java/module-info.java +++ b/hedera-node/hedera-network-admin-service/src/main/java/module-info.java @@ -6,5 +6,5 @@ requires transitive com.hedera.node.app.spi; requires transitive com.swirlds.common; - requires com.github.spotbugs.annotations; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-schedule-service-impl/src/main/java/module-info.java b/hedera-node/hedera-schedule-service-impl/src/main/java/module-info.java index 8118b1026b5c..a43d9227e10c 100644 --- 
a/hedera-node/hedera-schedule-service-impl/src/main/java/module-info.java +++ b/hedera-node/hedera-schedule-service-impl/src/main/java/module-info.java @@ -8,9 +8,9 @@ requires transitive com.hedera.pbj.runtime; requires transitive dagger; requires transitive javax.inject; - requires com.github.spotbugs.annotations; requires com.swirlds.jasperdb; requires org.apache.logging.log4j; + requires static com.github.spotbugs.annotations; exports com.hedera.node.app.service.schedule.impl to com.hedera.node.app.service.schedule.impl.test, diff --git a/hedera-node/hedera-schedule-service/src/main/java/module-info.java b/hedera-node/hedera-schedule-service/src/main/java/module-info.java index 832b7436877d..d989fc3d0d9f 100644 --- a/hedera-node/hedera-schedule-service/src/main/java/module-info.java +++ b/hedera-node/hedera-schedule-service/src/main/java/module-info.java @@ -5,5 +5,5 @@ requires transitive com.hedera.node.app.spi; requires transitive com.hedera.node.hapi; - requires com.github.spotbugs.annotations; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-smart-contract-service-impl/src/main/java/module-info.java b/hedera-node/hedera-smart-contract-service-impl/src/main/java/module-info.java index fd0d29a172c8..a2a4fc9a82a5 100644 --- a/hedera-node/hedera-smart-contract-service-impl/src/main/java/module-info.java +++ b/hedera-node/hedera-smart-contract-service-impl/src/main/java/module-info.java @@ -14,8 +14,8 @@ requires transitive tuweni.units; requires com.hedera.node.app.service.mono; requires com.hedera.node.app.service.token; - requires com.github.spotbugs.annotations; requires com.swirlds.jasperdb; + requires static com.github.spotbugs.annotations; provides com.hedera.node.app.service.contract.ContractService with ContractServiceImpl; diff --git a/hedera-node/hedera-smart-contract-service/src/main/java/module-info.java b/hedera-node/hedera-smart-contract-service/src/main/java/module-info.java index bf6893080a1a..9049e895a033 100644 --- a/hedera-node/hedera-smart-contract-service/src/main/java/module-info.java +++ b/hedera-node/hedera-smart-contract-service/src/main/java/module-info.java @@ -4,5 +4,5 @@ uses com.hedera.node.app.service.contract.ContractService; requires transitive com.hedera.node.app.spi; - requires com.github.spotbugs.annotations; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-token-service-impl/build.gradle.kts b/hedera-node/hedera-token-service-impl/build.gradle.kts index ff4534c0f2aa..4e8869c29738 100644 --- a/hedera-node/hedera-token-service-impl/build.gradle.kts +++ b/hedera-node/hedera-token-service-impl/build.gradle.kts @@ -34,6 +34,7 @@ dependencies { testImplementation(gav("org.mockito")) testImplementation(gav("org.mockito.junit.jupiter")) testImplementation(gav("com.swirlds.merkle")) + testCompileOnly(gav("com.github.spotbugs.annotations")) testRuntimeOnly(gav("org.mockito.inline")) } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/module-info.java b/hedera-node/hedera-token-service-impl/src/main/java/module-info.java index 993e546be307..3f5678bae74d 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/module-info.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/module-info.java @@ -8,7 +8,6 @@ requires transitive dagger; requires transitive javax.inject; requires com.hedera.node.app.service.evm; - requires com.github.spotbugs.annotations; requires com.google.common; requires com.google.protobuf; requires com.swirlds.config; @@ -16,6 +15,7 @@ 
requires org.apache.commons.lang3; requires org.slf4j; requires tuweni.bytes; + requires static com.github.spotbugs.annotations; provides com.hedera.node.app.service.token.TokenService with com.hedera.node.app.service.token.impl.TokenServiceImpl; diff --git a/hedera-node/hedera-token-service/src/main/java/module-info.java b/hedera-node/hedera-token-service/src/main/java/module-info.java index 15aada8995eb..1c38f35a0bf6 100644 --- a/hedera-node/hedera-token-service/src/main/java/module-info.java +++ b/hedera-node/hedera-token-service/src/main/java/module-info.java @@ -6,5 +6,5 @@ requires transitive com.hedera.node.app.spi; requires transitive com.hedera.node.hapi; requires com.hedera.pbj.runtime; - requires com.github.spotbugs.annotations; + requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-util-service-impl/src/main/java/module-info.java b/hedera-node/hedera-util-service-impl/src/main/java/module-info.java index 0fcf2acc4337..15a5f34c0d37 100644 --- a/hedera-node/hedera-util-service-impl/src/main/java/module-info.java +++ b/hedera-node/hedera-util-service-impl/src/main/java/module-info.java @@ -9,11 +9,11 @@ requires com.hedera.node.app.service.network.admin; requires com.hedera.node.config; requires com.hedera.node.hapi; - requires com.github.spotbugs.annotations; requires com.google.common; requires com.swirlds.common; requires com.swirlds.config; requires org.apache.logging.log4j; + requires static com.github.spotbugs.annotations; provides com.hedera.node.app.service.util.UtilService with UtilServiceImpl; diff --git a/hedera-node/hedera-util-service/src/main/java/module-info.java b/hedera-node/hedera-util-service/src/main/java/module-info.java index 483655f113b3..48a43b4a200a 100644 --- a/hedera-node/hedera-util-service/src/main/java/module-info.java +++ b/hedera-node/hedera-util-service/src/main/java/module-info.java @@ -4,5 +4,5 @@ uses com.hedera.node.app.service.util.UtilService; requires transitive com.hedera.node.app.spi; - requires com.github.spotbugs.annotations; + requires static com.github.spotbugs.annotations; } From 2bff52e94db65fdacdb8e3272cd590e0a841efbd Mon Sep 17 00:00:00 2001 From: Cody Littley <56973212+cody-littley@users.noreply.github.com> Date: Thu, 15 Jun 2023 09:29:16 -0500 Subject: [PATCH 36/70] More stable maxRateTest() (#7112) Signed-off-by: Cody Littley --- .../common/test/threading/StoppableThreadTests.java | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/threading/StoppableThreadTests.java b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/threading/StoppableThreadTests.java index 0e3fb368f685..b905682519c5 100644 --- a/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/threading/StoppableThreadTests.java +++ b/platform-sdk/swirlds-unit-tests/common/swirlds-common-test/src/test/java/com/swirlds/common/test/threading/StoppableThreadTests.java @@ -451,10 +451,7 @@ void rateConfigurationTest() { @DisplayName("Max Rate Test") void maxRateTest() throws InterruptedException { final AtomicInteger counter = new AtomicInteger(0); - final InterruptableRunnable work = () -> { - MILLISECONDS.sleep(1); - counter.getAndIncrement(); - }; + final InterruptableRunnable work = counter::getAndIncrement; final StoppableThread thread0 = new StoppableThreadConfiguration<>(getStaticThreadManager()) .setMaximumRate(5) @@ -488,7 +485,7 @@ 
void maxRateTest() throws InterruptedException { SECONDS.sleep(1); thread2.stop(); assertTrue( - counter.get() > 450 && counter.get() < 550, + counter.get() > 400 && counter.get() < 550, "counter should have value close to 500, has " + counter.get() + " instead"); } From 73472776a7e81cdc6e1a0069af3946065698b7b2 Mon Sep 17 00:00:00 2001 From: Cody Littley <56973212+cody-littley@users.noreply.github.com> Date: Thu, 15 Jun 2023 09:31:02 -0500 Subject: [PATCH 37/70] Fix bug where round has no transactions. (#7114) Signed-off-by: Cody Littley --- .../demo/consistency/ConsistencyTestingToolRound.java | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/platform-sdk/platform-apps/tests/ConsistencyTestingTool/src/main/java/com/swirlds/demo/consistency/ConsistencyTestingToolRound.java b/platform-sdk/platform-apps/tests/ConsistencyTestingTool/src/main/java/com/swirlds/demo/consistency/ConsistencyTestingToolRound.java index 31c05ba9c826..8e5396c91f3d 100644 --- a/platform-sdk/platform-apps/tests/ConsistencyTestingTool/src/main/java/com/swirlds/demo/consistency/ConsistencyTestingToolRound.java +++ b/platform-sdk/platform-apps/tests/ConsistencyTestingTool/src/main/java/com/swirlds/demo/consistency/ConsistencyTestingToolRound.java @@ -154,11 +154,12 @@ public String toString() { builder.append(TRANSACTIONS_STRING); builder.append("["); - transactionsContents.forEach(transaction -> { - builder.append(transaction); - builder.append(LIST_ELEMENT_SEPARATOR); - }); - builder.delete(builder.length() - LIST_ELEMENT_SEPARATOR.length(), builder.length()); + for (int index = 0; index < transactionsContents.size(); index++) { + builder.append(transactionsContents.get(index)); + if (index != transactionsContents.size() - 1) { + builder.append(LIST_ELEMENT_SEPARATOR); + } + } builder.append("]\n"); return builder.toString(); From 56a83a133fd9bd9c7eeabe4037142aa538270bd0 Mon Sep 17 00:00:00 2001 From: Matt Hess Date: Thu, 15 Jun 2023 08:37:16 -0600 Subject: [PATCH 38/70] Implement token account wipe (#7086) Signed-off-by: Matt Hess --- .../dispatcher/MonoTransactionDispatcher.java | 6 + .../MonoTransactionDispatcherTest.java | 12 + .../token/impl/handlers/BaseTokenHandler.java | 18 +- .../handlers/TokenAccountWipeHandler.java | 159 ++- .../token/impl/handlers/TokenBurnHandler.java | 26 +- .../impl/handlers/TokenDeleteHandler.java | 3 +- .../token/impl/util/IdConvenienceUtils.java | 79 -- .../impl/util/TokenRelListCalculator.java | 6 +- .../TokenSupplyChangeOpsValidator.java | 63 +- .../handlers/TokenAccountWipeHandlerTest.java | 957 +++++++++++++++++- .../test/handlers/TokenBurnHandlerTest.java | 85 +- .../test/handlers/TokenDeleteHandlerTest.java | 11 +- ...TokenDissociateFromAccountHandlerTest.java | 7 +- .../test/handlers/util/ParityTestBase.java | 42 +- .../test/util/IdConvenienceUtilsTest.java | 75 -- 15 files changed, 1284 insertions(+), 265 deletions(-) delete mode 100644 hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/IdConvenienceUtils.java delete mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/IdConvenienceUtilsTest.java diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcher.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcher.java index 66367c6e0f20..ad0702bfd98a 100644 --- 
a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcher.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcher.java @@ -100,6 +100,7 @@ public void dispatchHandle(@NonNull final HandleContext context) { case TOKEN_FEE_SCHEDULE_UPDATE -> dispatchTokenFeeScheduleUpdate(context); case TOKEN_DELETION -> dispatchTokenDeletion(context); case TOKEN_BURN -> dispatchTokenBurn(context); + case TOKEN_WIPE -> dispatchTokenWipe(context); // ------------------ admin -------------------------- case FREEZE -> dispatchFreeze(context); // ------------------ util -------------------------- @@ -265,6 +266,11 @@ private void dispatchTokenBurn(@NonNull final HandleContext handleContext) { handler.handle(handleContext); } + private void dispatchTokenWipe(@NonNull final HandleContext handleContext) { + final var handler = handlers.tokenAccountWipeHandler(); + handler.handle(handleContext); + } + private void dispatchCryptoApproveAllowance(final HandleContext handleContext) { final var handler = handlers.cryptoApproveAllowanceHandler(); handler.handle(handleContext); diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java index f55cfbf32bb5..d4f737e8697a 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/dispatcher/MonoTransactionDispatcherTest.java @@ -671,6 +671,18 @@ void dispatchesTokenCreateAsExpected() { verify(handleContext).body(); } + @Test + void dispatchesTokenWipeAsExpected() { + final var txnBody = TransactionBody.newBuilder() + .tokenWipe(TokenWipeAccountTransactionBody.DEFAULT) + .build(); + given(handleContext.body()).willReturn(txnBody); + + dispatcher.dispatchHandle(handleContext); + + verify(handleContext).body(); + } + @Test void dispatchesCryptoCreateAsExpected() { final var txnBody = TransactionBody.newBuilder() diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java index 50bd9c948c58..0fce3d4d91c1 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java @@ -16,9 +16,13 @@ package com.hedera.node.app.service.token.impl.handlers; -import static com.hedera.hapi.node.base.ResponseCodeEnum.*; +import static com.hedera.hapi.node.base.ResponseCodeEnum.FAIL_INVALID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INSUFFICIENT_TOKEN_BALANCE; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_MINT_AMOUNT; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TREASURY_ACCOUNT_FOR_TOKEN; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_SUPPLY_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_MAX_SUPPLY_REACHED; import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; -import static 
com.hedera.node.app.service.token.impl.util.IdConvenienceUtils.isValidTokenNum; import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; import static java.util.Objects.requireNonNull; @@ -285,4 +289,14 @@ private List createTokenRelsToAccount( public static TokenID asToken(final long num) { return TokenID.newBuilder().tokenNum(num).build(); } + + /** + * Determines if a given token number is valid + * + * @param tokenNum the token number to check + * @return true if the token number is valid + */ + public static boolean isValidTokenNum(final long tokenNum) { + return tokenNum > 0; + } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAccountWipeHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAccountWipeHandler.java index 75b4eb6c29f1..ede617a90c81 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAccountWipeHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAccountWipeHandler.java @@ -16,18 +16,45 @@ package com.hedera.node.app.service.token.impl.handlers; +import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_DOES_NOT_OWN_WIPED_NFT; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_NFT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_WIPING_AMOUNT; +import static com.hedera.node.app.service.token.impl.validators.TokenSupplyChangeOpsValidator.verifyTokenInstanceAmounts; +import static com.hedera.node.app.spi.workflows.HandleException.validateFalse; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateTruePreCheck; import static java.util.Objects.requireNonNull; +import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.HederaFunctionality; +import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TokenType; +import com.hedera.hapi.node.state.token.Account; +import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.state.token.TokenRelation; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.service.token.ReadableAccountStore; +import com.hedera.node.app.service.token.ReadableTokenRelationStore; import com.hedera.node.app.service.token.ReadableTokenStore; +import com.hedera.node.app.service.token.impl.WritableAccountStore; +import com.hedera.node.app.service.token.impl.WritableNftStore; +import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; +import com.hedera.node.app.service.token.impl.WritableTokenStore; +import com.hedera.node.app.service.token.impl.util.TokenHandlerHelper; +import com.hedera.node.app.service.token.impl.validators.TokenSupplyChangeOpsValidator; +import com.hedera.node.app.spi.validation.ExpiryValidator; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.PreHandleContext; import com.hedera.node.app.spi.workflows.TransactionHandler; import 
edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.LinkedHashSet; +import java.util.List; import javax.inject.Inject; import javax.inject.Singleton; @@ -36,10 +63,13 @@ * HederaFunctionality#TOKEN_ACCOUNT_WIPE}. */ @Singleton -public class TokenAccountWipeHandler implements TransactionHandler { +public final class TokenAccountWipeHandler implements TransactionHandler { + @NonNull + private final TokenSupplyChangeOpsValidator validator; + @Inject - public TokenAccountWipeHandler() { - // Exists for injection + public TokenAccountWipeHandler(@NonNull final TokenSupplyChangeOpsValidator validator) { + this.validator = validator; } @Override @@ -54,8 +84,129 @@ public void preHandle(@NonNull final PreHandleContext context) throws PreCheckEx } } + @Override + public void pureChecks(@NonNull final TransactionBody txn) throws PreCheckException { + final var op = txn.tokenWipeOrThrow(); + + // All the pure checks for burning a token must also be checked for wiping a token + verifyTokenInstanceAmounts(op.amount(), op.serialNumbers(), op.hasToken(), INVALID_WIPING_AMOUNT); + + validateTruePreCheck(op.hasAccount(), INVALID_ACCOUNT_ID); + } + @Override public void handle(@NonNull final HandleContext context) throws HandleException { - throw new UnsupportedOperationException("Not implemented"); + requireNonNull(context); + // Set up the stores and helper objects needed + final var accountStore = context.writableStore(WritableAccountStore.class); + final var tokenStore = context.writableStore(WritableTokenStore.class); + final var tokenRelStore = context.writableStore(WritableTokenRelationStore.class); + final var nftStore = context.writableStore(WritableNftStore.class); + final var expiryValidator = context.expiryValidator(); + + // Assign relevant variables + final var txn = context.body(); + final var op = txn.tokenWipeOrThrow(); + final var accountId = op.account(); + final var tokenId = op.token(); + final var fungibleWipeCount = op.amount(); + // Wrapping the serial nums this way de-duplicates the serial nums: + final var nftSerialNums = new ArrayList<>(new LinkedHashSet<>(op.serialNumbers())); + + // Validate the semantics of the transaction + final var validated = validateSemantics( + accountId, + tokenId, + fungibleWipeCount, + nftSerialNums, + accountStore, + tokenStore, + tokenRelStore, + expiryValidator); + final var acct = validated.account(); + final var token = validated.token(); + + final long newTotalSupply; + final long newAccountBalance; + final Account.Builder updatedAcctBuilder = acct.copyBuilder(); + if (token.tokenType() == TokenType.FUNGIBLE_COMMON) { + // Check that the new total supply will not be negative + newTotalSupply = token.totalSupply() - fungibleWipeCount; + validateTrue(newTotalSupply >= 0, INVALID_WIPING_AMOUNT); + + // Check that the new token balance will not be negative + newAccountBalance = validated.accountTokenRel().balance() - fungibleWipeCount; + validateTrue(newAccountBalance >= 0, INVALID_WIPING_AMOUNT); + } else { + // Check that the new total supply will not be negative + newTotalSupply = token.totalSupply() - nftSerialNums.size(); + validateTrue(newTotalSupply >= 0, INVALID_WIPING_AMOUNT); + + // Validate that there is at least one NFT to wipe + validateFalse(nftSerialNums.isEmpty(), INVALID_WIPING_AMOUNT); + + // Load and validate the nfts + for (final Long nftSerial : nftSerialNums) { + final var nft = nftStore.get(tokenId, nftSerial); + validateTrue(nft != null, INVALID_NFT_ID); + + final var nftOwner = 
nft.ownerNumber(); + validateTrue(nftOwner == accountId.accountNum(), ACCOUNT_DOES_NOT_OWN_WIPED_NFT); + } + + // Check that the new token balance will not be negative + newAccountBalance = validated.accountTokenRel().balance() - nftSerialNums.size(); + validateTrue(newAccountBalance >= 0, INVALID_WIPING_AMOUNT); + + // Update the NFT count for the account + updatedAcctBuilder.numberOwnedNfts(acct.numberOwnedNfts() - nftSerialNums.size()); + + // Remove the NFTs + nftSerialNums.forEach(serialNum -> nftStore.remove(tokenId, serialNum)); + } + + // Finally, record all the changes + if (newAccountBalance == 0) { + updatedAcctBuilder.numberPositiveBalances(acct.numberPositiveBalances() - 1); + } + accountStore.put(updatedAcctBuilder.build()); + tokenStore.put(token.copyBuilder().totalSupply(newTotalSupply).build()); + tokenRelStore.put(validated + .accountTokenRel() + .copyBuilder() + .balance(newAccountBalance) + .build()); + // Note: record(s) for this operation will be built in a token finalization method so that we keep track of all + // changes for records + } + + private ValidationResult validateSemantics( + @NonNull final AccountID accountId, + @NonNull final TokenID tokenId, + final long fungibleWipeCount, + @NonNull final List nftSerialNums, + @NonNull final ReadableAccountStore accountStore, + @NonNull final ReadableTokenStore tokenStore, + @NonNull final ReadableTokenRelationStore tokenRelStore, + @NonNull final ExpiryValidator expiryValidator) { + validateTrue(fungibleWipeCount > -1, INVALID_WIPING_AMOUNT); + + final var account = + TokenHandlerHelper.getIfUsable(accountId, accountStore, expiryValidator, INVALID_ACCOUNT_ID); + + validator.validateWipe(fungibleWipeCount, nftSerialNums); + + final var token = TokenHandlerHelper.getIfUsable(tokenId, tokenStore); + validateTrue(token.wipeKey() != null, ResponseCodeEnum.TOKEN_HAS_NO_WIPE_KEY); + + final var accountRel = TokenHandlerHelper.getIfUsable(accountId, tokenId, tokenRelStore); + validateFalse( + token.treasuryAccountNumber() == accountRel.accountNumber(), + ResponseCodeEnum.CANNOT_WIPE_TOKEN_TREASURY_ACCOUNT); + + return new ValidationResult(account, token, accountRel); } + + private record ValidationResult( + @NonNull Account account, @NonNull Token token, @NonNull TokenRelation accountTokenRel) {} } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java index 8068380e294d..d401316746cb 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java @@ -19,11 +19,9 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.*; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_NFT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; -import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TRANSACTION_BODY; import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.service.token.impl.validators.TokenSupplyChangeOpsValidator.verifyTokenInstanceAmounts; import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; -import static com.hedera.node.app.spi.workflows.PreCheckException.validateFalsePreCheck; 
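The heart of the new TokenAccountWipeHandler shown above is supply and balance arithmetic guarded by INVALID_WIPING_AMOUNT checks: a fungible wipe subtracts the wiped amount from both the token's total supply and the account's token-relation balance, and rejects the operation if either result would be negative. A condensed sketch of that fungible branch follows; the method name and the bare long inputs are simplifications, since the real handler reads and writes the token, relation, and account stores.

// Condensed fungible-wipe arithmetic mirroring TokenAccountWipeHandler.handle():
// both the new total supply and the new relation balance must stay non-negative.
static long[] wipeFungible(final long totalSupply, final long relBalance, final long wipeAmount) {
    if (wipeAmount < 0) {
        throw new IllegalArgumentException("INVALID_WIPING_AMOUNT");
    }
    final long newTotalSupply = totalSupply - wipeAmount;
    if (newTotalSupply < 0) {
        throw new IllegalArgumentException("INVALID_WIPING_AMOUNT");
    }
    final long newBalance = relBalance - wipeAmount;
    if (newBalance < 0) {
        throw new IllegalArgumentException("INVALID_WIPING_AMOUNT");
    }
    // The handler then persists these two values and, when newBalance reaches zero,
    // decrements the account's numberPositiveBalances counter.
    return new long[] {newTotalSupply, newBalance};
}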
-import static com.hedera.node.app.spi.workflows.PreCheckException.validateTruePreCheck; import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.HederaFunctionality; @@ -63,7 +61,6 @@ public final class TokenBurnHandler extends BaseTokenHandler implements Transact @Inject public TokenBurnHandler(@NonNull final TokenSupplyChangeOpsValidator validator) { - // Exists for injection this.validator = requireNonNull(validator); } @@ -84,22 +81,7 @@ public void preHandle(@NonNull final PreHandleContext context) throws PreCheckEx @Override public void pureChecks(@NonNull final TransactionBody txn) throws PreCheckException { final var op = txn.tokenBurnOrThrow(); - final var fungibleCount = op.amount(); - final var serialNums = op.serialNumbers(); - - validateTruePreCheck(op.hasToken(), INVALID_TOKEN_ID); - - // If a positive fungible amount is present, the NFT serial numbers must be empty - validateFalsePreCheck(fungibleCount > 0 && !serialNums.isEmpty(), INVALID_TRANSACTION_BODY); - - validateFalsePreCheck(fungibleCount < 0, INVALID_TOKEN_BURN_AMOUNT); - - // Validate the NFT serial numbers - if (fungibleCount < 1 && !serialNums.isEmpty()) { - for (final var serialNumber : op.serialNumbers()) { - validateTruePreCheck(serialNumber > 0, INVALID_NFT_ID); - } - } + verifyTokenInstanceAmounts(op.amount(), op.serialNumbers(), op.hasToken(), INVALID_TOKEN_BURN_AMOUNT); } @Override @@ -166,8 +148,8 @@ private ValidationResult validateSemantics( @NonNull final TokenID tokenId, final long fungibleBurnCount, @NonNull final List nftSerialNums, - final ReadableTokenStore tokenStore, - final ReadableTokenRelationStore tokenRelStore) { + @NonNull final ReadableTokenStore tokenStore, + @NonNull final ReadableTokenRelationStore tokenRelStore) { validateTrue(fungibleBurnCount >= 0, INVALID_TOKEN_BURN_AMOUNT); validator.validateBurn(fungibleBurnCount, nftSerialNums); diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDeleteHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDeleteHandler.java index d8dd36aeaebb..606620fb2b4e 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDeleteHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenDeleteHandler.java @@ -29,7 +29,6 @@ import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.service.token.impl.WritableTokenStore; -import com.hedera.node.app.service.token.impl.util.IdConvenienceUtils; import com.hedera.node.app.service.token.impl.util.TokenHandlerHelper; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; @@ -86,7 +85,7 @@ public void handle(@NonNull final HandleContext context) throws HandleException tokenStore.put(updatedToken); // Update the token treasury account's treasury titles count - final var account = accountStore.get(IdConvenienceUtils.fromAccountNum(token.treasuryAccountNumber())); + final var account = accountStore.get(BaseCryptoHandler.asAccount(token.treasuryAccountNumber())); final var updatedAccount = account.copyBuilder() .numberTreasuryTitles(account.numberTreasuryTitles() - 1) .build(); diff --git 
a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/IdConvenienceUtils.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/IdConvenienceUtils.java deleted file mode 100644 index 6071c0e45210..000000000000 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/IdConvenienceUtils.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.node.app.service.token.impl.util; - -import com.hedera.hapi.node.base.AccountID; -import com.hedera.hapi.node.base.TokenID; -import edu.umd.cs.findbugs.annotations.NonNull; - -public final class IdConvenienceUtils { - - private IdConvenienceUtils() { - throw new UnsupportedOperationException("Utility Class"); - } - - /** - * Constructs an {@code AccountID} from the given account number - * - * @param accountNum the token number to construct a {@code AccountID} from - * @return the constructed {@code AccountID} - * @throws IllegalArgumentException if the given account number is not valid - */ - @NonNull - public static AccountID fromAccountNum(final long accountNum) { - if (!isValidAccountNum(accountNum)) { - throw new IllegalArgumentException("Account number must be positive"); - } - return AccountID.newBuilder().accountNum(accountNum).build(); - } - - /** - * Constructs a {@code TokenID} from the given token number - * - * @param tokenNum the token number to construct a {@code TokenID} from - * @return the constructed {@code TokenID} - * @throws IllegalArgumentException if the given token number is not valid - */ - @NonNull - public static TokenID fromTokenNum(final long tokenNum) { - if (!isValidTokenNum(tokenNum)) { - throw new IllegalArgumentException("Token number must be positive"); - } - - return TokenID.newBuilder().tokenNum(tokenNum).build(); - } - - /** - * Determines if a given token number is valid - * - * @param tokenNum the token number to check - * @return true if the token number is valid - */ - public static boolean isValidTokenNum(final long tokenNum) { - return tokenNum > 0; - } - - /** - * Determines if a given account number is valid - * - * @param accountNum the account number to check - * @return true if the account number is valid - */ - public static boolean isValidAccountNum(final long accountNum) { - return accountNum > 0; - } -} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenRelListCalculator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenRelListCalculator.java index 66dc3f40062e..858f35c130a3 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenRelListCalculator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenRelListCalculator.java 
@@ -16,7 +16,6 @@ package com.hedera.node.app.service.token.impl.util; -import static com.hedera.node.app.service.token.impl.util.IdConvenienceUtils.fromAccountNum; import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.AccountID; @@ -24,6 +23,7 @@ import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.TokenRelation; import com.hedera.node.app.service.token.ReadableTokenRelationStore; +import com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.ArrayList; @@ -119,7 +119,7 @@ public TokenRelsRemovalResult removeTokenRels( final var cleanedTokenRelsToDelete = filterNullsAndDuplicates(tokenRelsToDelete); final var currentHeadTokenNum = account.headTokenNumber(); - final var accountId = fromAccountNum(account.accountNumber()); + final var accountId = BaseCryptoHandler.asAccount(account.accountNumber()); // We'll create this mapping of (tokenId -> tokenRel) to make it easier to check if a token rel is in the list // of token rels to delete. It's only for ease of lookup and doesn't affect the algorithm @@ -271,7 +271,7 @@ private long calculateHeadTokenAfterDeletions( @NonNull final Account account, @NonNull final Map updatedTokenRels, @NonNull final Map tokenRelsToDeleteByTokenId) { - final var accountId = IdConvenienceUtils.fromAccountNum(account.accountNumber()); + final var accountId = BaseCryptoHandler.asAccount(account.accountNumber()); // Calculate the new head token number by walking the linked token rels until we find a token rel that is not in // the list of token rels to delete diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java index 607bc7d3f8e5..d7c0b2796937 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java @@ -16,11 +16,20 @@ package com.hedera.node.app.service.token.impl.validators; -import static com.hedera.hapi.node.base.ResponseCodeEnum.*; +import static com.hedera.hapi.node.base.ResponseCodeEnum.BATCH_SIZE_LIMIT_EXCEEDED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_NFT_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TRANSACTION_BODY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.METADATA_TOO_LONG; +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateFalsePreCheck; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateTruePreCheck; import static java.util.Objects.requireNonNull; +import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.config.ConfigProvider; import com.hedera.node.config.data.TokensConfig; import com.hedera.pbj.runtime.io.buffer.Bytes; @@ -70,8 +79,51 
@@ public void validateBurn(final long fungibleCount, final List nftSerialNum validateCommon(fungibleCount, nftSerialNums.size(), TokensConfig::nftsMaxBatchSizeBurn); } - @SuppressWarnings("unused") - // @future('6389'): This method will be used when token wipe is implemented + /** + * Checks that the transaction input data for a token operation is valid, specifically for operations + * that change the supply of a token (i.e. a token's "instances"). + * + *

+ * This method is static, so we can call it from handler pure checks methods without relying on any object instance + + * @param fungibleAmount the amount of fungible tokens to burn or wipe + * @param serialNums the list of NFT serial numbers to burn or wipe + * @param hasToken whether the transaction body has a token ID + * @param invalidAmountResponseCode the response code to throw if the {@code fungibleAmount} param is invalid + * @throws PreCheckException if the transaction data is invalid + */ + public static void verifyTokenInstanceAmounts( + final long fungibleAmount, + final @NonNull List serialNums, + final boolean hasToken, + @NonNull final ResponseCodeEnum invalidAmountResponseCode) + throws PreCheckException { + validateTruePreCheck(hasToken, INVALID_TOKEN_ID); + + // If a positive fungible amount is present, the NFT serial numbers must be empty + validateFalsePreCheck(fungibleAmount > 0 && !serialNums.isEmpty(), INVALID_TRANSACTION_BODY); + + // The fungible amount must not be negative, regardless of use case + validateFalsePreCheck(fungibleAmount < 0, invalidAmountResponseCode); + + // If no fungible amount is present, at least one NFT serial number must be present + validateFalsePreCheck(fungibleAmount == 0 && serialNums.isEmpty(), invalidAmountResponseCode); + + // Validate the NFT serial numbers + if (fungibleAmount < 1 && !serialNums.isEmpty()) { + for (final var serialNumber : serialNums) { + validateTruePreCheck(serialNumber > 0, INVALID_NFT_ID); + } + } + } + + /** + * Validate the transaction data for a token wipe operation + * + * @param fungibleCount the number of fungible tokens to wipe + * @param nftSerialNums the list of NFT serial numbers to wipe + * @throws HandleException if the transaction data is invalid + */ public void validateWipe(final long fungibleCount, final List nftSerialNums) { validateCommon(fungibleCount, nftSerialNums.size(), TokensConfig::nftsMaxBatchSizeWipe); } @@ -90,7 +142,7 @@ private void validateCommon( // Get needed configurations final var nftsAreEnabled = tokensConfig.nftsAreEnabled(); final var maxNftBatchOpSize = batchSizeGetter.applyAsInt(tokensConfig); - // validate nft count and fungible count are valid + // Validate the NFT count and fungible count are valid validateCounts(nftCount, fungibleCount, nftsAreEnabled, maxNftBatchOpSize); } @@ -103,6 +155,9 @@ private void validateCommon( */ private void validateCounts( final int nftCount, final long fungibleCount, final boolean nftsAreEnabled, final long maxBatchSize) { + if (fungibleCount > 0) { + validateTrue(fungibleCount <= maxBatchSize, BATCH_SIZE_LIMIT_EXCEEDED); + } if (nftCount > 0) { validateTrue(nftsAreEnabled, NOT_SUPPORTED); } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAccountWipeHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAccountWipeHandlerTest.java index 678d4f4f1992..ad754a3e22bd 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAccountWipeHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAccountWipeHandlerTest.java @@ -16,57 +16,968 @@ package com.hedera.node.app.service.token.impl.test.handlers; +import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_DOES_NOT_OWN_WIPED_NFT; +import static 
com.hedera.hapi.node.base.ResponseCodeEnum.BATCH_SIZE_LIMIT_EXCEEDED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.CANNOT_WIPE_TOKEN_TREASURY_ACCOUNT; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_NFT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TRANSACTION_BODY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_WIPING_AMOUNT; +import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_WIPE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_IS_PAUSED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_NOT_ASSOCIATED_TO_ACCOUNT; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_WAS_DELETED; import static com.hedera.node.app.spi.fixtures.Assertions.assertThrowsPreCheck; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; import static com.hedera.test.factories.scenarios.TokenWipeScenarios.VALID_WIPE_WITH_EXTANT_TOKEN; import static com.hedera.test.factories.scenarios.TokenWipeScenarios.WIPE_FOR_TOKEN_WITHOUT_KEY; import static com.hedera.test.factories.scenarios.TokenWipeScenarios.WIPE_WITH_MISSING_TOKEN; import static com.hedera.test.factories.scenarios.TxnHandlingScenario.TOKEN_WIPE_KT; import static com.hedera.test.factories.txns.SignedTxnFactory.DEFAULT_PAYER_KT; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.notNull; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.lenient; +import static org.mockito.Mockito.mock; +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TokenType; +import com.hedera.hapi.node.base.TransactionID; +import com.hedera.hapi.node.state.common.UniqueTokenId; +import com.hedera.hapi.node.state.token.Account; +import com.hedera.hapi.node.state.token.Nft; +import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.state.token.TokenRelation; +import com.hedera.hapi.node.token.TokenBurnTransactionBody; +import com.hedera.hapi.node.token.TokenWipeAccountTransactionBody; +import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.service.token.ReadableTokenStore; +import com.hedera.node.app.service.token.impl.WritableAccountStore; +import com.hedera.node.app.service.token.impl.WritableNftStore; +import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; +import com.hedera.node.app.service.token.impl.WritableTokenStore; +import com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler; +import com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler; import com.hedera.node.app.service.token.impl.handlers.TokenAccountWipeHandler; import com.hedera.node.app.service.token.impl.test.handlers.util.ParityTestBase; +import com.hedera.node.app.service.token.impl.validators.TokenSupplyChangeOpsValidator; import 
com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; +import com.hedera.node.app.spi.validation.ExpiryValidator; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.VersionedConfiguration; +import com.hedera.node.config.data.TokensConfig; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; class TokenAccountWipeHandlerTest extends ParityTestBase { - private final TokenAccountWipeHandler subject = new TokenAccountWipeHandler(); + private static final AccountID ACCOUNT_4680 = BaseCryptoHandler.asAccount(4680); + private static final AccountID TREASURY_ACCOUNT_9876 = BaseCryptoHandler.asAccount(9876); + private static final TokenID TOKEN_531 = BaseTokenHandler.asToken(531); + private final ConfigProvider configProvider = mock(ConfigProvider.class); - @Test - void tokenWipeWithValidExtantTokenScenario() throws PreCheckException { - final var theTxn = txnFrom(VALID_WIPE_WITH_EXTANT_TOKEN); + private final TokenSupplyChangeOpsValidator validator = new TokenSupplyChangeOpsValidator(configProvider); + private final TokenAccountWipeHandler subject = new TokenAccountWipeHandler(validator); - final var context = new FakePreHandleContext(readableAccountStore, theTxn); - context.registerStore(ReadableTokenStore.class, readableTokenStore); - subject.preHandle(context); + @Nested + class PureChecks { + @SuppressWarnings("DataFlowIssue") + @Test + void nullArgsThrows() { + assertThatThrownBy(() -> subject.pureChecks(null)).isInstanceOf(NullPointerException.class); + } - assertEquals(1, context.requiredNonPayerKeys().size()); - assertThat(context.requiredNonPayerKeys(), contains(TOKEN_WIPE_KT.asPbjKey())); + @Test + void noWipeTxnPresent() { + final var nonWipeTxnBody = TokenBurnTransactionBody.newBuilder(); + final var txn = TransactionBody.newBuilder() + .transactionID( + TransactionID.newBuilder().accountID(ACCOUNT_4680).build()) + .tokenBurn(nonWipeTxnBody) + .build(); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)).isInstanceOf(NullPointerException.class); + } + + @Test + void noAccountIdPresent() { + final var txn = newWipeTxn(null, TOKEN_531, 1); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_ACCOUNT_ID)); + } + + @Test + void noTokenPresent() { + final var txn = newWipeTxn(ACCOUNT_4680, null, 1); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_TOKEN_ID)); + } + + @Test + void fungibleAndNonFungibleGiven() { + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 1, 1L); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_TRANSACTION_BODY)); + } + + @Test + void nonPositiveFungibleAmountGiven() { + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, -1); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_WIPING_AMOUNT)); + } + + @Test + void emptyNftSerialNumbers() { + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 0); + Assertions.assertThatThrownBy(() -> 
subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_WIPING_AMOUNT)); + } + + @Test + void invalidNftSerialNumber() { + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 0, 1L, 2L, 0L); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_NFT_ID)); + } } - @Test - void tokenWipeWithMissingTokenScenario() throws PreCheckException { - final var theTxn = txnFrom(WIPE_WITH_MISSING_TOKEN); + @Nested + // Tests that check prehandle parity with old prehandle code + class PreHandle { + @SuppressWarnings("DataFlowIssue") + @Test + void nullArgsThrows() { + assertThatThrownBy(() -> subject.preHandle(null)).isInstanceOf(NullPointerException.class); + } + + @Test + void tokenWipeWithValidExtantTokenScenario() throws PreCheckException { + final var theTxn = txnFrom(VALID_WIPE_WITH_EXTANT_TOKEN); + + final var context = new FakePreHandleContext(readableAccountStore, theTxn); + context.registerStore(ReadableTokenStore.class, readableTokenStore); + subject.preHandle(context); + + assertEquals(1, context.requiredNonPayerKeys().size()); + assertThat(context.requiredNonPayerKeys(), contains(TOKEN_WIPE_KT.asPbjKey())); + } + + @Test + void tokenWipeWithMissingTokenScenario() throws PreCheckException { + final var theTxn = txnFrom(WIPE_WITH_MISSING_TOKEN); + + final var context = new FakePreHandleContext(readableAccountStore, theTxn); + context.registerStore(ReadableTokenStore.class, readableTokenStore); + assertThrowsPreCheck(() -> subject.preHandle(context), INVALID_TOKEN_ID); + } - final var context = new FakePreHandleContext(readableAccountStore, theTxn); - context.registerStore(ReadableTokenStore.class, readableTokenStore); - assertThrowsPreCheck(() -> subject.preHandle(context), INVALID_TOKEN_ID); + @Test + void tokenWipeWithoutKeyScenario() throws PreCheckException { + final var theTxn = txnFrom(WIPE_FOR_TOKEN_WITHOUT_KEY); + + final var context = new FakePreHandleContext(readableAccountStore, theTxn); + context.registerStore(ReadableTokenStore.class, readableTokenStore); + subject.preHandle(context); + + assertEquals(context.payerKey(), DEFAULT_PAYER_KT.asPbjKey()); + assertEquals(0, context.requiredNonPayerKeys().size()); + } } - @Test - void tokenWipeWithoutKeyScenario() throws PreCheckException { - final var theTxn = txnFrom(WIPE_FOR_TOKEN_WITHOUT_KEY); + @Nested + @ExtendWith(MockitoExtension.class) + class Handle { + @Mock + private ExpiryValidator validator; + + private WritableTokenStore writableTokenStore; + private WritableNftStore writableNftStore; + + @SuppressWarnings("DataFlowIssue") + @Test + void nullArgsThrows() { + assertThatThrownBy(() -> subject.handle(null)).isInstanceOf(NullPointerException.class); + } + + @Test + void invalidFungibleAmount() { + mockConfig(); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, -1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_WIPING_AMOUNT)); + } + + @Test + void accountDoesntExist() { + mockConfig(); + // Both stores are intentionally empty + writableAccountStore = newWritableStoreWithAccounts(); + writableTokenStore = newWritableStoreWithTokens(); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_ACCOUNT_ID)); + } + + @Test + void 
fungibleAmountExceedsBatchSize() { + final var maxBatchSize = 5; + mockConfig(maxBatchSize, true); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .build()); + writableTokenStore = newWritableStoreWithTokens(); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, maxBatchSize + 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(BATCH_SIZE_LIMIT_EXCEEDED)); + } + + @Test + void nftAmountExceedsBatchSize() { + mockConfig(2, true); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .build()); + writableTokenStore = newWritableStoreWithTokens(); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 0, 1L, 2L, 3L); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(BATCH_SIZE_LIMIT_EXCEEDED)); + } + + @Test + void tokenIdNotFound() { + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .build()); + writableTokenStore = newWritableStoreWithTokens(); // Intentionally empty + final var txn = newWipeTxn(ACCOUNT_4680, BaseTokenHandler.asToken(999), 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_TOKEN_ID)); + } + + @Test + void tokenIsDeleted() { + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .build()); + writableTokenStore = newWritableStoreWithTokens(newFungibleToken531(5) + .copyBuilder() + .deleted(true) // Intentionally deleted + .build()); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_WAS_DELETED)); + } + + @Test + void tokenIsPaused() { + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .build()); + writableTokenStore = newWritableStoreWithTokens(newNftToken531(5) + .copyBuilder() + .paused(true) // Intentionally paused + .build()); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_IS_PAUSED)); + } + + @Test + void tokenDoesntHaveWipeKey() { + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + 
.accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .build()); + final var totalFungibleSupply = 5; + writableTokenStore = newWritableStoreWithTokens(newFungibleToken531(totalFungibleSupply) + .copyBuilder() + .wipeKey((Key) null) // Intentionally missing wipe key + .build()); + writableTokenRelStore = newWritableStoreWithTokenRels(newAccount4680Token531Rel(totalFungibleSupply)); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, totalFungibleSupply + 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_HAS_NO_WIPE_KEY)); + } + + @Test + void accountRelDoesntExist() { + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .build()); + writableTokenStore = newWritableStoreWithTokens(newNftToken531(5)); + // Intentionally has no token rels: + writableTokenRelStore = newWritableStoreWithTokenRels(); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_NOT_ASSOCIATED_TO_ACCOUNT)); + } + + @Test + void givenAccountIsTreasury() { + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .build()); + writableTokenStore = newWritableStoreWithTokens(newNftToken531(5)); + writableTokenRelStore = newWritableStoreWithTokenRels(newTreasuryToken531Rel(0)); + final var txn = newWipeTxn(TREASURY_ACCOUNT_9876, TOKEN_531, 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(CANNOT_WIPE_TOKEN_TREASURY_ACCOUNT)); + } + + @Test + void fungibleAmountNegatesSupply() { + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .numberTreasuryTitles(0) + .numberPositiveBalances(1) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .numberTreasuryTitles(1) + .numberPositiveBalances(0) + .build()); + final var totalTokenSupply = 5; + writableTokenStore = newWritableStoreWithTokens(newFungibleToken531(totalTokenSupply)); + writableTokenRelStore = newWritableStoreWithTokenRels( + newTreasuryToken531Rel(0), newAccount4680Token531Rel(totalTokenSupply)); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, totalTokenSupply + 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_WIPING_AMOUNT)); + } + + @Test + void fungibleAmountNegatesBalance() { + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .numberTreasuryTitles(0) + .numberPositiveBalances(1) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + 
.numberTreasuryTitles(1) + .numberPositiveBalances(0) + .build()); + final var currentTokenBalance = 3; + writableTokenStore = newWritableStoreWithTokens(newFungibleToken531(currentTokenBalance + 2)); + writableTokenRelStore = newWritableStoreWithTokenRels( + newTreasuryToken531Rel(0), newAccount4680Token531Rel(currentTokenBalance)); + // The fungible amount is less than the total token supply, but one more than the token balance. Therefore, + // we should see an error thrown + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, currentTokenBalance + 1); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_WIPING_AMOUNT)); + } + + @Test + void fungibleAmountBurnedWithLeftoverAccountBalance() { + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .numberTreasuryTitles(0) + .numberPositiveBalances(1) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .numberTreasuryTitles(1) + .numberPositiveBalances(1) + .build()); + writableTokenStore = newWritableStoreWithTokens(newFungibleToken531(5)); + writableTokenRelStore = + newWritableStoreWithTokenRels(newTreasuryToken531Rel(1), newAccount4680Token531Rel(4)); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 3); + final var context = mockContext(txn); + + subject.handle(context); - final var context = new FakePreHandleContext(readableAccountStore, theTxn); - context.registerStore(ReadableTokenStore.class, readableTokenStore); - subject.preHandle(context); + final var acct = writableAccountStore.get(ACCOUNT_4680); + Assertions.assertThat(acct.numberPositiveBalances()).isEqualTo(1); + final var treasuryAcct = writableAccountStore.get(TREASURY_ACCOUNT_9876); + Assertions.assertThat(treasuryAcct.numberTreasuryTitles()).isEqualTo(1); + Assertions.assertThat(treasuryAcct.numberPositiveBalances()).isEqualTo(1); + final var token = writableTokenStore.get(TOKEN_531); + Assertions.assertThat(token.totalSupply()).isEqualTo(2); + final var acctTokenRel = writableTokenRelStore.get(ACCOUNT_4680, TOKEN_531); + Assertions.assertThat(acctTokenRel.balance()).isEqualTo(1); + final var treasuryTokenRel = writableTokenRelStore.get(TREASURY_ACCOUNT_9876, TOKEN_531); + // Nothing should've happened to the treasury token balance + Assertions.assertThat(treasuryTokenRel.balance()).isEqualTo(1); + } + + @Test + void fungibleAmountBurnedWithNoLeftoverAccountBalance() { + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .numberTreasuryTitles(0) + .numberPositiveBalances(1) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .numberTreasuryTitles(1) + .numberPositiveBalances(1) + .build()); + writableTokenStore = newWritableStoreWithTokens(newFungibleToken531(5)); + writableTokenRelStore = + newWritableStoreWithTokenRels(newTreasuryToken531Rel(3), newAccount4680Token531Rel(2)); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 2); + final var context = mockContext(txn); + + subject.handle(context); + + final var acct = writableAccountStore.get(ACCOUNT_4680); + Assertions.assertThat(acct.numberPositiveBalances()).isZero(); + final var treasuryAcct = writableAccountStore.get(TREASURY_ACCOUNT_9876); + 
Assertions.assertThat(treasuryAcct.numberTreasuryTitles()).isEqualTo(1); + Assertions.assertThat(treasuryAcct.numberPositiveBalances()).isEqualTo(1); + final var token = writableTokenStore.get(TOKEN_531); + Assertions.assertThat(token.totalSupply()).isEqualTo(3); + final var acctTokenRel = writableTokenRelStore.get(ACCOUNT_4680, TOKEN_531); + Assertions.assertThat(acctTokenRel.balance()).isZero(); + final var treasuryTokenRel = writableTokenRelStore.get(TREASURY_ACCOUNT_9876, TOKEN_531); + // Nothing should've happened to the treasury token balance + Assertions.assertThat(treasuryTokenRel.balance()).isEqualTo(3); + } + + @Test + void nftSerialNumDoesntExist() { + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .numberTreasuryTitles(1) + .build()); + writableTokenStore = newWritableStoreWithTokens(newNftToken531(10)); + writableTokenRelStore = newWritableStoreWithTokenRels(newAccount4680Token531Rel(0)); + writableNftStore = newWritableStoreWithNfts(); // Intentionally empty + + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 0, 1L); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_NFT_ID)); + } + + @Test + void nftNotOwnedByAccount() { + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .numberTreasuryTitles(1) + .build()); + writableTokenStore = newWritableStoreWithTokens(newNftToken531(10)); + writableTokenRelStore = newWritableStoreWithTokenRels(newAccount4680Token531Rel(0)); + writableNftStore = newWritableStoreWithNfts(Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_531.tokenNum()) + .serialNumber(1) + .build()) + .ownerNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .build()); + + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 0, 1L); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(ACCOUNT_DOES_NOT_OWN_WIPED_NFT)); + } + + @Test + void numNftSerialsNegatesSupply() { + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .numberTreasuryTitles(0) + .numberPositiveBalances(1) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .numberTreasuryTitles(1) + .numberPositiveBalances(0) + .build()); + final var totalTokenSupply = 1; + writableTokenStore = newWritableStoreWithTokens(newNftToken531(totalTokenSupply)); + writableTokenRelStore = newWritableStoreWithTokenRels( + newTreasuryToken531Rel(0), newAccount4680Token531Rel(totalTokenSupply)); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 0, 1L, 2L); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_WIPING_AMOUNT)); + } + + @Test + void nftSerialNumsIsEmpty() { + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + 
Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .numberTreasuryTitles(0) + .numberPositiveBalances(1) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .numberTreasuryTitles(1) + .numberPositiveBalances(0) + .build()); + writableTokenStore = newWritableStoreWithTokens(newNftToken531(5)); + writableTokenRelStore = + newWritableStoreWithTokenRels(newTreasuryToken531Rel(0), newAccount4680Token531Rel(5)); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 0); + final var context = mockContext(txn); + + assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_WIPING_AMOUNT)); + } + + @Test + void nftSerialsWipedWithLeftoverNftSerials() { + // i.e. leftover NFT serials remaining with the owning account + + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .numberTreasuryTitles(0) + .numberPositiveBalances(1) + .numberOwnedNfts(3) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .numberTreasuryTitles(1) + .numberPositiveBalances(1) + .numberOwnedNfts(1) + .build()); + writableTokenStore = newWritableStoreWithTokens(newNftToken531(10)); + writableTokenRelStore = + newWritableStoreWithTokenRels(newAccount4680Token531Rel(3), newTreasuryToken531Rel(1)); + writableNftStore = newWritableStoreWithNfts( + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_531.tokenNum()) + .serialNumber(1) + .build()) + .ownerNumber(0) // treasury owns this NFT + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_531.tokenNum()) + .serialNumber(2) + .build()) + .ownerNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_531.tokenNum()) + .serialNumber(3) + .build()) + .ownerNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_531.tokenNum()) + .serialNumber(4) + .build()) + .ownerNumber(ACCOUNT_4680.accountNumOrThrow()) + .build()); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 0, 2L, 3L); + final var context = mockContext(txn); + + subject.handle(context); + + final var acct = writableAccountStore.get(ACCOUNT_4680); + Assertions.assertThat(acct.numberOwnedNfts()).isEqualTo(1); + Assertions.assertThat(acct.numberPositiveBalances()).isEqualTo(1); + final var treasuryAcct = writableAccountStore.get(TREASURY_ACCOUNT_9876); + // The treasury still owns its NFT, so its counts shouldn't change + Assertions.assertThat(treasuryAcct.numberOwnedNfts()).isEqualTo(1); + Assertions.assertThat(treasuryAcct.numberTreasuryTitles()).isEqualTo(1); + Assertions.assertThat(treasuryAcct.numberPositiveBalances()).isEqualTo(1); + final var token = writableTokenStore.get(TOKEN_531); + // Verify that 2 NFTs were removed from the total supply + Assertions.assertThat(token.totalSupply()).isEqualTo(8); + final var tokenRel = writableTokenRelStore.get(ACCOUNT_4680, TOKEN_531); + Assertions.assertThat(tokenRel.balance()).isEqualTo(1); + // Verify the treasury's NFT wasn't removed + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 1))) + .isNotNull(); + // Verify that two of the account's NFTs were removed, and that the final one remains + Assertions.assertThat(writableNftStore.get(new 
UniqueTokenId(TOKEN_531.tokenNum(), 2))) + .isNull(); + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 3))) + .isNull(); + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 4))) + .isNotNull(); + } + + @Test + void nftSerialsWipedWithNoLeftoverNftSerials() { + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + .accountNumber(ACCOUNT_4680.accountNumOrThrow()) + .numberTreasuryTitles(0) + .numberPositiveBalances(1) + .numberOwnedNfts(3) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .numberTreasuryTitles(1) + .numberPositiveBalances(1) + .numberOwnedNfts(1) + .build()); + writableTokenStore = newWritableStoreWithTokens(newNftToken531(10)); + writableTokenRelStore = + newWritableStoreWithTokenRels(newAccount4680Token531Rel(3), newTreasuryToken531Rel(1)); + writableNftStore = newWritableStoreWithNfts( + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_531.tokenNum()) + .serialNumber(1) + .build()) + .ownerNumber(0) // treasury owns this NFT + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_531.tokenNum()) + .serialNumber(2) + .build()) + .ownerNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_531.tokenNum()) + .serialNumber(3) + .build()) + .ownerNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_531.tokenNum()) + .serialNumber(4) + .build()) + .ownerNumber(ACCOUNT_4680.accountNumOrThrow()) + .build()); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 0, 2L, 3L, 4L); + final var context = mockContext(txn); + + subject.handle(context); + + final var acct = writableAccountStore.get(ACCOUNT_4680); + Assertions.assertThat(acct.numberOwnedNfts()).isZero(); + Assertions.assertThat(acct.numberPositiveBalances()).isZero(); + final var treasuryAcct = writableAccountStore.get(TREASURY_ACCOUNT_9876); + // The treasury still owns its NFT, so its counts shouldn't change + Assertions.assertThat(treasuryAcct.numberOwnedNfts()).isEqualTo(1); + Assertions.assertThat(treasuryAcct.numberTreasuryTitles()).isEqualTo(1); + Assertions.assertThat(treasuryAcct.numberPositiveBalances()).isEqualTo(1); + final var token = writableTokenStore.get(TOKEN_531); + // Verify that 3 NFTs were removed from the total supply + Assertions.assertThat(token.totalSupply()).isEqualTo(7); + final var tokenRel = writableTokenRelStore.get(ACCOUNT_4680, TOKEN_531); + Assertions.assertThat(tokenRel.balance()).isZero(); + // Verify the treasury's NFT wasn't removed + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 1))) + .isNotNull(); + // Verify that the account's NFTs were removed + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 2))) + .isNull(); + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 3))) + .isNull(); + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 4))) + .isNull(); + } + + @Test + void duplicateNftSerials() { + // This is a success case, and should be identical to the case with no duplicates above + + mockConfig(); + mockOkExpiryValidator(); + writableAccountStore = newWritableStoreWithAccounts( + Account.newBuilder() + 
.numberTreasuryTitles(0) + .numberPositiveBalances(1) + .numberOwnedNfts(3) + .build(), + Account.newBuilder() + .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .numberTreasuryTitles(1) + .numberPositiveBalances(1) + .numberOwnedNfts(1) + .build()); + writableTokenStore = newWritableStoreWithTokens(newNftToken531(10)); + writableTokenRelStore = + newWritableStoreWithTokenRels(newAccount4680Token531Rel(3), newTreasuryToken531Rel(1)); + writableNftStore = newWritableStoreWithNfts( + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_531.tokenNum()) + .serialNumber(1) + .build()) + .ownerNumber(0) // treasury owns this NFT + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_531.tokenNum()) + .serialNumber(2) + .build()) + .ownerNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_531.tokenNum()) + .serialNumber(3) + .build()) + .ownerNumber(ACCOUNT_4680.accountNumOrThrow()) + .build(), + Nft.newBuilder() + .id(UniqueTokenId.newBuilder() + .tokenTypeNumber(TOKEN_531.tokenNum()) + .serialNumber(4) + .build()) + .ownerNumber(ACCOUNT_4680.accountNumOrThrow()) + .build()); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 0, 2L, 2L, 3L, 3L, 4L, 4L, 2L, 3L, 4L); + final var context = mockContext(txn); + + subject.handle(context); + + final var acct = writableAccountStore.get(ACCOUNT_4680); + Assertions.assertThat(acct.numberOwnedNfts()).isZero(); + Assertions.assertThat(acct.numberPositiveBalances()).isZero(); + final var treasuryAcct = writableAccountStore.get(TREASURY_ACCOUNT_9876); + // The treasury still owns its NFT, so its counts shouldn't change + Assertions.assertThat(treasuryAcct.numberOwnedNfts()).isEqualTo(1); + Assertions.assertThat(treasuryAcct.numberTreasuryTitles()).isEqualTo(1); + Assertions.assertThat(treasuryAcct.numberPositiveBalances()).isEqualTo(1); + final var token = writableTokenStore.get(TOKEN_531); + // Verify that 3 NFTs were removed from the total supply + Assertions.assertThat(token.totalSupply()).isEqualTo(7); + final var tokenRel = writableTokenRelStore.get(ACCOUNT_4680, TOKEN_531); + Assertions.assertThat(tokenRel.balance()).isZero(); + // Verify the treasury's NFT wasn't removed + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 1))) + .isNotNull(); + // Verify that the account's NFTs were removed + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 2))) + .isNull(); + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 3))) + .isNull(); + Assertions.assertThat(writableNftStore.get(new UniqueTokenId(TOKEN_531.tokenNum(), 4))) + .isNull(); + } + + private Token newFungibleToken531(final long totalSupply) { + return newToken531(TokenType.FUNGIBLE_COMMON, totalSupply); + } + + private Token newNftToken531(final long totalSupply) { + return newToken531(TokenType.NON_FUNGIBLE_UNIQUE, totalSupply); + } + + private Token newToken531(final TokenType type, final long totalSupply) { + return Token.newBuilder() + .tokenNumber(TOKEN_531.tokenNum()) + .tokenType(type) + .treasuryAccountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) + .wipeKey(TOKEN_WIPE_KT.asPbjKey()) + .totalSupply(totalSupply) + .build(); + } + + private TokenRelation newTreasuryToken531Rel(final long balance) { + return newToken531Rel(TREASURY_ACCOUNT_9876, balance); + } + + private TokenRelation newAccount4680Token531Rel(final long balance) { + return 
newToken531Rel(ACCOUNT_4680, balance); + } + + private TokenRelation newToken531Rel(final AccountID accountId, final long balance) { + final var builder = TokenRelation.newBuilder() + .accountNumber(accountId.accountNumOrThrow()) + .tokenNumber(TOKEN_531.tokenNum()); + if (balance > 0) builder.balance(balance); + return builder.build(); + } + + private void mockOkExpiryValidator() { + given(validator.expirationStatus(notNull(), anyBoolean(), anyLong())) + .willReturn(OK); + } + + private HandleContext mockContext(TransactionBody txn) { + final var context = mock(HandleContext.class); + + given(context.body()).willReturn(txn); + + given(context.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + given(context.writableStore(WritableTokenStore.class)).willReturn(writableTokenStore); + given(context.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); + given(context.writableStore(WritableNftStore.class)).willReturn(writableNftStore); + + given(context.expiryValidator()).willReturn(validator); + + return context; + } + + private void mockConfig() { + mockConfig(100, true); + } + + private void mockConfig(final int maxBatchSize, final boolean nftsEnabled) { + final var mockTokensConfig = mock(TokensConfig.class); + lenient().when(mockTokensConfig.nftsAreEnabled()).thenReturn(nftsEnabled); + lenient().when(mockTokensConfig.nftsMaxBatchSizeWipe()).thenReturn(maxBatchSize); + + final var mockConfig = mock(VersionedConfiguration.class); + lenient().when(mockConfig.getConfigData(TokensConfig.class)).thenReturn(mockTokensConfig); + + given(configProvider.getConfiguration()).willReturn(mockConfig); + } + } - assertEquals(context.payerKey(), DEFAULT_PAYER_KT.asPbjKey()); - assertEquals(0, context.requiredNonPayerKeys().size()); + private TransactionBody newWipeTxn(AccountID accountId, TokenID token, long fungibleAmount, Long... 
nftSerialNums) { + final TokenWipeAccountTransactionBody.Builder wipeTxnBodyBuilder = TokenWipeAccountTransactionBody.newBuilder(); + if (accountId != null) wipeTxnBodyBuilder.account(accountId); + if (token != null) wipeTxnBodyBuilder.token(token); + wipeTxnBodyBuilder.amount(fungibleAmount); + wipeTxnBodyBuilder.serialNumbers(nftSerialNums); + return TransactionBody.newBuilder() + .transactionID( + TransactionID.newBuilder().accountID(ACCOUNT_4680).build()) + .tokenWipe(wipeTxnBodyBuilder) + .build(); } } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java index 0fe7344b7b3b..64a795b387b9 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java @@ -31,7 +31,6 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_NOT_ASSOCIATED_TO_ACCOUNT; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_WAS_DELETED; import static com.hedera.hapi.node.base.ResponseCodeEnum.TREASURY_MUST_OWN_BURNED_NFT; -import static com.hedera.node.app.service.token.impl.TokenServiceImpl.ACCOUNTS_KEY; import static com.hedera.node.app.spi.fixtures.Assertions.assertThrowsPreCheck; import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; import static com.hedera.test.factories.scenarios.TokenBurnScenarios.BURN_FOR_TOKEN_WITHOUT_SUPPLY; @@ -57,18 +56,18 @@ import com.hedera.hapi.node.state.token.Nft; import com.hedera.hapi.node.state.token.Token; import com.hedera.hapi.node.state.token.TokenRelation; +import com.hedera.hapi.node.token.TokenAssociateTransactionBody; import com.hedera.hapi.node.token.TokenBurnTransactionBody; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.service.mono.utils.EntityNumPair; import com.hedera.node.app.service.token.ReadableTokenStore; -import com.hedera.node.app.service.token.impl.TokenServiceImpl; import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.service.token.impl.WritableNftStore; import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; import com.hedera.node.app.service.token.impl.WritableTokenStore; +import com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler; +import com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler; import com.hedera.node.app.service.token.impl.handlers.TokenBurnHandler; import com.hedera.node.app.service.token.impl.test.handlers.util.ParityTestBase; -import com.hedera.node.app.service.token.impl.util.IdConvenienceUtils; import com.hedera.node.app.service.token.impl.validators.TokenSupplyChangeOpsValidator; import com.hedera.node.app.spi.fixtures.state.MapWritableKVState; import com.hedera.node.app.spi.fixtures.state.MapWritableStates; @@ -79,7 +78,6 @@ import com.hedera.node.config.ConfigProvider; import com.hedera.node.config.VersionedConfiguration; import com.hedera.node.config.data.TokensConfig; -import java.util.HashMap; import java.util.Map; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Nested; @@ -89,15 +87,32 @@ @ExtendWith(MockitoExtension.class) class TokenBurnHandlerTest extends ParityTestBase { - private static 
final AccountID ACCOUNT_1339 = IdConvenienceUtils.fromAccountNum(1339); - private static final TokenID TOKEN_123 = IdConvenienceUtils.fromTokenNum(123); + private static final AccountID ACCOUNT_1339 = BaseCryptoHandler.asAccount(1339); + private static final TokenID TOKEN_123 = BaseTokenHandler.asToken(123); - private ConfigProvider configProvider = mock(ConfigProvider.class); + private final ConfigProvider configProvider = mock(ConfigProvider.class); private TokenSupplyChangeOpsValidator validator = new TokenSupplyChangeOpsValidator(configProvider); private final TokenBurnHandler subject = new TokenBurnHandler(validator); @Nested class PureChecks { + @SuppressWarnings("DataFlowIssue") + @Test + void nullArgsThrows() { + assertThatThrownBy(() -> subject.pureChecks(null)).isInstanceOf(NullPointerException.class); + } + + @Test + void noBurnTxnPresent() { + final var nonBurnTxnBody = TokenAssociateTransactionBody.newBuilder(); + final var txn = TransactionBody.newBuilder() + .transactionID( + TransactionID.newBuilder().accountID(ACCOUNT_1339).build()) + .tokenAssociate(nonBurnTxnBody) + .build(); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)).isInstanceOf(NullPointerException.class); + } + @Test void noTokenPresent() { final var txn = newBurnTxn(null, 1); @@ -122,6 +137,14 @@ void nonPositiveFungibleAmountGiven() { .has(responseCode(INVALID_TOKEN_BURN_AMOUNT)); } + @Test + void emptyNftSerialNumbers() { + final var txn = newBurnTxn(TOKEN_123, 0); + Assertions.assertThatThrownBy(() -> subject.pureChecks(txn)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(INVALID_TOKEN_BURN_AMOUNT)); + } + @Test void invalidNftSerialNumber() { final var txn = newBurnTxn(TOKEN_123, 0, 1L, 2L, 0L); @@ -201,7 +224,7 @@ void invalidFungibleAmount() { void tokenIdNotFound() { mockConfig(); writableTokenStore = newWritableStoreWithTokens(); - final var txn = newBurnTxn(IdConvenienceUtils.fromTokenNum(999), 1); + final var txn = newBurnTxn(BaseTokenHandler.asToken(999), 1); final var context = mockContext(txn); assertThatThrownBy(() -> subject.handle(context)) @@ -289,6 +312,19 @@ void tokenTreasuryRelDoesntExist() { .has(responseCode(TOKEN_NOT_ASSOCIATED_TO_ACCOUNT)); } + @Test + void fungibleAmountExceedsBatchSize() { + mockConfig(1, true); + validator = new TokenSupplyChangeOpsValidator(configProvider); + + final var txn = newBurnTxn(TOKEN_123, 2); + final var context = mockContext(txn); + + Assertions.assertThatThrownBy(() -> subject.handle(context)) + .isInstanceOf(HandleException.class) + .has(responseCode(BATCH_SIZE_LIMIT_EXCEEDED)); + } + @Test void fungibleTokenTreasuryAccountDoesntExist() { mockConfig(); @@ -440,7 +476,7 @@ void fungibleAmountBurnedWithZeroTreasuryBalance() { @Test void nftsGivenButNotEnabled() { - mockConfig(100, false, 100); + mockConfig(100, false); validator = new TokenSupplyChangeOpsValidator(configProvider); final var txn = newBurnTxn(TOKEN_123, 0, 1L); @@ -453,7 +489,7 @@ void nftsGivenButNotEnabled() { @Test void nftSerialCountExceedsBatchSize() { - mockConfig(1, true, 100); + mockConfig(1, true); validator = new TokenSupplyChangeOpsValidator(configProvider); final var txn = newBurnTxn(TOKEN_123, 0, 1L, 2L); @@ -838,40 +874,19 @@ private HandleContext mockContext(TransactionBody txn) { } private void mockConfig() { - mockConfig(100, true, 100); + mockConfig(100, true); } - private void mockConfig(final int maxBatchSize, final boolean nftsEnabled, final int maxMetadataBytes) { + private void mockConfig(final int maxBatchSize, final boolean nftsEnabled) 
{ final var mockTokensConfig = mock(TokensConfig.class); - lenient().when(mockTokensConfig.nftsMaxBatchSizeBurn()).thenReturn(maxBatchSize); lenient().when(mockTokensConfig.nftsAreEnabled()).thenReturn(nftsEnabled); - lenient().when(mockTokensConfig.nftsMaxMetadataBytes()).thenReturn(maxMetadataBytes); + lenient().when(mockTokensConfig.nftsMaxBatchSizeBurn()).thenReturn(maxBatchSize); final var mockConfig = mock(VersionedConfiguration.class); lenient().when(mockConfig.getConfigData(TokensConfig.class)).thenReturn(mockTokensConfig); given(configProvider.getConfiguration()).willReturn(mockConfig); } - - protected WritableTokenRelationStore newWritableStoreWithTokenRels(final TokenRelation... tokenRels) { - final var backingMap = new HashMap(); - for (final TokenRelation tokenRel : tokenRels) { - backingMap.put(EntityNumPair.fromLongs(tokenRel.accountNumber(), tokenRel.tokenNumber()), tokenRel); - } - - final var wrappingState = new MapWritableKVState<>(ACCOUNTS_KEY, backingMap); - return new WritableTokenRelationStore( - new MapWritableStates(Map.of(TokenServiceImpl.TOKEN_RELS_KEY, wrappingState))); - } - - private WritableNftStore newWritableStoreWithNfts(Nft... nfts) { - final var nftStateBuilder = MapWritableKVState.builder(TokenServiceImpl.NFTS_KEY); - for (final Nft nft : nfts) { - nftStateBuilder.value(nft.id(), nft); - } - return new WritableNftStore( - new MapWritableStates(Map.of(TokenServiceImpl.NFTS_KEY, nftStateBuilder.build()))); - } } private TransactionBody newBurnTxn(TokenID token, long fungibleAmount, Long... nftSerialNums) { diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDeleteHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDeleteHandlerTest.java index b58509fb5593..da6689b7a112 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDeleteHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDeleteHandlerTest.java @@ -41,10 +41,11 @@ import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.service.token.impl.WritableTokenStore; +import com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler; +import com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler; import com.hedera.node.app.service.token.impl.handlers.TokenDeleteHandler; import com.hedera.node.app.service.token.impl.test.handlers.util.ParityTestBase; import com.hedera.node.app.service.token.impl.test.util.SigReqAdapterUtils; -import com.hedera.node.app.service.token.impl.util.IdConvenienceUtils; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; @@ -54,8 +55,8 @@ import org.junit.jupiter.api.Test; class TokenDeleteHandlerTest extends ParityTestBase { - private static final AccountID ACCOUNT_1339 = IdConvenienceUtils.fromAccountNum(1339); - private static final TokenID TOKEN_987_ID = IdConvenienceUtils.fromTokenNum(987L); + private static final AccountID ACCOUNT_1339 = BaseCryptoHandler.asAccount(1339); + private static final TokenID TOKEN_987_ID = BaseTokenHandler.asToken(987L); private final TokenDeleteHandler subject = new TokenDeleteHandler(); @@ -183,7 +184,7 @@ 
void rejectsTokenWithoutAdminKey() { @Test void deletesValidToken() { // Verify that the treasury account's treasury titles count is correct before the test - final var treasuryAcctId = IdConvenienceUtils.fromAccountNum(3); + final var treasuryAcctId = BaseCryptoHandler.asAccount(3); final var treasuryAcct = writableAccountStore.get(treasuryAcctId); Assertions.assertThat(treasuryAcct.numberTreasuryTitles()).isEqualTo(2); @@ -192,7 +193,7 @@ void deletesValidToken() { // Create the context and transaction final var context = mockContext(); - final var token535Id = IdConvenienceUtils.fromTokenNum(535); + final var token535Id = BaseTokenHandler.asToken(535); final var txn = newDissociateTxn(token535Id); given(context.body()).willReturn(txn); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java index 9674be46da94..80d3d41820fa 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenDissociateFromAccountHandlerTest.java @@ -57,9 +57,10 @@ import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; +import com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler; +import com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler; import com.hedera.node.app.service.token.impl.handlers.TokenDissociateFromAccountHandler; import com.hedera.node.app.service.token.impl.test.handlers.util.ParityTestBase; -import com.hedera.node.app.service.token.impl.util.IdConvenienceUtils; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; import com.hedera.node.app.spi.validation.EntityType; import com.hedera.node.app.spi.validation.ExpiryValidator; @@ -86,7 +87,7 @@ class TokenDissociateFromAccountHandlerTest extends ParityTestBase { private static final AccountID ACCOUNT_1339 = AccountID.newBuilder().accountNum(MISC_ACCOUNT.getAccountNum()).build(); - private static final AccountID ACCOUNT_2020 = IdConvenienceUtils.fromAccountNum(2020); + private static final AccountID ACCOUNT_2020 = BaseCryptoHandler.asAccount(2020); private static final TokenID TOKEN_555_ID = TokenID.newBuilder().tokenNum(555).build(); private static final TokenID TOKEN_666_ID = @@ -510,7 +511,7 @@ void tokenRelAndTreasuryTokenRelAreUpdatedForFungible() { @Test void multipleTokenRelsAreRemoved() { // Represents a token that won't be found - final var token444Id = IdConvenienceUtils.fromTokenNum(444); + final var token444Id = BaseTokenHandler.asToken(444); // Represents a token that is deleted final var token555 = Token.newBuilder() .tokenNumber(TOKEN_555_ID.tokenNum()) diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/ParityTestBase.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/ParityTestBase.java index 9e460d223304..21ffd77fd6e5 100644 --- 
a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/ParityTestBase.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/ParityTestBase.java @@ -21,24 +21,31 @@ import static com.hedera.node.app.service.token.impl.TokenServiceImpl.ACCOUNTS_KEY; import static com.hedera.node.app.service.token.impl.TokenServiceImpl.ALIASES_KEY; import static com.hedera.node.app.service.token.impl.TokenServiceImpl.TOKENS_KEY; -import static com.hedera.node.app.service.token.impl.test.handlers.util.AdapterUtils.mockStates; -import static com.hedera.node.app.service.token.impl.test.handlers.util.AdapterUtils.mockWritableStates; import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.common.UniqueTokenId; import com.hedera.hapi.node.state.token.Account; +import com.hedera.hapi.node.state.token.Nft; import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.state.token.TokenRelation; import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.service.mono.utils.EntityNum; +import com.hedera.node.app.service.mono.utils.EntityNumPair; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.ReadableTokenStoreImpl; +import com.hedera.node.app.service.token.impl.TokenServiceImpl; import com.hedera.node.app.service.token.impl.WritableAccountStore; +import com.hedera.node.app.service.token.impl.WritableNftStore; import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; import com.hedera.node.app.service.token.impl.WritableTokenStore; +import com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler; +import com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler; import com.hedera.node.app.service.token.impl.test.util.SigReqAdapterUtils; -import com.hedera.node.app.service.token.impl.util.IdConvenienceUtils; +import com.hedera.node.app.spi.fixtures.state.MapReadableStates; import com.hedera.node.app.spi.fixtures.state.MapWritableKVState; +import com.hedera.node.app.spi.fixtures.state.MapWritableStates; import com.hedera.test.factories.scenarios.TxnHandlingScenario; import java.util.HashMap; import java.util.Map; @@ -70,7 +77,7 @@ protected TransactionBody txnFrom(final TxnHandlingScenario scenario) { private MapWritableKVState newTokenStateFromTokens(Token... tokens) { final var backingMap = new HashMap(); for (final Token token : tokens) { - backingMap.put(EntityNum.fromTokenId(fromPbj(IdConvenienceUtils.fromTokenNum(token.tokenNumber()))), token); + backingMap.put(EntityNum.fromTokenId(fromPbj(BaseTokenHandler.asToken(token.tokenNumber()))), token); } return new MapWritableKVState<>(TOKENS_KEY, backingMap); @@ -78,22 +85,41 @@ private MapWritableKVState newTokenStateFromTokens(Token... to protected ReadableTokenStore newReadableStoreWithTokens(Token... tokens) { final var wrappedState = newTokenStateFromTokens(tokens); - return new ReadableTokenStoreImpl(mockStates(Map.of(TOKENS_KEY, wrappedState))); + return new ReadableTokenStoreImpl(new MapReadableStates(Map.of(TOKENS_KEY, wrappedState))); } protected WritableTokenStore newWritableStoreWithTokens(Token... 
tokens) { final var wrappedState = newTokenStateFromTokens(tokens); - return new WritableTokenStore(mockWritableStates(Map.of(TOKENS_KEY, wrappedState))); + return new WritableTokenStore(new MapWritableStates(Map.of(TOKENS_KEY, wrappedState))); } protected WritableAccountStore newWritableStoreWithAccounts(Account... accounts) { final var backingMap = new HashMap(); for (final Account account : accounts) { - backingMap.put(IdConvenienceUtils.fromAccountNum(account.accountNumber()), account); + backingMap.put(BaseCryptoHandler.asAccount(account.accountNumber()), account); } final var wrappingState = new MapWritableKVState<>(ACCOUNTS_KEY, backingMap); - return new WritableAccountStore(mockWritableStates(Map.of( + return new WritableAccountStore(new MapWritableStates(Map.of( ACCOUNTS_KEY, wrappingState, ALIASES_KEY, new MapWritableKVState<>(ALIASES_KEY, new HashMap<>())))); } + + protected WritableTokenRelationStore newWritableStoreWithTokenRels(final TokenRelation... tokenRels) { + final var backingMap = new HashMap(); + for (final TokenRelation tokenRel : tokenRels) { + backingMap.put(EntityNumPair.fromLongs(tokenRel.accountNumber(), tokenRel.tokenNumber()), tokenRel); + } + + final var wrappingState = new MapWritableKVState<>(ACCOUNTS_KEY, backingMap); + return new WritableTokenRelationStore( + new MapWritableStates(Map.of(TokenServiceImpl.TOKEN_RELS_KEY, wrappingState))); + } + + protected WritableNftStore newWritableStoreWithNfts(Nft... nfts) { + final var nftStateBuilder = MapWritableKVState.builder(TokenServiceImpl.NFTS_KEY); + for (final Nft nft : nfts) { + nftStateBuilder.value(nft.id(), nft); + } + return new WritableNftStore(new MapWritableStates(Map.of(TokenServiceImpl.NFTS_KEY, nftStateBuilder.build()))); + } } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/IdConvenienceUtilsTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/IdConvenienceUtilsTest.java deleted file mode 100644 index 710370a65683..000000000000 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/util/IdConvenienceUtilsTest.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (C) 2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.hedera.node.app.service.token.impl.test.util; - -import static com.hedera.node.app.service.token.impl.util.IdConvenienceUtils.fromAccountNum; -import static com.hedera.node.app.service.token.impl.util.IdConvenienceUtils.fromTokenNum; -import static com.hedera.node.app.service.token.impl.util.IdConvenienceUtils.isValidAccountNum; -import static com.hedera.node.app.service.token.impl.util.IdConvenienceUtils.isValidTokenNum; - -import com.hedera.hapi.node.base.AccountID; -import com.hedera.hapi.node.base.TokenID; -import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.Test; - -class IdConvenienceUtilsTest { - - @Test - void fromAccountNum_invalidAccountNumThrowsException() { - Assertions.assertThatThrownBy(() -> fromAccountNum(-1L)).isInstanceOf(IllegalArgumentException.class); - } - - @Test - void fromAccountNum_validAccountNumReturnsAccountId() { - final var result = fromAccountNum(1L); - Assertions.assertThat(fromAccountNum(1L)) - .isEqualTo(AccountID.newBuilder().accountNum(1L).build()); - } - - @Test - void fromTokenNum_invalidTokenNumThrowsException() { - Assertions.assertThatThrownBy(() -> fromTokenNum(-1L)).isInstanceOf(IllegalArgumentException.class); - } - - @Test - void fromTokenNum_validTokenNumReturnsTokenId() { - Assertions.assertThat(fromTokenNum(1L)) - .isEqualTo(TokenID.newBuilder().tokenNum(1L).build()); - } - - @Test - void isValidTokenNum_invalidTokenNum() { - Assertions.assertThat(isValidTokenNum(-1L)).isFalse(); - Assertions.assertThat(isValidTokenNum(0L)).isFalse(); - } - - @Test - void isValidTokenNum_validTokenNum() { - Assertions.assertThat(isValidTokenNum(1L)).isTrue(); - } - - @Test - void isValidAccountNum_invalidAccountNum() { - Assertions.assertThat(isValidAccountNum(-1L)).isFalse(); - Assertions.assertThat(isValidAccountNum(0L)).isFalse(); - } - - @Test - void isValidAccountNum_validAccountNum() { - Assertions.assertThat(isValidAccountNum(1L)).isTrue(); - } -} From 06ecbf76681a2e11a72c44f89b026de90ace7c16 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20Brandst=C3=A4tter?= Date: Thu, 15 Jun 2023 18:15:09 +0200 Subject: [PATCH 39/70] #6451 Introduce MetricsConfig (#7110) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Timo Brandstätter --- .../swirlds/common/config/BasicConfig.java | 3 - .../common/internal/SettingsCommon.java | 1 - .../common/metrics/RunningAverageMetric.java | 6 +- .../common/metrics/SpeedometerMetric.java | 6 +- .../common/metrics/config/MetricsConfig.java | 7 +- .../platform/DefaultMetricsProvider.java | 7 +- .../metrics/platform/DefaultStatEntry.java | 11 +- .../platform/prometheus/PrometheusConfig.java | 16 +- .../config/export/ConfigExportTest.java | 1 - .../RunningAverageMetricConfigTest.java | 11 +- .../metrics/SpeedometerMetricConfigTest.java | 11 +- .../metrics/platform/DefaultMetricsTest.java | 4 +- .../DefaultRunningAverageMetricTest.java | 30 +- .../DefaultSpeedometerMetricTest.java | 28 +- .../platform/DefaultStatEntryTest.java | 9 +- .../metrics/platform/LegacyCsvWriterTest.java | 298 +++++++++--------- .../prometheus/PrometheusConfigTest.java | 13 +- .../test/resources/metrics-test.properties | 22 +- .../swirlds/platform/SettingConstants.java | 38 +-- .../java/com/swirlds/platform/Settings.java | 76 ----- .../platform/config/ConfigMappings.java | 11 +- .../com/swirlds/platform/SettingsTest.java | 15 - .../platform/util/MetricsDocUtilsTest.java | 2 +- 23 files changed, 282 insertions(+), 344 deletions(-) diff --git 
a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java index 70688860d488..890a41e3e9b2 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java @@ -52,8 +52,6 @@ * number of connections maintained by each member (syncs happen on random connections from that set * @param bufferSize * for BufferedInputStream and BufferedOutputStream for syncing - * @param halfLife - * half life of some of the various statistics (give half the weight to the last halfLife seconds) * @param logStack * when converting an exception to a string for logging, should it include the stack trace? * @param doUpnp @@ -195,7 +193,6 @@ public record BasicConfig( @ConfigProperty(defaultValue = "104857600") int throttle7maxBytes, @ConfigProperty(defaultValue = "40") int numConnections, @ConfigProperty(defaultValue = "8192") int bufferSize, - @ConfigProperty(defaultValue = "10") double halfLife, @ConfigProperty(defaultValue = "true") boolean logStack, @ConfigProperty(defaultValue = "true") boolean doUpnp, @ConfigProperty(defaultValue = "true") boolean useLoopbackIp, diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/internal/SettingsCommon.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/internal/SettingsCommon.java index a9652cbdc740..31a1db47a0ee 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/internal/SettingsCommon.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/internal/SettingsCommon.java @@ -42,7 +42,6 @@ public class SettingsCommon { public static boolean logStack; // used by AbstractStatistics - public static double halfLife = 10; public static boolean showInternalStats; public static boolean verboseStatistics; } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/RunningAverageMetric.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/RunningAverageMetric.java index c998e868aab0..d8eb1cd56799 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/RunningAverageMetric.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/RunningAverageMetric.java @@ -22,7 +22,8 @@ import static com.swirlds.common.metrics.Metric.ValueType.VALUE; import static org.apache.commons.lang3.builder.ToStringStyle.SHORT_PREFIX_STYLE; -import com.swirlds.common.internal.SettingsCommon; +import com.swirlds.common.config.singleton.ConfigurationHolder; +import com.swirlds.common.metrics.config.MetricsConfig; import java.util.EnumSet; import org.apache.commons.lang3.builder.ToStringBuilder; @@ -114,7 +115,8 @@ final class Config extends MetricConfig config) { this.statsStringSupplier = (Supplier) config.getStatsStringSupplier(); this.resetStatsStringSupplier = (Supplier) config.getResetStatsStringSupplier(); if (config.getInit() != null) { - config.getInit().apply(SettingsCommon.halfLife); + final MetricsConfig metricsConfig = ConfigurationHolder.getConfigData(MetricsConfig.class); + config.getInit().apply(metricsConfig.halfLife()); } } @@ -134,10 +136,11 @@ public List takeSnapshot() { */ @Override public void reset() { + final MetricsConfig metricsConfig = ConfigurationHolder.getConfigData(MetricsConfig.class); if (reset != null) { - reset.accept(SettingsCommon.halfLife); + 
reset.accept(metricsConfig.halfLife()); } else if (buffered != null) { - buffered.reset(SettingsCommon.halfLife); + buffered.reset(metricsConfig.halfLife()); } } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/prometheus/PrometheusConfig.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/prometheus/PrometheusConfig.java index 2c17b8aba055..23280ca2dc68 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/prometheus/PrometheusConfig.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/prometheus/PrometheusConfig.java @@ -24,16 +24,16 @@ /** * Configuration concerning the Prometheus endpoint. * - * @param prometheusEndpointEnabled - * Flag that is {@code true}, if the endpoint should be prometheusEndpointEnabled, {@code false otherwise}. - * @param prometheusEndpointPortNumber + * @param endpointEnabled + * Flag that is {@code true}, if the endpoint should be endpointEnabled, {@code false otherwise}. + * @param endpointPortNumber * Port of the Prometheus endpoint. - * @param prometheusEndpointMaxBacklogAllowed + * @param endpointMaxBacklogAllowed * The maximum number of incoming TCP connections which the system will queue internally. * May be {@code 1}, in which case a system default value is used. */ -@ConfigData +@ConfigData("prometheus") public record PrometheusConfig( - @ConfigProperty(defaultValue = "false") boolean prometheusEndpointEnabled, - @Min(0) @Max(65535) @ConfigProperty(defaultValue = "9999") int prometheusEndpointPortNumber, - @Min(0) @ConfigProperty(defaultValue = "1") int prometheusEndpointMaxBacklogAllowed) {} + @ConfigProperty(defaultValue = "false") boolean endpointEnabled, + @Min(0) @Max(65535) @ConfigProperty(defaultValue = "9999") int endpointPortNumber, + @Min(0) @ConfigProperty(defaultValue = "1") int endpointMaxBacklogAllowed) {} diff --git a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/config/export/ConfigExportTest.java b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/config/export/ConfigExportTest.java index c6f5cdbb7dba..361eb1036bca 100644 --- a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/config/export/ConfigExportTest.java +++ b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/config/export/ConfigExportTest.java @@ -63,7 +63,6 @@ void testPrint() throws IOException { assertContains(regexForLine("verifyEventSigs", "false", true), lines); assertContains(regexForLine("doUpnp", "false", true), lines); assertContains(regexForLine("showInternalStats", "true", true), lines); - assertContains(regexForLine("csvFileName", "PlatformTesting", true), lines); assertContains(regexForLine("useLoopbackIp", "false", true), lines); assertContains(regexForLine("maxOutgoingSyncs", "1", true), lines); assertContains(regexForLine("state.saveStatePeriod", "0", true), lines); diff --git a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/RunningAverageMetricConfigTest.java b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/RunningAverageMetricConfigTest.java index 09353131f5f8..95237a0ee98b 100644 --- a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/RunningAverageMetricConfigTest.java +++ b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/RunningAverageMetricConfigTest.java @@ -20,14 +20,14 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import static 
org.assertj.core.api.Assertions.within; -import com.swirlds.common.internal.SettingsCommon; +import com.swirlds.common.metrics.config.MetricsConfig; +import com.swirlds.test.framework.config.TestConfigBuilder; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; class RunningAverageMetricConfigTest { private static final String DEFAULT_FORMAT = FloatFormats.FORMAT_11_3; - private static final double DEFAULT_HALFLIFE = SettingsCommon.halfLife; private static final String CATEGORY = "CaTeGoRy"; private static final String NAME = "NaMe"; @@ -37,6 +37,9 @@ class RunningAverageMetricConfigTest { private static final double EPSILON = 1e-6; + private static final MetricsConfig metricsConfig = + new TestConfigBuilder().getOrCreateConfig().getConfigData(MetricsConfig.class); + @Test void testConstructor() { // when @@ -48,7 +51,7 @@ void testConstructor() { assertThat(config.getDescription()).isEqualTo(NAME); assertThat(config.getUnit()).isEmpty(); assertThat(config.getFormat()).isEqualTo(DEFAULT_FORMAT); - assertThat(config.getHalfLife()).isEqualTo(DEFAULT_HALFLIFE, within(EPSILON)); + assertThat(config.getHalfLife()).isEqualTo(metricsConfig.halfLife(), within(EPSILON)); } @Test @@ -86,7 +89,7 @@ void testSetters() { assertThat(config.getDescription()).isEqualTo(NAME); assertThat(config.getUnit()).isEmpty(); assertThat(config.getFormat()).isEqualTo(DEFAULT_FORMAT); - assertThat(config.getHalfLife()).isEqualTo(DEFAULT_HALFLIFE, within(EPSILON)); + assertThat(config.getHalfLife()).isEqualTo(metricsConfig.halfLife(), within(EPSILON)); assertThat(result.getCategory()).isEqualTo(CATEGORY); assertThat(result.getName()).isEqualTo(NAME); diff --git a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/SpeedometerMetricConfigTest.java b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/SpeedometerMetricConfigTest.java index 41f1357f2914..dfd55dd02957 100644 --- a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/SpeedometerMetricConfigTest.java +++ b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/SpeedometerMetricConfigTest.java @@ -20,14 +20,14 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.Assertions.within; -import com.swirlds.common.internal.SettingsCommon; +import com.swirlds.common.metrics.config.MetricsConfig; +import com.swirlds.test.framework.config.TestConfigBuilder; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; class SpeedometerMetricConfigTest { private static final String DEFAULT_FORMAT = FloatFormats.FORMAT_11_3; - private static final double DEFAULT_HALFLIFE = SettingsCommon.halfLife; private static final String CATEGORY = "CaTeGoRy"; private static final String NAME = "NaMe"; @@ -37,6 +37,9 @@ class SpeedometerMetricConfigTest { private static final double EPSILON = 1e-6; + private static final MetricsConfig metricsConfig = + new TestConfigBuilder().getOrCreateConfig().getConfigData(MetricsConfig.class); + @Test void testConstructor() { // when @@ -48,7 +51,7 @@ void testConstructor() { assertThat(config.getDescription()).isEqualTo(NAME); assertThat(config.getUnit()).isEmpty(); assertThat(config.getFormat()).isEqualTo(DEFAULT_FORMAT); - assertThat(config.getHalfLife()).isEqualTo(DEFAULT_HALFLIFE, within(EPSILON)); + assertThat(config.getHalfLife()).isEqualTo(metricsConfig.halfLife(), within(EPSILON)); } @Test @@ -84,7 +87,7 @@ void testSetters() { assertThat(config.getDescription()).isEqualTo(NAME); 
assertThat(config.getUnit()).isEmpty(); assertThat(config.getFormat()).isEqualTo(DEFAULT_FORMAT); - assertThat(config.getHalfLife()).isEqualTo(DEFAULT_HALFLIFE, within(EPSILON)); + assertThat(config.getHalfLife()).isEqualTo(metricsConfig.halfLife(), within(EPSILON)); assertThat(result.getCategory()).isEqualTo(CATEGORY); assertThat(result.getName()).isEqualTo(NAME); diff --git a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultMetricsTest.java b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultMetricsTest.java index 635d5096f7c8..54bc7c792253 100644 --- a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultMetricsTest.java +++ b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultMetricsTest.java @@ -100,7 +100,7 @@ class DefaultMetricsTest { @BeforeEach void setupService() { final Configuration configuration = new TestConfigBuilder() - .withValue("metricsUpdatePeriodMillis", 10L) + .withValue("metrics.metricsUpdatePeriodMillis", 10L) .getOrCreateConfig(); metricsConfig = configuration.getConfigData(MetricsConfig.class); @@ -577,7 +577,7 @@ void testUpdater(@Mock final Runnable updater) { void testDisabledUpdater(@Mock final Runnable updater) { // given final Configuration configuration = new TestConfigBuilder() - .withValue("metricsUpdatePeriodMillis", 0L) + .withValue("metrics.metricsUpdatePeriodMillis", 0L) .getOrCreateConfig(); metricsConfig = configuration.getConfigData(MetricsConfig.class); final DefaultMetrics metrics = new DefaultMetrics(NODE_ID, registry, executor, factory, metricsConfig); diff --git a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultRunningAverageMetricTest.java b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultRunningAverageMetricTest.java index 754808fed065..1d15e2603554 100644 --- a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultRunningAverageMetricTest.java +++ b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultRunningAverageMetricTest.java @@ -26,11 +26,12 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; -import com.swirlds.common.internal.SettingsCommon; import com.swirlds.common.metrics.IntegerGauge; import com.swirlds.common.metrics.RunningAverageMetric; +import com.swirlds.common.metrics.config.MetricsConfig; import com.swirlds.common.metrics.platform.Snapshot.SnapshotEntry; import com.swirlds.common.test.fixtures.FakeTime; +import com.swirlds.test.framework.config.TestConfigBuilder; import java.time.Duration; import java.util.List; import org.junit.jupiter.api.DisplayName; @@ -46,6 +47,9 @@ class DefaultRunningAverageMetricTest { private static final double EPSILON = 1e-6; + private static final MetricsConfig metricsConfig = + new TestConfigBuilder().getOrCreateConfig().getConfigData(MetricsConfig.class); + @SuppressWarnings("removal") @Test @DisplayName("Constructor should store values") @@ -154,8 +158,8 @@ void testDistributionForIncreasedValue() { // when recordValues(metric, time, 0, 1000, Math.E); - recordValues(metric, time, 1000, 1000 + (int) SettingsCommon.halfLife, Math.PI); - time.set(Duration.ofSeconds(1000 + (int) SettingsCommon.halfLife)); + recordValues(metric, time, 1000, 1000 + (int) metricsConfig.halfLife(), Math.PI); + time.set(Duration.ofSeconds(1000 + (int) 
metricsConfig.halfLife())); double avg = metric.get(); // then @@ -174,14 +178,14 @@ void testDistributionForTwiceIncreasedValue() { // when recordValues(metric, time, 0, 1000, Math.E); - recordValues(metric, time, 1000, 1000 + (int) SettingsCommon.halfLife, Math.PI); + recordValues(metric, time, 1000, 1000 + (int) metricsConfig.halfLife(), Math.PI); recordValues( metric, time, - 1000 + (int) SettingsCommon.halfLife, - 1000 + 2 * (int) SettingsCommon.halfLife, + 1000 + (int) metricsConfig.halfLife(), + 1000 + 2 * (int) metricsConfig.halfLife(), Math.PI + 0.5 * (Math.PI - Math.E)); - time.set(Duration.ofSeconds(1000 + 2 * (int) SettingsCommon.halfLife)); + time.set(Duration.ofSeconds(1000 + 2 * (int) metricsConfig.halfLife())); double avg = metric.get(); // then @@ -199,8 +203,8 @@ void testDistributionForDecreasedValue() { // when recordValues(metric, time, 0, 1000, Math.PI); - recordValues(metric, time, 1000, 1000 + (int) SettingsCommon.halfLife, Math.E); - time.set(Duration.ofSeconds(1000 + (int) SettingsCommon.halfLife)); + recordValues(metric, time, 1000, 1000 + (int) metricsConfig.halfLife(), Math.E); + time.set(Duration.ofSeconds(1000 + (int) metricsConfig.halfLife())); double avg = metric.get(); // then @@ -219,14 +223,14 @@ void testDistributionForTwiceDecreasedValue() { // when recordValues(metric, time, 0, 1000, Math.PI); - recordValues(metric, time, 1000, 1000 + (int) SettingsCommon.halfLife, Math.E); + recordValues(metric, time, 1000, 1000 + (int) metricsConfig.halfLife(), Math.E); recordValues( metric, time, - 1000 + (int) SettingsCommon.halfLife, - 1000 + 2 * (int) SettingsCommon.halfLife, + 1000 + (int) metricsConfig.halfLife(), + 1000 + 2 * (int) metricsConfig.halfLife(), Math.E - 0.5 * (Math.PI - Math.E)); - time.set(Duration.ofSeconds(1000 + 2 * (int) SettingsCommon.halfLife)); + time.set(Duration.ofSeconds(1000 + 2 * (int) metricsConfig.halfLife())); double avg = metric.get(); // then diff --git a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultSpeedometerMetricTest.java b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultSpeedometerMetricTest.java index 31df3dd39a50..8689e1d7e500 100644 --- a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultSpeedometerMetricTest.java +++ b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultSpeedometerMetricTest.java @@ -26,13 +26,14 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; -import com.swirlds.common.internal.SettingsCommon; import com.swirlds.common.metrics.IntegerGauge; import com.swirlds.common.metrics.SpeedometerMetric; +import com.swirlds.common.metrics.config.MetricsConfig; import com.swirlds.common.metrics.platform.Snapshot.SnapshotEntry; import com.swirlds.common.statistics.StatsBuffered; import com.swirlds.common.test.fixtures.FakeTime; import com.swirlds.test.framework.TestQualifierTags; +import com.swirlds.test.framework.config.TestConfigBuilder; import java.time.Duration; import java.util.List; import org.junit.jupiter.api.Disabled; @@ -47,8 +48,9 @@ class DefaultSpeedometerMetricTest { private static final String DESCRIPTION = "DeScRiPtIoN"; private static final String UNIT = "UnIt"; private static final String FORMAT = "FoRmAt"; - private static final double EPSILON = 1e-6; + private static final MetricsConfig metricsConfig = + new 
TestConfigBuilder().getOrCreateConfig().getConfigData(MetricsConfig.class); @SuppressWarnings("removal") @Test @@ -218,8 +220,8 @@ void testDistributionForIncreasedRate() { // when sendCycles(metric, time, 0, 1000, 1000); - sendCycles(metric, time, 1000, 1000 + (int) SettingsCommon.halfLife, 2000); - time.set(Duration.ofSeconds(1000 + (int) SettingsCommon.halfLife)); + sendCycles(metric, time, 1000, 1000 + (int) metricsConfig.halfLife(), 2000); + time.set(Duration.ofSeconds(1000 + (int) metricsConfig.halfLife())); double rate = metric.get(); // then @@ -238,9 +240,10 @@ void testDistributionForTwiceIncreasedRate() { // when sendCycles(metric, time, 0, 1000, 1000); - sendCycles(metric, time, 1000, 1000 + (int) SettingsCommon.halfLife, 5000); - sendCycles(metric, time, 1000 + (int) SettingsCommon.halfLife, 1000 + 2 * (int) SettingsCommon.halfLife, 7000); - time.set(Duration.ofSeconds(1000 + 2 * (int) SettingsCommon.halfLife)); + sendCycles(metric, time, 1000, 1000 + (int) metricsConfig.halfLife(), 5000); + sendCycles( + metric, time, 1000 + (int) metricsConfig.halfLife(), 1000 + 2 * (int) metricsConfig.halfLife(), 7000); + time.set(Duration.ofSeconds(1000 + 2 * (int) metricsConfig.halfLife())); double rate = metric.get(); // then @@ -260,8 +263,8 @@ void testDistributionForDecreasedRate() { // when sendCycles(metric, time, 0, 1000, 1000); - sendCycles(metric, time, 1000, 1000 + (int) SettingsCommon.halfLife, 500); - time.set(Duration.ofSeconds(1000 + (int) SettingsCommon.halfLife)); + sendCycles(metric, time, 1000, 1000 + (int) metricsConfig.halfLife(), 500); + time.set(Duration.ofSeconds(1000 + (int) metricsConfig.halfLife())); double rate = metric.get(); // then @@ -280,9 +283,10 @@ void testDistributionForTwiceDecreasedRate() { // when sendCycles(metric, time, 0, 1000, 7000); - sendCycles(metric, time, 1000, 1000 + (int) SettingsCommon.halfLife, 5000); - sendCycles(metric, time, 1000 + (int) SettingsCommon.halfLife, 1000 + 2 * (int) SettingsCommon.halfLife, 2000); - time.set(Duration.ofSeconds(1000 + 2 * (int) SettingsCommon.halfLife)); + sendCycles(metric, time, 1000, 1000 + (int) metricsConfig.halfLife(), 5000); + sendCycles( + metric, time, 1000 + (int) metricsConfig.halfLife(), 1000 + 2 * (int) metricsConfig.halfLife(), 2000); + time.set(Duration.ofSeconds(1000 + 2 * (int) metricsConfig.halfLife())); double rate = metric.get(); // then diff --git a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultStatEntryTest.java b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultStatEntryTest.java index 137e2f558055..6f7ef14c7a20 100644 --- a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultStatEntryTest.java +++ b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/DefaultStatEntryTest.java @@ -31,12 +31,13 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import com.swirlds.common.internal.SettingsCommon; import com.swirlds.common.metrics.IntegerGauge; import com.swirlds.common.metrics.Metric; import com.swirlds.common.metrics.StatEntry; +import com.swirlds.common.metrics.config.MetricsConfig; import com.swirlds.common.metrics.platform.Snapshot.SnapshotEntry; import com.swirlds.common.statistics.StatsBuffered; +import com.swirlds.test.framework.config.TestConfigBuilder; import java.util.List; import java.util.function.Consumer; import java.util.function.Function; @@ -52,6 +53,8 @@ class DefaultStatEntryTest { private static final 
String UNIT = "UnIt"; private static final String FORMAT = "FoRmAt"; private static final double EPSILON = 1e-6; + private static final MetricsConfig metricsConfig = + new TestConfigBuilder().getOrCreateConfig().getConfigData(MetricsConfig.class); @SuppressWarnings({"unchecked", "removal"}) @Test @@ -111,7 +114,7 @@ void testReset() { statEntry.reset(); // then - verify(reset).accept(SettingsCommon.halfLife); + verify(reset).accept(metricsConfig.halfLife()); verify(buffered, never()).reset(anyDouble()); } @@ -129,7 +132,7 @@ void testResetWithoutResetLambda() { statEntry.reset(); // then - verify(buffered).reset(SettingsCommon.halfLife); + verify(buffered).reset(metricsConfig.halfLife()); } @SuppressWarnings({"unchecked", "removal"}) diff --git a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/LegacyCsvWriterTest.java b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/LegacyCsvWriterTest.java index fd082ca0307a..dc8839570b78 100644 --- a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/LegacyCsvWriterTest.java +++ b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/LegacyCsvWriterTest.java @@ -61,8 +61,8 @@ class LegacyCsvWriterTest { @BeforeEach void setStandardSettings() { final Configuration configuration = new TestConfigBuilder() - .withValue("csvOutputFolder", tempDir.toString()) - .withValue("csvAppend", "false") + .withValue("metrics.csvOutputFolder", tempDir.toString()) + .withValue("metrics.csvAppend", "false") .getOrCreateConfig(); metricsConfig = configuration.getConfigData(MetricsConfig.class); @@ -149,24 +149,24 @@ void testWriteDefault() throws IOException { assertThat(content) .matches( """ - filename:,.*, - Counter:,Counter, - DoubleGauge:,DoubleGauge, - FunctionGauge:,FunctionGauge, - IntegerAccumulator:,IntegerAccumulator, - IntegerGauge:,IntegerGauge, - IntegerPairAccumulator:,IntegerPairAccumulator, - LongAccumulator:,LongAccumulator, - LongGauge:,LongGauge, - RunningAverageMetric:,RunningAverageMetric, - SpeedometerMetric:,SpeedometerMetric, - StatEntry:,StatEntry, - - ,,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform, - ,,Counter,DoubleGauge,FunctionGauge,IntegerAccumulator,IntegerGauge,IntegerPairAccumulator,LongAccumulator,LongGauge,RunningAverageMetric,SpeedometerMetric,StatEntry, - ,,0,0\\.0,Hello FunctionGauge,0,0,0.0,0,0,0\\.0,0\\.0,Hello StatEntry, - ,,1,3\\.1,Hello FunctionGauge,42,42,112\\.2,42,4711,1000\\.0,\\d*\\.\\d,Hello StatEntry, - """); + filename:,.*, + Counter:,Counter, + DoubleGauge:,DoubleGauge, + FunctionGauge:,FunctionGauge, + IntegerAccumulator:,IntegerAccumulator, + IntegerGauge:,IntegerGauge, + IntegerPairAccumulator:,IntegerPairAccumulator, + LongAccumulator:,LongAccumulator, + LongGauge:,LongGauge, + RunningAverageMetric:,RunningAverageMetric, + SpeedometerMetric:,SpeedometerMetric, + StatEntry:,StatEntry, + + ,,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform, + ,,Counter,DoubleGauge,FunctionGauge,IntegerAccumulator,IntegerGauge,IntegerPairAccumulator,LongAccumulator,LongGauge,RunningAverageMetric,SpeedometerMetric,StatEntry, + ,,0,0\\.0,Hello FunctionGauge,0,0,0.0,0,0,0\\.0,0\\.0,Hello StatEntry, + ,,1,3\\.1,Hello FunctionGauge,42,42,112\\.2,42,4711,1000\\.0,\\d*\\.\\d,Hello StatEntry, + """); } @Test @@ -202,12 +202,14 @@ void testWritingOfSpecialValues() throws IOException { // then final String content = 
Files.readString(csvFilePath); - assertThat(content).matches(""" - (.*\\n){5}.* - ,,0,0.0, - ,,0,0.0, - ,,0,0.0, - """); + assertThat(content) + .matches( + """ + (.*\\n){5}.* + ,,0,0.0, + ,,0,0.0, + ,,0,0.0, + """); } @Test @@ -231,22 +233,22 @@ void testWriteWithExistingFile() throws IOException { assertThat(content) .matches( """ - filename:,.*, - Counter:,Counter, - DoubleGauge:,DoubleGauge, - - ,,platform,platform, - ,,Counter,DoubleGauge, - ,,0,0.0, - """); + filename:,.*, + Counter:,Counter, + DoubleGauge:,DoubleGauge, + + ,,platform,platform, + ,,Counter,DoubleGauge, + ,,0,0.0, + """); } @Test void testWriteWithAppendedModeAndExistingFile() throws IOException { // given final Configuration configuration = new TestConfigBuilder() - .withValue("csvOutputFolder", tempDir.toString()) - .withValue("csvAppend", "true") + .withValue("metrics.csvOutputFolder", tempDir.toString()) + .withValue("metrics.csvAppend", "true") .getOrCreateConfig(); final MetricsConfig metricsConfig = configuration.getConfigData(MetricsConfig.class); final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); @@ -254,15 +256,15 @@ void testWriteWithAppendedModeAndExistingFile() throws IOException { Files.writeString( csvFilePath, """ - filename:,/tmp/tempfile.tmp, - Counter:,Counter, - DoubleGauge:,DoubleGauge, - - ,,platform,platform, - ,,Counter,DoubleGauge, - ,,1,2.0, - ,,11,12.0, - """); + filename:,/tmp/tempfile.tmp, + Counter:,Counter, + DoubleGauge:,DoubleGauge, + + ,,platform,platform, + ,,Counter,DoubleGauge, + ,,1,2.0, + ,,11,12.0, + """); final List metrics = createShortList(); final List snapshots = metrics.stream() .map(DefaultMetric.class::cast) @@ -278,26 +280,26 @@ void testWriteWithAppendedModeAndExistingFile() throws IOException { assertThat(content) .matches( """ - filename:,.*, - Counter:,Counter, - DoubleGauge:,DoubleGauge, - - ,,platform,platform, - ,,Counter,DoubleGauge, - ,,1,2.0, - ,,11,12.0, - - - ,,0,0.0, - """); + filename:,.*, + Counter:,Counter, + DoubleGauge:,DoubleGauge, + + ,,platform,platform, + ,,Counter,DoubleGauge, + ,,1,2.0, + ,,11,12.0, + + + ,,0,0.0, + """); } @Test void testWriteWithAppendedModeAndNonExistingFile() throws IOException { // given final Configuration configuration = new TestConfigBuilder() - .withValue("csvOutputFolder", tempDir.toString()) - .withValue("csvAppend", "true") + .withValue("metrics.csvOutputFolder", tempDir.toString()) + .withValue("metrics.csvAppend", "true") .getOrCreateConfig(); final MetricsConfig metricsConfig = configuration.getConfigData(MetricsConfig.class); final LegacyCsvWriter writer = new LegacyCsvWriter(NODE_ID, tempDir, metricsConfig); @@ -318,14 +320,14 @@ void testWriteWithAppendedModeAndNonExistingFile() throws IOException { assertThat(content) .matches( """ - filename:,.*, - Counter:,Counter, - DoubleGauge:,DoubleGauge, - - ,,platform,platform, - ,,Counter,DoubleGauge, - ,,0,0.0, - """); + filename:,.*, + Counter:,Counter, + DoubleGauge:,DoubleGauge, + + ,,platform,platform, + ,,Counter,DoubleGauge, + ,,0,0.0, + """); } @Test @@ -362,15 +364,15 @@ void testWriteWithInternalIgnored() throws IOException { assertThat(content) .matches( """ - filename:,.*, - Public Counter:,Public Counter, - Public DoubleGauge:,Public DoubleGauge, - - ,,platform,platform, - ,,Public Counter,Public DoubleGauge, - ,,0,0.0, - ,,3,2.7, - """); + filename:,.*, + Public Counter:,Public Counter, + Public DoubleGauge:,Public DoubleGauge, + + ,,platform,platform, + ,,Public Counter,Public DoubleGauge, + ,,0,0.0, + ,,3,2.7, + """); } 
@Test @@ -408,17 +410,17 @@ void testWriteWithInternalNotIgnored() throws IOException { assertThat(content) .matches( """ - filename:,.*, - Internal Counter:,Internal Counter, - Public Counter:,Public Counter, - Internal DoubleGauge:,Internal DoubleGauge, - Public DoubleGauge:,Public DoubleGauge, - - ,,internal,platform,internal,platform, - ,,Internal Counter,Public Counter,Internal DoubleGauge,Public DoubleGauge, - ,,0,0,0.0,0.0, - ,,2,3,3.1,2.7, - """); + filename:,.*, + Internal Counter:,Internal Counter, + Public Counter:,Public Counter, + Internal DoubleGauge:,Internal DoubleGauge, + Public DoubleGauge:,Public DoubleGauge, + + ,,internal,platform,internal,platform, + ,,Internal Counter,Public Counter,Internal DoubleGauge,Public DoubleGauge, + ,,0,0,0.0,0.0, + ,,2,3,3.1,2.7, + """); } @Test @@ -455,17 +457,17 @@ void testWriteWithSecondaryValuesNotIncluded() throws IOException { assertThat(content) .matches( """ - filename:,.*, - RunningAverageMetric:,RunningAverageMetric, - SpeedometerMetric:,SpeedometerMetric, - RunningAverageMetric Info:,RunningAverageMetric Info, - SpeedometerMetric Info:,SpeedometerMetric Info, - - ,,platform,platform,platform\\.info,platform\\.info, - ,,RunningAverageMetric,SpeedometerMetric,RunningAverageMetric Info,SpeedometerMetric Info, - ,,0\\.0,0\\.0,0\\.0,0\\.0, - ,,1000\\.0,\\d*\\.\\d,3000\\.0,\\d*\\.\\d, - """); + filename:,.*, + RunningAverageMetric:,RunningAverageMetric, + SpeedometerMetric:,SpeedometerMetric, + RunningAverageMetric Info:,RunningAverageMetric Info, + SpeedometerMetric Info:,SpeedometerMetric Info, + + ,,platform,platform,platform\\.info,platform\\.info, + ,,RunningAverageMetric,SpeedometerMetric,RunningAverageMetric Info,SpeedometerMetric Info, + ,,0\\.0,0\\.0,0\\.0,0\\.0, + ,,1000\\.0,\\d*\\.\\d,3000\\.0,\\d*\\.\\d, + """); } @Test @@ -503,17 +505,17 @@ void testWriteWithSecondaryValuesIncluded() throws IOException { assertThat(content) .matches( """ - filename:,.*, - RunningAverageMetric:,RunningAverageMetric, - SpeedometerMetric:,SpeedometerMetric, - RunningAverageMetric Info:,RunningAverageMetric Info, - SpeedometerMetric Info:,SpeedometerMetric Info, - - ,,platform,platform,platform,platform,platform,platform,platform,platform,platform\\.info,platform\\.info, - ,,RunningAverageMetric,RunningAverageMetricMax,RunningAverageMetricMin,RunningAverageMetricStd,SpeedometerMetric,SpeedometerMetricMax,SpeedometerMetricMin,SpeedometerMetricStd,RunningAverageMetric Info,SpeedometerMetric Info, - ,,0\\.0,0\\.0,0\\.0,0\\.0,0\\.0,0\\.0,0\\.0,0\\.0,0\\.0,0\\.0, - ,,1000\\.0,1000\\.0,1000\\.0,0\\.0,\\d*\\.\\d,\\d*\\.\\d,\\d*\\.\\d,0\\.0,3000\\.0,\\d*\\.\\d, - """); + filename:,.*, + RunningAverageMetric:,RunningAverageMetric, + SpeedometerMetric:,SpeedometerMetric, + RunningAverageMetric Info:,RunningAverageMetric Info, + SpeedometerMetric Info:,SpeedometerMetric Info, + + ,,platform,platform,platform,platform,platform,platform,platform,platform,platform\\.info,platform\\.info, + ,,RunningAverageMetric,RunningAverageMetricMax,RunningAverageMetricMin,RunningAverageMetricStd,SpeedometerMetric,SpeedometerMetricMax,SpeedometerMetricMin,SpeedometerMetricStd,RunningAverageMetric Info,SpeedometerMetric Info, + ,,0\\.0,0\\.0,0\\.0,0\\.0,0\\.0,0\\.0,0\\.0,0\\.0,0\\.0,0\\.0, + ,,1000\\.0,1000\\.0,1000\\.0,0\\.0,\\d*\\.\\d,\\d*\\.\\d,\\d*\\.\\d,0\\.0,3000\\.0,\\d*\\.\\d, + """); } @Test @@ -533,9 +535,9 @@ void testBrokenFormatString() throws IOException { // then final String content = Files.readString(csvFilePath); assertThat(content).matches(""" - 
(.*\\n){4}.* - ,,, - """); + (.*\\n){4}.* + ,,, + """); } @Test @@ -572,18 +574,18 @@ void testChangedEntriesWithSimpleMetrics() throws IOException { assertThat(content) .matches( """ - filename:,.*, - Counter 1:,Counter 1, - Counter 2:,Counter 2, - Counter 3:,Counter 3, - Counter 4:,Counter 4, - Counter 5:,Counter 5, - - ,,platform,platform,platform,platform,platform, - ,,Counter 1,Counter 2,Counter 3,Counter 4,Counter 5, - ,,0,1,2,3,4, - ,,,11,,33,, - """); + filename:,.*, + Counter 1:,Counter 1, + Counter 2:,Counter 2, + Counter 3:,Counter 3, + Counter 4:,Counter 4, + Counter 5:,Counter 5, + + ,,platform,platform,platform,platform,platform, + ,,Counter 1,Counter 2,Counter 3,Counter 4,Counter 5, + ,,0,1,2,3,4, + ,,,11,,33,, + """); } @Test @@ -620,18 +622,18 @@ void testChangedEntriesWithComplexMetricsAndNoSecondaryValues() throws IOExcepti assertThat(content) .matches( """ - filename:,.*, - RunningAverageMetric 1:,RunningAverageMetric 1, - RunningAverageMetric 2:,RunningAverageMetric 2, - RunningAverageMetric 3:,RunningAverageMetric 3, - RunningAverageMetric 4:,RunningAverageMetric 4, - RunningAverageMetric 5:,RunningAverageMetric 5, - - ,,platform,platform,platform,platform,platform, - ,,RunningAverageMetric 1,RunningAverageMetric 2,RunningAverageMetric 3,RunningAverageMetric 4,RunningAverageMetric 5, - ,,0\\.0,1000\\.0,2000\\.0,3000\\.0,4000\\.0, - ,,,5\\d*\\.\\d,,16\\d*\\.\\d,, - """); + filename:,.*, + RunningAverageMetric 1:,RunningAverageMetric 1, + RunningAverageMetric 2:,RunningAverageMetric 2, + RunningAverageMetric 3:,RunningAverageMetric 3, + RunningAverageMetric 4:,RunningAverageMetric 4, + RunningAverageMetric 5:,RunningAverageMetric 5, + + ,,platform,platform,platform,platform,platform, + ,,RunningAverageMetric 1,RunningAverageMetric 2,RunningAverageMetric 3,RunningAverageMetric 4,RunningAverageMetric 5, + ,,0\\.0,1000\\.0,2000\\.0,3000\\.0,4000\\.0, + ,,,5\\d*\\.\\d,,16\\d*\\.\\d,, + """); } @Test @@ -669,18 +671,18 @@ void testChangedEntriesWithComplexMetricsAndSecondaryValues() throws IOException assertThat(content) .matches( """ - filename:,.*, - RunningAverageMetric 1:,RunningAverageMetric 1, - RunningAverageMetric 2:,RunningAverageMetric 2, - RunningAverageMetric 3:,RunningAverageMetric 3, - RunningAverageMetric 4:,RunningAverageMetric 4, - RunningAverageMetric 5:,RunningAverageMetric 5, - - ,,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform, - ,,RunningAverageMetric 1,RunningAverageMetric 1Max,RunningAverageMetric 1Min,RunningAverageMetric 1Std,RunningAverageMetric 2,RunningAverageMetric 2Max,RunningAverageMetric 2Min,RunningAverageMetric 2Std,RunningAverageMetric 3,RunningAverageMetric 3Max,RunningAverageMetric 3Min,RunningAverageMetric 3Std,RunningAverageMetric 4,RunningAverageMetric 4Max,RunningAverageMetric 4Min,RunningAverageMetric 4Std,RunningAverageMetric 5,RunningAverageMetric 5Max,RunningAverageMetric 5Min,RunningAverageMetric 5Std, - ,,0\\.0,0\\.0,0\\.0,0\\.0,1000\\.0,1000\\.0,1000\\.0,0\\.0,2000\\.0,2000\\.0,2000\\.0,0\\.0,3000\\.0,3000\\.0,3000\\.0,0\\.0,4000\\.0,4000\\.0,4000\\.0,0\\.0, - ,,,,,,5\\d*\\.\\d,5\\d*\\.\\d,1000.0,\\d*\\.\\d,,,,,16\\d*\\.\\d,16\\d*\\.\\d,3000.0,\\d*\\.\\d,,,,, - """); + filename:,.*, + RunningAverageMetric 1:,RunningAverageMetric 1, + RunningAverageMetric 2:,RunningAverageMetric 2, + RunningAverageMetric 3:,RunningAverageMetric 3, + RunningAverageMetric 4:,RunningAverageMetric 4, + 
RunningAverageMetric 5:,RunningAverageMetric 5, + + ,,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform,platform, + ,,RunningAverageMetric 1,RunningAverageMetric 1Max,RunningAverageMetric 1Min,RunningAverageMetric 1Std,RunningAverageMetric 2,RunningAverageMetric 2Max,RunningAverageMetric 2Min,RunningAverageMetric 2Std,RunningAverageMetric 3,RunningAverageMetric 3Max,RunningAverageMetric 3Min,RunningAverageMetric 3Std,RunningAverageMetric 4,RunningAverageMetric 4Max,RunningAverageMetric 4Min,RunningAverageMetric 4Std,RunningAverageMetric 5,RunningAverageMetric 5Max,RunningAverageMetric 5Min,RunningAverageMetric 5Std, + ,,0\\.0,0\\.0,0\\.0,0\\.0,1000\\.0,1000\\.0,1000\\.0,0\\.0,2000\\.0,2000\\.0,2000\\.0,0\\.0,3000\\.0,3000\\.0,3000\\.0,0\\.0,4000\\.0,4000\\.0,4000\\.0,0\\.0, + ,,,,,,5\\d*\\.\\d,5\\d*\\.\\d,1000.0,\\d*\\.\\d,,,,,16\\d*\\.\\d,16\\d*\\.\\d,3000.0,\\d*\\.\\d,,,,, + """); } private List createCompleteList() { diff --git a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/prometheus/PrometheusConfigTest.java b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/prometheus/PrometheusConfigTest.java index ea411717577b..8cfdf0adc1f2 100644 --- a/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/prometheus/PrometheusConfigTest.java +++ b/platform-sdk/swirlds-common/src/test/java/com/swirlds/common/metrics/platform/prometheus/PrometheusConfigTest.java @@ -41,11 +41,10 @@ void testDefaultPrometheusConfig() { final PrometheusConfig prometheusConfig = configuration.getConfigData(PrometheusConfig.class); assertThat(prometheusConfig).isNotNull(); - assertThat(prometheusConfig.prometheusEndpointEnabled()) - .isEqualTo(Boolean.valueOf(DEFAULT_PROMETHEUS_ENDPOINT_ENABLED)); - assertThat(prometheusConfig.prometheusEndpointPortNumber()) + assertThat(prometheusConfig.endpointEnabled()).isEqualTo(Boolean.valueOf(DEFAULT_PROMETHEUS_ENDPOINT_ENABLED)); + assertThat(prometheusConfig.endpointPortNumber()) .isEqualTo(Integer.valueOf(DEFAULT_PROMETHEUS_ENDPOINT_PORT_NUMBER)); - assertThat(prometheusConfig.prometheusEndpointMaxBacklogAllowed()) + assertThat(prometheusConfig.endpointMaxBacklogAllowed()) .isEqualTo(Integer.valueOf(DEFAULT_PROMETHEUS_ENDPOINT_MAX_BACKLOG_ALLOWED)); } @@ -61,8 +60,8 @@ void testCustomPrometheusConfig() throws Exception { final PrometheusConfig prometheusConfig = configuration.getConfigData(PrometheusConfig.class); assertThat(prometheusConfig).isNotNull(); - assertThat(prometheusConfig.prometheusEndpointEnabled()).isTrue(); - assertThat(prometheusConfig.prometheusEndpointPortNumber()).isEqualTo(9999); - assertThat(prometheusConfig.prometheusEndpointMaxBacklogAllowed()).isEqualTo(2); + assertThat(prometheusConfig.endpointEnabled()).isTrue(); + assertThat(prometheusConfig.endpointPortNumber()).isEqualTo(9998); + assertThat(prometheusConfig.endpointMaxBacklogAllowed()).isEqualTo(2); } } diff --git a/platform-sdk/swirlds-common/src/test/resources/metrics-test.properties b/platform-sdk/swirlds-common/src/test/resources/metrics-test.properties index 2df6506c643b..aea204ae95ec 100644 --- a/platform-sdk/swirlds-common/src/test/resources/metrics-test.properties +++ b/platform-sdk/swirlds-common/src/test/resources/metrics-test.properties @@ -14,17 +14,15 @@ # ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR # DISTRIBUTING THIS SOFTWARE OR ITS DERIVATIVES. 
# - # Metrics -metricsUpdatePeriodMillis = 2000 -disableMetricsOutput = true -csvOutputFolder = ./metrics-output -csvFileName = metrics-test -csvAppend = true -csvWriteFrequency = 6000 -metricsDocFileName = metricsDoc-test.tsv - +metrics.metricsUpdatePeriodMillis=2000 +metrics.disableMetricsOutput=true +metrics.csvOutputFolder=./metrics-output +metrics.csvFileName=metrics-test +metrics.csvAppend=true +metrics.csvWriteFrequency=6000 +metrics.metricsDocFileName=metricsDoc-test.tsv # Metrics - Prometheus -prometheusEndpointEnabled = true -prometheusEndpointPortNumber = 9999 -prometheusEndpointMaxBacklogAllowed = 2 +prometheus.endpointEnabled=true +prometheus.endpointPortNumber=9998 +prometheus.endpointMaxBacklogAllowed=2 diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java index c256ad4b37e2..eb1f15904df2 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java @@ -44,7 +44,6 @@ public final class SettingConstants { static final int MAX_INCOMING_SYNCS_INC_DEFAULT_VALUE = 1; static final int BUFFER_SIZE_DEFAULT_VALUE = 8 * 1024; static final int SOCKET_IP_TOS_DEFAULT_VALUE = -1; - static final int HALF_LIFE_DEFAULT_VALUE = 10; static final boolean LOG_STACK_DEFAULT_VALUE = true; static final boolean USE_TLS_DEFAULT_VALUE = true; static final boolean DO_UPNP_DEFAULT_VALUE = true; @@ -72,24 +71,7 @@ public final class SettingConstants { static final boolean LOAD_KEYS_FROM_PFX_FILES_DEFAULT_VALUE = true; static final int MAX_TRANSACTION_BYTES_PER_EVENT_DEFAULT_VALUE = 245760; static final int MAX_TRANSACTION_COUNT_PER_EVENT_DEFAULT_VALUE = 245760; - static final String CSV_OUTPUT_FOLDER_DEFAULT_VALUE = ""; - static final boolean DISABLE_METRICS_OUTPUT_DEFAULT_VALUE = false; - static final String CSV_FILE_NAME_DEFAULT_VALUE = ""; - static final int CSV_WRITE_FREQUENCY_DEFAULT_VALUE = 3000; - static final boolean CSV_APPEND_DEFAULT_VALUE = false; - static final boolean PROMETHEUS_ENDPOINT_ENABLED_DEFAULT_VALUE = false; - static final int PROMETHEUS_ENDPOINT_PORT_NUMBER_DEFAULT_VALUE = 9999; - static final int PROMETHEUS_ENDPOINT_MAX_BACKLOG_ALLOWED_DEFAULT_VALUE = 1; - static final int EVENT_INTAKE_QUEUE_THROTTLE_SIZE_DEFAULT_VALUE = 1000; - static final int EVENT_INTAKE_QUEUE_SIZE_DEFAULT_VALUE = 10_000; - static final boolean CHECK_SIGNED_STATE_FROM_DISK_DEFAULT_VALUE = false; static final int RANDOM_EVENT_PROBABILITY_DEFAULT_VALUE = 0; - static final int STALE_EVENT_PREVENTION_THRESHOLD_DEFAULT_VALUE = 5; - static final int RESCUE_CHILDLESS_INVERSE_PROBABILITY_DEFAULT_VALUE = 10; - static final boolean ENABLE_EVENT_STREAMING_DEFAULT_VALUE = false; - static final int EVENT_STREAM_QUEUE_CAPACITY_DEFAULT_VALUE = 500; - static final int EVENTS_LOG_PERIOD_DEFAULT_VALUE = 60; - static final String EVENTS_LOG_DIR_DEFAULT_VALUE = "./eventstreams"; static final int THREAD_DUMP_PERIOD_MS_DEFAULT_VALUE = 0; static final String THREAD_DUMP_LOG_DIR_DEFAULT_VALUE = "data/threadDump"; static final int JVM_PAUSE_DETECTOR_SLEEP_MS_DEFAULT_VALUE = 1000; @@ -206,7 +188,25 @@ public final class SettingConstants { "event.eventStreamQueueCapacity", "event.eventsLogPeriod", "event.eventsLogDir", - "event.enableEventStreaming"); + "event.enableEventStreaming", + "halfLife", + "csvWriteFrequency", + "csvOutputFolder", + 
"csvFileName", + "csvAppend", + "prometheusEndpointEnabled", + "prometheusEndpointPortNumber", + "prometheusEndpointMaxBacklogAllowed", + "disableMetricsOutput", + "metrics.halfLife", + "metrics.csvWriteFrequency", + "metrics.csvOutputFolder", + "metrics.csvFileName", + "metrics.csvAppend", + "metrics.prometheusEndpointEnabled", + "metrics.prometheusEndpointPortNumber", + "metrics.prometheusEndpointMaxBacklogAllowed", + "metrics.disableMetricsOutput"); private SettingConstants() {} } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java index b0ed8237a2c4..d9f846f1be85 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java @@ -25,18 +25,12 @@ import static com.swirlds.platform.SettingConstants.BUFFER_SIZE_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.CALLER_SKIPS_BEFORE_SLEEP_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.CONFIG_TXT; -import static com.swirlds.platform.SettingConstants.CSV_APPEND_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.CSV_FILE_NAME_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.CSV_OUTPUT_FOLDER_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.CSV_WRITE_FREQUENCY_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.DATA_STRING; import static com.swirlds.platform.SettingConstants.DEADLOCK_CHECK_PERIOD_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.DELAY_SHUFFLE_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.DISABLE_METRICS_OUTPUT_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.DO_UPNP_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.FREEZE_SECONDS_AFTER_STARTUP_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.GOSSIP_WITH_DIFFERENT_VERSIONS_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.HALF_LIFE_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.JVM_PAUSE_DETECTOR_SLEEP_MS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.JVM_PAUSE_REPORT_MS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.KEYS_STRING; @@ -50,9 +44,6 @@ import static com.swirlds.platform.SettingConstants.MAX_TRANSACTION_COUNT_PER_EVENT_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.NUM_CONNECTIONS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.NUM_CRYPTO_THREADS_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.PROMETHEUS_ENDPOINT_ENABLED_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.PROMETHEUS_ENDPOINT_MAX_BACKLOG_ALLOWED_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.PROMETHEUS_ENDPOINT_PORT_NUMBER_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.REMOVED_SETTINGS; import static com.swirlds.platform.SettingConstants.SAVED_STRING; import static com.swirlds.platform.SettingConstants.SETTINGS_TXT; @@ -192,9 +183,6 @@ public class Settings { * @see Type of Service */ private int socketIpTos = SOCKET_IP_TOS_DEFAULT_VALUE; - /** half life of some of the various statistics (give half the weight to the last halfLife seconds) */ - private double halfLife = HALF_LIFE_DEFAULT_VALUE; - /** when converting an exception to a string for logging, should it 
include the stack trace? */ private boolean logStack = LOG_STACK_DEFAULT_VALUE; /** should TLS be turned on, rather than making all sockets unencrypted? */ @@ -254,39 +242,12 @@ public class Settings { private int maxTransactionBytesPerEvent = MAX_TRANSACTION_BYTES_PER_EVENT_DEFAULT_VALUE; /** the maximum number of transactions that a single event may contain */ private int maxTransactionCountPerEvent = MAX_TRANSACTION_COUNT_PER_EVENT_DEFAULT_VALUE; - /** - * The absolute or relative folder path where all the statistics CSV files will be written. If this value is null or - * an empty string, the current folder selection behavior will be used (ie: the SDK base path). - */ - private String csvOutputFolder = CSV_OUTPUT_FOLDER_DEFAULT_VALUE; - /** - * Disable all metrics-outputs. If {@code true}, this overrides all other specific settings concerning - * metrics-output. - */ - private boolean disableMetricsOutput = DISABLE_METRICS_OUTPUT_DEFAULT_VALUE; - /** - * The prefix of the name of the CSV file that the platform will write statistics to. If this value is null or an - * empty string, the platform will not write any statistics. - */ - private String csvFileName = CSV_FILE_NAME_DEFAULT_VALUE; /** * The path to look for an emergency recovery file on node start. If a file is present in this directory at startup, * emergency recovery will begin. */ private Path emergencyRecoveryFileLoadDir = getAbsolutePath().resolve(DATA_STRING).resolve(SAVED_STRING); - /** - * The frequency, in milliseconds, at which values are written to the statistics CSV file. - */ - private int csvWriteFrequency = CSV_WRITE_FREQUENCY_DEFAULT_VALUE; - /** Indicates whether statistics should be appended to the CSV file. */ - private boolean csvAppend = CSV_APPEND_DEFAULT_VALUE; - /** Indicates if a prometheus endpoint should be offered **/ - private boolean prometheusEndpointEnabled = PROMETHEUS_ENDPOINT_ENABLED_DEFAULT_VALUE; - /** Port of the Prometheus endpoint **/ - private int prometheusEndpointPortNumber = PROMETHEUS_ENDPOINT_PORT_NUMBER_DEFAULT_VALUE; - /** Backlog of the Prometheus endpoint (= number of incoming TCP connections the system will queue) **/ - private int prometheusEndpointMaxBacklogAllowed = PROMETHEUS_ENDPOINT_MAX_BACKLOG_ALLOWED_DEFAULT_VALUE; /////////////////////////////////////////// // Setting for thread dump @@ -323,7 +284,6 @@ public static void populateSettingsCommon() { SettingsCommon.maxTransactionBytesPerEvent = getInstance().getMaxTransactionBytesPerEvent(); SettingsCommon.maxAddressSizeAllowed = getInstance().getMaxAddressSizeAllowed(); SettingsCommon.transactionMaxBytes = getInstance().getTransactionMaxBytes(); - SettingsCommon.halfLife = getInstance().getHalfLife(); SettingsCommon.logStack = getInstance().isLogStack(); SettingsCommon.showInternalStats = getInstance().isShowInternalStats(); SettingsCommon.verboseStatistics = getInstance().isVerboseStatistics(); @@ -698,10 +658,6 @@ public void setSocketIpTos(final int socketIpTos) { this.socketIpTos = socketIpTos; } - public double getHalfLife() { - return halfLife; - } - public boolean isLogStack() { return logStack; } @@ -798,38 +754,6 @@ public int getMaxTransactionCountPerEvent() { return maxTransactionCountPerEvent; } - public String getCsvOutputFolder() { - return csvOutputFolder; - } - - public boolean isDisableMetricsOutput() { - return disableMetricsOutput; - } - - public String getCsvFileName() { - return csvFileName; - } - - public int getCsvWriteFrequency() { - return csvWriteFrequency; - } - - public boolean isCsvAppend() { 
- return csvAppend; - } - - public boolean getPrometheusEndpointEnabled() { - return prometheusEndpointEnabled; - } - - public int getPrometheusEndpointPortNumber() { - return prometheusEndpointPortNumber; - } - - public int getPrometheusEndpointMaxBacklogAllowed() { - return prometheusEndpointMaxBacklogAllowed; - } - public long getThreadDumpPeriodMs() { return threadDumpPeriodMs; } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/ConfigMappings.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/ConfigMappings.java index b362f57c1025..ce54429892e4 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/ConfigMappings.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/ConfigMappings.java @@ -49,7 +49,16 @@ private ConfigMappings() {} new ConfigMapping("event.eventStreamQueueCapacity", "eventStreamQueueCapacity"), new ConfigMapping("event.eventsLogPeriod", "eventsLogPeriod"), new ConfigMapping("event.eventsLogDir", "eventsLogDir"), - new ConfigMapping("event.enableEventStreaming", "enableEventStreaming")); + new ConfigMapping("event.enableEventStreaming", "enableEventStreaming"), + new ConfigMapping("metrics.halfLife", "halfLife"), + new ConfigMapping("metrics.csvWriteFrequency", "csvWriteFrequency"), + new ConfigMapping("metrics.csvOutputFolder", "csvOutputFolder"), + new ConfigMapping("metrics.csvFileName", "csvFileName"), + new ConfigMapping("metrics.csvAppend", "csvAppend"), + new ConfigMapping("metrics.disableMetricsOutput", "disableMetricsOutput"), + new ConfigMapping("prometheus.endpointEnabled", "prometheusEndpointEnabled"), + new ConfigMapping("prometheus.endpointPortNumber", "prometheusEndpointPortNumber"), + new ConfigMapping("prometheus.endpointMaxBacklogAllowed", "prometheusEndpointMaxBacklogAllowed")); /** * Add all known aliases to the provided config source diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java index 881f542a5316..d67c3aa4a186 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java @@ -20,17 +20,12 @@ import static com.swirlds.platform.SettingConstants.BUFFER_SIZE_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.CALLER_SKIPS_BEFORE_SLEEP_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.CONFIG_TXT; -import static com.swirlds.platform.SettingConstants.CSV_APPEND_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.CSV_FILE_NAME_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.CSV_OUTPUT_FOLDER_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.CSV_WRITE_FREQUENCY_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.DATA_STRING; import static com.swirlds.platform.SettingConstants.DEADLOCK_CHECK_PERIOD_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.DELAY_SHUFFLE_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.DO_UPNP_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.FREEZE_SECONDS_AFTER_STARTUP_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.GOSSIP_WITH_DIFFERENT_VERSIONS_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.HALF_LIFE_DEFAULT_VALUE; import static 
com.swirlds.platform.SettingConstants.JVM_PAUSE_DETECTOR_SLEEP_MS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.JVM_PAUSE_REPORT_MS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.KEYS_STRING; @@ -231,7 +226,6 @@ public void checkGetDefaultSettings() { Assertions.assertEquals(MAX_INCOMING_SYNCS_INC_DEFAULT_VALUE, settings.getMaxIncomingSyncsInc()); Assertions.assertEquals(BUFFER_SIZE_DEFAULT_VALUE, settings.getBufferSize()); Assertions.assertEquals(SOCKET_IP_TOS_DEFAULT_VALUE, settings.getSocketIpTos()); - Assertions.assertEquals(HALF_LIFE_DEFAULT_VALUE, settings.getHalfLife()); Assertions.assertEquals( Integer.parseInt(ConsensusConfig.COIN_FREQ_DEFAULT_VALUE), configuration.getConfigData(ConsensusConfig.class).coinFreq()); @@ -259,10 +253,6 @@ public void checkGetDefaultSettings() { MAX_TRANSACTION_BYTES_PER_EVENT_DEFAULT_VALUE, settings.getMaxTransactionBytesPerEvent()); Assertions.assertEquals( MAX_TRANSACTION_COUNT_PER_EVENT_DEFAULT_VALUE, settings.getMaxTransactionCountPerEvent()); - Assertions.assertEquals(CSV_OUTPUT_FOLDER_DEFAULT_VALUE, settings.getCsvOutputFolder()); - Assertions.assertEquals(CSV_FILE_NAME_DEFAULT_VALUE, settings.getCsvFileName()); - Assertions.assertEquals(CSV_WRITE_FREQUENCY_DEFAULT_VALUE, settings.getCsvWriteFrequency()); - Assertions.assertEquals(CSV_APPEND_DEFAULT_VALUE, settings.isCsvAppend()); Assertions.assertEquals(THREAD_DUMP_PERIOD_MS_DEFAULT_VALUE, settings.getThreadDumpPeriodMs()); Assertions.assertEquals(THREAD_DUMP_LOG_DIR_DEFAULT_VALUE, settings.getThreadDumpLogDir()); Assertions.assertEquals(JVM_PAUSE_DETECTOR_SLEEP_MS_DEFAULT_VALUE, settings.getJVMPauseDetectorSleepMs()); @@ -313,7 +303,6 @@ public void checkGetLoadedSettings() throws IOException { Assertions.assertEquals(2, settings.getMaxIncomingSyncsInc()); Assertions.assertEquals(7000, settings.getBufferSize()); Assertions.assertEquals(1, settings.getSocketIpTos()); - Assertions.assertEquals(5, settings.getHalfLife()); Assertions.assertFalse(settings.isLogStack()); Assertions.assertFalse(settings.isUseTLS()); Assertions.assertFalse(settings.isDoUpnp()); @@ -335,10 +324,6 @@ public void checkGetLoadedSettings() throws IOException { Assertions.assertFalse(settings.isLoadKeysFromPfxFiles()); Assertions.assertEquals(300000, settings.getMaxTransactionBytesPerEvent()); Assertions.assertEquals(300000, settings.getMaxTransactionCountPerEvent()); - Assertions.assertEquals("csvFolder", settings.getCsvOutputFolder()); - Assertions.assertEquals("csvFile", settings.getCsvFileName()); - Assertions.assertEquals(4000, settings.getCsvWriteFrequency()); - Assertions.assertTrue(settings.isCsvAppend()); Assertions.assertEquals(1, settings.getThreadDumpPeriodMs()); Assertions.assertEquals("badData/badThreadDump", settings.getThreadDumpLogDir()); Assertions.assertEquals(2000, settings.getJVMPauseDetectorSleepMs()); diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/util/MetricsDocUtilsTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/util/MetricsDocUtilsTest.java index 286e17c64332..a0eaa9cec537 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/util/MetricsDocUtilsTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/util/MetricsDocUtilsTest.java @@ -51,7 +51,7 @@ void writeMetricsDocumentToFile() { // setup // Unable to mock BasicConfig.class because it is a final class, so using the test builder instead of mocking. 
final Configuration configuration = new TestConfigBuilder() - .withValue("metricsDocFileName", METRIC_DOC_FILE_NAME) + .withValue("metrics.metricsDocFileName", METRIC_DOC_FILE_NAME) .getOrCreateConfig(); final String docFilePath = com.swirlds.common.io.utility.FileUtils.getUserDir() + File.separator + METRIC_DOC_FILE_NAME; From 2eff59e6c5c516155302719845f6c76a0bd8642f Mon Sep 17 00:00:00 2001 From: Joseph Sinclair <121976561+jsync-swirlds@users.noreply.github.com> Date: Thu, 15 Jun 2023 09:37:36 -0700 Subject: [PATCH 40/70] Removed terrible `throws Throwable` from some test scenario code, replaced with correct exceptions (#7118) Signed-off-by: Joseph Sinclair --- .../scenarios/ScheduleSignScenarios.java | 8 +++++++- .../hedera/test/factories/sigs/SigFactory.java | 16 +++++++++++----- .../test/factories/txns/SignedTxnFactory.java | 10 ++++++++-- 3 files changed, 26 insertions(+), 8 deletions(-) diff --git a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/scenarios/ScheduleSignScenarios.java b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/scenarios/ScheduleSignScenarios.java index a0fd1714852f..58524dafadd9 100644 --- a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/scenarios/ScheduleSignScenarios.java +++ b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/scenarios/ScheduleSignScenarios.java @@ -20,16 +20,22 @@ import static com.hedera.test.factories.txns.ScheduleSignFactory.newSignedScheduleSign; import static com.hedera.test.factories.txns.TinyBarsFromTo.tinyBarsFromTo; +import com.google.protobuf.InvalidProtocolBufferException; import com.hedera.node.app.service.mono.utils.accessors.PlatformTxnAccessor; import com.hedera.node.app.service.mono.utils.accessors.SignedTxnAccessor; import com.hedera.test.factories.txns.ScheduleUtils; import com.hederahashgraph.api.proto.java.ScheduleCreateTransactionBody; import com.hederahashgraph.api.proto.java.TransactionBody; +import java.security.InvalidKeyException; +import java.security.NoSuchAlgorithmException; +import java.security.SignatureException; public enum ScheduleSignScenarios implements TxnHandlingScenario { SCHEDULE_SIGN_MISSING_SCHEDULE { @Override - public PlatformTxnAccessor platformTxn() throws Throwable { + public PlatformTxnAccessor platformTxn() + throws InvalidProtocolBufferException, SignatureException, NoSuchAlgorithmException, + InvalidKeyException { return PlatformTxnAccessor.from( newSignedScheduleSign().signing(UNKNOWN_SCHEDULE).get()); } diff --git a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/sigs/SigFactory.java b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/sigs/SigFactory.java index ecead1b4fe90..ba06cf9b32f6 100644 --- a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/sigs/SigFactory.java +++ b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/sigs/SigFactory.java @@ -21,6 +21,7 @@ import static com.hedera.node.app.service.mono.sigs.utils.MiscCryptoUtils.keccak256DigestOf; import com.google.protobuf.ByteString; +import com.google.protobuf.InvalidProtocolBufferException; import com.hedera.node.app.hapi.utils.SignatureGenerator; import com.hedera.test.factories.keys.KeyFactory; import com.hedera.test.factories.keys.KeyTree; @@ -63,7 +64,8 @@ public static byte[] signUnchecked(final byte[] data, final PrivateKey pk) { } public Transaction signWithSigMap( - final 
Transaction.Builder txn, final List signers, final KeyFactory factory) throws Throwable { + final Transaction.Builder txn, final List signers, final KeyFactory factory) + throws SignatureException, NoSuchAlgorithmException, InvalidKeyException, InvalidProtocolBufferException { final SimpleSigning signing = new SimpleSigning(extractTransactionBodyBytes(txn), signers, factory); final List> sigs = signing.completed(); txn.setSigMap(sigMapGen.generate(sigs, signing.sigTypes())); @@ -95,14 +97,16 @@ public SignatureType get() { }; } - public List> completed() throws Throwable { + public List> completed() + throws SignatureException, NoSuchAlgorithmException, InvalidKeyException { for (final KeyTree signer : signers) { signRecursively(signer.getRoot()); } return keySigs; } - private void signRecursively(final KeyTreeNode node) throws Throwable { + private void signRecursively(final KeyTreeNode node) + throws SignatureException, NoSuchAlgorithmException, InvalidKeyException { if (node instanceof KeyTreeLeaf) { if (((KeyTreeLeaf) node).isUsedToSign()) { signIfNecessary(node.asKey(factory)); @@ -114,7 +118,8 @@ private void signRecursively(final KeyTreeNode node) throws Throwable { } } - private void signIfNecessary(final Key key) throws Throwable { + private void signIfNecessary(final Key key) + throws SignatureException, NoSuchAlgorithmException, InvalidKeyException { final String pubKeyHex = KeyFactory.asPubKeyHex(key); if (!used.contains(pubKeyHex)) { signFor(pubKeyHex, key); @@ -122,7 +127,8 @@ private void signIfNecessary(final Key key) throws Throwable { } } - private void signFor(final String pubKeyHex, final Key key) throws Throwable { + private void signFor(final String pubKeyHex, final Key key) + throws SignatureException, NoSuchAlgorithmException, InvalidKeyException { final SignatureType sigType = sigTypeOf(key); if (sigType == SignatureType.ED25519) { final PrivateKey signer = factory.lookupPrivateKey(pubKeyHex); diff --git a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/txns/SignedTxnFactory.java b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/txns/SignedTxnFactory.java index ea5bb15e61ef..5f8aece2d01f 100644 --- a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/txns/SignedTxnFactory.java +++ b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/txns/SignedTxnFactory.java @@ -23,6 +23,7 @@ import static java.util.stream.Collectors.toList; import com.google.protobuf.ByteString; +import com.google.protobuf.InvalidProtocolBufferException; import com.hedera.node.app.hapi.utils.fee.FeeBuilder; import com.hedera.test.factories.keys.KeyFactory; import com.hedera.test.factories.keys.KeyTree; @@ -34,6 +35,9 @@ import com.hederahashgraph.api.proto.java.Transaction; import com.hederahashgraph.api.proto.java.TransactionBody; import com.hederahashgraph.api.proto.java.TransactionID; +import java.security.InvalidKeyException; +import java.security.NoSuchAlgorithmException; +import java.security.SignatureException; import java.time.Instant; import java.util.List; import java.util.Optional; @@ -78,7 +82,8 @@ public abstract class SignedTxnFactory> { protected abstract void customizeTxn(TransactionBody.Builder txn); - public Transaction get() throws Throwable { + public Transaction get() + throws InvalidProtocolBufferException, SignatureException, NoSuchAlgorithmException, InvalidKeyException { final Transaction provisional = signed(signableTxn(customFee.orElse(0L))); return 
customFee.isPresent() ? provisional : signed(signableTxn(feeFor(provisional, payerKt.numLeaves()))); } @@ -91,7 +96,8 @@ private Transaction.Builder signableTxn(final long fee) { .setBodyBytes(ByteString.copyFrom(txn.build().toByteArray())); } - private Transaction signed(final Transaction.Builder txnWithSigs) throws Throwable { + private Transaction signed(final Transaction.Builder txnWithSigs) + throws InvalidProtocolBufferException, SignatureException, NoSuchAlgorithmException, InvalidKeyException { final List signers = allKts(); return sigFactory.signWithSigMap(txnWithSigs, signers, keyFactory); } From 928b3435c0139badc1317e113ba21af7cccf87e0 Mon Sep 17 00:00:00 2001 From: Hendrik Ebbers Date: Thu, 15 Jun 2023 19:30:09 +0200 Subject: [PATCH 41/70] Objects.requireNonNull used in platform-core (#7120) Signed-off-by: Hendrik Ebbers --- .../swirlds/platform/components/EventIntake.java | 16 ++++++++-------- .../AsyncPreconsensusEventWriter.java | 8 ++++---- .../SyncPreconsensusEventWriter.java | 14 +++++++------- .../platform/gossip/FallenBehindManagerImpl.java | 8 ++++---- .../platform/internal/ConsensusRound.java | 8 ++++---- .../reconnect/ReconnectLearnerThrottle.java | 6 +++--- .../reconnect/ReconnectProtocolResponder.java | 14 +++++++------- .../platform/reconnect/ReconnectThrottle.java | 4 ++-- .../emergency/EmergencyReconnectProtocol.java | 4 ++-- .../swirlds/platform/state/iss/IssHandler.java | 13 ++++++------- .../state/signed/SignedStateFileManager.java | 7 +++---- 11 files changed, 50 insertions(+), 52 deletions(-) diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventIntake.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventIntake.java index 57c7813a7d82..a16309b239e1 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventIntake.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/components/EventIntake.java @@ -16,7 +16,6 @@ package com.swirlds.platform.components; -import static com.swirlds.base.ArgumentUtils.throwArgNull; import static com.swirlds.logging.LogMarker.INTAKE_EVENT; import static com.swirlds.logging.LogMarker.STALE_EVENTS; import static com.swirlds.logging.LogMarker.SYNC; @@ -37,6 +36,7 @@ import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Collection; import java.util.List; +import java.util.Objects; import java.util.function.Supplier; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -80,14 +80,14 @@ public EventIntake( @NonNull final EventObserverDispatcher dispatcher, @NonNull final IntakeCycleStats stats, @NonNull final ShadowGraph shadowGraph) { - this.selfId = throwArgNull(selfId, "selfId"); - this.eventLinker = throwArgNull(eventLinker, "eventLinker"); - this.consensusSupplier = throwArgNull(consensusSupplier, "consensusSupplier"); + this.selfId = Objects.requireNonNull(selfId, "selfId must not be null"); + this.eventLinker = Objects.requireNonNull(eventLinker, "eventLinker must not be null"); + this.consensusSupplier = Objects.requireNonNull(consensusSupplier, "consensusSupplier must not be null"); this.consensusWrapper = new ConsensusWrapper(consensusSupplier); - this.addressBook = throwArgNull(addressBook, "addressBook"); - this.dispatcher = throwArgNull(dispatcher, "dispatcher"); - this.stats = throwArgNull(stats, "stats"); - this.shadowGraph = throwArgNull(shadowGraph, "shadowGraph"); + this.addressBook = Objects.requireNonNull(addressBook, 
"addressBook must not be null"); + this.dispatcher = Objects.requireNonNull(dispatcher, "dispatcher must not be null"); + this.stats = Objects.requireNonNull(stats, "stats must not be null"); + this.shadowGraph = Objects.requireNonNull(shadowGraph, "shadowGraph must not be null"); } /** diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/AsyncPreconsensusEventWriter.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/AsyncPreconsensusEventWriter.java index c386ca97c3b3..fcbcf8cacaa9 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/AsyncPreconsensusEventWriter.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/AsyncPreconsensusEventWriter.java @@ -16,7 +16,6 @@ package com.swirlds.platform.event.preconsensus; -import static com.swirlds.base.ArgumentUtils.throwArgNull; import static com.swirlds.logging.LogMarker.EXCEPTION; import com.swirlds.common.context.PlatformContext; @@ -28,6 +27,7 @@ import com.swirlds.platform.internal.EventImpl; import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Duration; +import java.util.Objects; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -107,9 +107,9 @@ public AsyncPreconsensusEventWriter( @NonNull final ThreadManager threadManager, @NonNull final PreconsensusEventWriter writer) { - throwArgNull(platformContext, "platformContext"); - throwArgNull(threadManager, "threadManager"); - this.writer = throwArgNull(writer, "writer"); + Objects.requireNonNull(platformContext, "platformContext must not be null"); + Objects.requireNonNull(threadManager, "threadManager must not be null"); + this.writer = Objects.requireNonNull(writer, "writer must not be null"); final PreconsensusEventStreamConfig config = platformContext.getConfiguration().getConfigData(PreconsensusEventStreamConfig.class); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/SyncPreconsensusEventWriter.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/SyncPreconsensusEventWriter.java index 271a98d60429..c64f93c52aa1 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/SyncPreconsensusEventWriter.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/event/preconsensus/SyncPreconsensusEventWriter.java @@ -16,7 +16,6 @@ package com.swirlds.platform.event.preconsensus; -import static com.swirlds.base.ArgumentUtils.throwArgNull; import static com.swirlds.common.units.DataUnit.UNIT_BYTES; import static com.swirlds.common.units.DataUnit.UNIT_MEGABYTES; import static com.swirlds.logging.LogMarker.EXCEPTION; @@ -33,6 +32,7 @@ import java.io.UncheckedIOException; import java.nio.file.Files; import java.time.Duration; +import java.util.Objects; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -139,8 +139,8 @@ public class SyncPreconsensusEventWriter implements PreconsensusEventWriter, Sta public SyncPreconsensusEventWriter( @NonNull final PlatformContext platformContext, @NonNull final PreconsensusEventFileManager fileManager) { - throwArgNull(platformContext, "platformContext"); - throwArgNull(fileManager, "fileManager"); + Objects.requireNonNull(platformContext, "platformContext must not be null"); + Objects.requireNonNull(fileManager, "fileManager must not be 
null"); final PreconsensusEventStreamConfig config = platformContext.getConfiguration().getConfigData(PreconsensusEventStreamConfig.class); @@ -256,7 +256,7 @@ public void setMinimumGenerationToStore(final long minimumGenerationToStore) { */ @Override public boolean isEventDurable(@NonNull final EventImpl event) { - throwArgNull(event, "event"); + Objects.requireNonNull(event, "event must not be null"); if (event.getStreamSequenceNumber() == EventImpl.STALE_EVENT_STREAM_SEQUENCE_NUMBER) { // Stale events are not written to disk. return false; @@ -269,7 +269,7 @@ public boolean isEventDurable(@NonNull final EventImpl event) { */ @Override public void waitUntilDurable(@NonNull final EventImpl event) throws InterruptedException { - throwArgNull(event, "event"); + Objects.requireNonNull(event, "event must not be null"); if (event.getStreamSequenceNumber() == EventImpl.STALE_EVENT_STREAM_SEQUENCE_NUMBER) { throw new IllegalStateException("Event is stale and will never be durable"); } @@ -282,8 +282,8 @@ public void waitUntilDurable(@NonNull final EventImpl event) throws InterruptedE @Override public boolean waitUntilDurable(@NonNull final EventImpl event, @NonNull final Duration timeToWait) throws InterruptedException { - throwArgNull(event, "event"); - throwArgNull(timeToWait, "timeToWait"); + Objects.requireNonNull(event, "event must not be null"); + Objects.requireNonNull(timeToWait, "timeToWait must not be null"); if (event.getStreamSequenceNumber() == EventImpl.STALE_EVENT_STREAM_SEQUENCE_NUMBER) { throw new IllegalStateException("Event is stale and will never be durable"); } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/FallenBehindManagerImpl.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/FallenBehindManagerImpl.java index 9c1d1ed108e7..d254501cfd1d 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/FallenBehindManagerImpl.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/gossip/FallenBehindManagerImpl.java @@ -16,7 +16,6 @@ package com.swirlds.platform.gossip; -import com.swirlds.base.ArgumentUtils; import com.swirlds.common.merkle.synchronization.config.ReconnectConfig; import com.swirlds.common.system.EventCreationRule; import com.swirlds.common.system.EventCreationRuleResponse; @@ -76,9 +75,10 @@ public FallenBehindManagerImpl( for (final int neighbor : neighbors) { allNeighbors.add(addressBook.getNodeId(neighbor)); } - this.notifyPlatform = ArgumentUtils.throwArgNull(notifyPlatform, "notifyPlatform"); - this.fallenBehindCallback = ArgumentUtils.throwArgNull(fallenBehindCallback, "fallenBehindCallback"); - this.config = ArgumentUtils.throwArgNull(config, "config"); + this.notifyPlatform = Objects.requireNonNull(notifyPlatform, "notifyPlatform must not be null"); + this.fallenBehindCallback = + Objects.requireNonNull(fallenBehindCallback, "fallenBehindCallback must not be null"); + this.config = Objects.requireNonNull(config, "config must not be null"); } @Override diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/internal/ConsensusRound.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/internal/ConsensusRound.java index 730a0d6ad09b..53cd38b38b10 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/internal/ConsensusRound.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/internal/ConsensusRound.java @@ -16,7 +16,6 @@ 
package com.swirlds.platform.internal; -import static com.swirlds.base.ArgumentUtils.throwArgNull; import static org.apache.commons.lang3.builder.ToStringStyle.SHORT_PREFIX_STYLE; import com.swirlds.common.system.Round; @@ -28,6 +27,7 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; +import java.util.Objects; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; @@ -69,9 +69,9 @@ public ConsensusRound( @NonNull final EventImpl keystoneEvent, @NonNull final GraphGenerations generations) { - throwArgNull(consensusEvents, "consensusEvents"); - throwArgNull(keystoneEvent, "keystoneEvent"); - throwArgNull(generations, "generations"); + Objects.requireNonNull(consensusEvents, "consensusEvents must not be null"); + Objects.requireNonNull(keystoneEvent, "keystoneEvent must not be null"); + Objects.requireNonNull(generations, "generations must not be null"); this.consensusEvents = Collections.unmodifiableList(consensusEvents); this.keystoneEvent = keystoneEvent; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectLearnerThrottle.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectLearnerThrottle.java index 7fba0514cae9..a60a728b5514 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectLearnerThrottle.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectLearnerThrottle.java @@ -20,7 +20,6 @@ import static com.swirlds.logging.LogMarker.SOCKET_EXCEPTIONS; import static com.swirlds.logging.LogMarker.STARTUP; -import com.swirlds.base.ArgumentUtils; import com.swirlds.common.StartupTime; import com.swirlds.common.merkle.synchronization.config.ReconnectConfig; import com.swirlds.common.system.NodeId; @@ -32,6 +31,7 @@ import com.swirlds.platform.system.SystemExitCode; import com.swirlds.platform.system.SystemExitUtils; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -48,8 +48,8 @@ public class ReconnectLearnerThrottle { private int failedReconnectsInARow; public ReconnectLearnerThrottle(@NonNull final NodeId selfId, @NonNull final ReconnectConfig config) { - this.selfId = ArgumentUtils.throwArgNull(selfId, "selfId"); - this.config = ArgumentUtils.throwArgNull(config, "config"); + this.selfId = Objects.requireNonNull(selfId, "selfId must not be null"); + this.config = Objects.requireNonNull(config, "config must not be null"); this.failedReconnectsInARow = 0; } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectProtocolResponder.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectProtocolResponder.java index 0e9e2982c495..4a5f2db07e14 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectProtocolResponder.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectProtocolResponder.java @@ -18,7 +18,6 @@ import static com.swirlds.logging.LogMarker.RECONNECT; -import com.swirlds.base.ArgumentUtils; import com.swirlds.common.merkle.synchronization.config.ReconnectConfig; import com.swirlds.common.threading.manager.ThreadManager; import 
com.swirlds.platform.components.state.query.LatestSignedStateProvider; @@ -29,6 +28,7 @@ import com.swirlds.platform.state.signed.ReservedSignedState; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; +import java.util.Objects; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -67,13 +67,13 @@ public ReconnectProtocolResponder( @NonNull final ReconnectThrottle reconnectThrottle, @NonNull final FallenBehindManager fallenBehindManager, @NonNull final ReconnectMetrics stats) { - this.threadManager = ArgumentUtils.throwArgNull(threadManager, "threadManager"); + this.threadManager = Objects.requireNonNull(threadManager, "threadManager must not be null"); this.latestSignedStateProvider = - ArgumentUtils.throwArgNull(latestSignedStateProvider, "latestSignedStateProvider"); - this.config = ArgumentUtils.throwArgNull(config, "config"); - this.fallenBehindManager = ArgumentUtils.throwArgNull(fallenBehindManager, "fallenBehindManager"); - this.reconnectThrottle = ArgumentUtils.throwArgNull(reconnectThrottle, "reconnectThrottle"); - this.stats = ArgumentUtils.throwArgNull(stats, "stats"); + Objects.requireNonNull(latestSignedStateProvider, "latestSignedStateProvider must not be null"); + this.config = Objects.requireNonNull(config, "config must not be null"); + this.fallenBehindManager = Objects.requireNonNull(fallenBehindManager, "fallenBehindManager must not be null"); + this.reconnectThrottle = Objects.requireNonNull(reconnectThrottle, "reconnectThrottle must not be null"); + this.stats = Objects.requireNonNull(stats, "stats must not be null"); } /** {@inheritDoc} */ diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectThrottle.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectThrottle.java index fb3e9e0f6ee5..931783d60d71 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectThrottle.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/ReconnectThrottle.java @@ -18,7 +18,6 @@ import static com.swirlds.logging.LogMarker.RECONNECT; -import com.swirlds.base.ArgumentUtils; import com.swirlds.common.merkle.synchronization.config.ReconnectConfig; import com.swirlds.common.system.NodeId; import edu.umd.cs.findbugs.annotations.NonNull; @@ -26,6 +25,7 @@ import java.time.Instant; import java.util.HashMap; import java.util.Iterator; +import java.util.Objects; import java.util.function.Supplier; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -59,7 +59,7 @@ public class ReconnectThrottle { private Supplier currentTime; public ReconnectThrottle(@NonNull final ReconnectConfig config) { - this.config = ArgumentUtils.throwArgNull(config, "config"); + this.config = Objects.requireNonNull(config, "config must not be null"); lastReconnectTime = new HashMap<>(); reconnectingNode = null; currentTime = Instant::now; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectProtocol.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectProtocol.java index 835a94e85508..5335f51c7d97 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectProtocol.java +++ 
b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/reconnect/emergency/EmergencyReconnectProtocol.java @@ -18,7 +18,6 @@ import static com.swirlds.logging.LogMarker.RECONNECT; -import com.swirlds.base.ArgumentUtils; import com.swirlds.common.notification.NotificationEngine; import com.swirlds.common.notification.listeners.ReconnectCompleteListener; import com.swirlds.common.notification.listeners.ReconnectCompleteNotification; @@ -35,6 +34,7 @@ import com.swirlds.platform.state.signed.SignedStateFinder; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; +import java.util.Objects; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -97,7 +97,7 @@ public EmergencyReconnectProtocol( this.reconnectSocketTimeout = reconnectSocketTimeout; this.reconnectMetrics = reconnectMetrics; this.reconnectController = reconnectController; - this.fallenBehindManager = ArgumentUtils.throwArgNull(fallenBehindManager, "fallenBehindManager"); + this.fallenBehindManager = Objects.requireNonNull(fallenBehindManager, "fallenBehindManager must not be null"); } @Override diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/IssHandler.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/IssHandler.java index acd81f6a333a..f8c5f18dbd7d 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/IssHandler.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/iss/IssHandler.java @@ -16,8 +16,6 @@ package com.swirlds.platform.state.iss; -import static com.swirlds.base.ArgumentUtils.throwArgNull; - import com.swirlds.base.time.Time; import com.swirlds.common.config.StateConfig; import com.swirlds.common.crypto.Hash; @@ -75,16 +73,17 @@ public IssHandler( @NonNull final FatalErrorConsumer fatalErrorConsumer, @NonNull final IssConsumer issConsumer) { - this.issConsumer = throwArgNull(issConsumer, "issConsumer"); - this.haltRequestedConsumer = throwArgNull(haltRequestedConsumer, "haltRequestedConsumer"); - this.fatalErrorConsumer = throwArgNull(fatalErrorConsumer, "fatalErrorConsumer"); + this.issConsumer = Objects.requireNonNull(issConsumer, "issConsumer must not be null"); + this.haltRequestedConsumer = + Objects.requireNonNull(haltRequestedConsumer, "haltRequestedConsumer must not be null"); + this.fatalErrorConsumer = Objects.requireNonNull(fatalErrorConsumer, "fatalErrorConsumer must not be null"); this.stateDumpRequestedDispatcher = dispatchBuilder.getDispatcher(this, StateDumpRequestedTrigger.class)::dispatch; - this.stateConfig = throwArgNull(stateConfig, "stateConfig"); + this.stateConfig = Objects.requireNonNull(stateConfig, "stateConfig must not be null"); this.issDumpRateLimiter = new RateLimiter(time, Duration.ofSeconds(stateConfig.secondsBetweenISSDumps())); - this.selfId = throwArgNull(selfId, "selfId"); + this.selfId = Objects.requireNonNull(selfId, "selfId must not be null"); } /** diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileManager.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileManager.java index d86cde0fe2f0..618f0b5e6137 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileManager.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/state/signed/SignedStateFileManager.java @@ -16,7 
+16,6 @@ package com.swirlds.platform.state.signed; -import static com.swirlds.base.ArgumentUtils.throwArgNull; import static com.swirlds.common.io.utility.FileUtils.deleteDirectoryAndLog; import static com.swirlds.logging.LogMarker.EXCEPTION; import static com.swirlds.logging.LogMarker.STATE_TO_DISK; @@ -127,15 +126,15 @@ public SignedStateFileManager( @NonNull final StateToDiskAttemptConsumer stateToDiskAttemptConsumer, @NonNull final MinimumGenerationNonAncientConsumer minimumGenerationNonAncientConsumer) { - this.metrics = throwArgNull(metrics, "metrics"); + this.metrics = Objects.requireNonNull(metrics, "metrics must not be null"); this.time = time; this.selfId = selfId; this.mainClassName = mainClassName; this.swirldName = swirldName; this.stateToDiskAttemptConsumer = stateToDiskAttemptConsumer; this.stateConfig = context.getConfiguration().getConfigData(StateConfig.class); - this.minimumGenerationNonAncientConsumer = - throwArgNull(minimumGenerationNonAncientConsumer, "minimumGenerationNonAncientConsumer"); + this.minimumGenerationNonAncientConsumer = Objects.requireNonNull( + minimumGenerationNonAncientConsumer, "minimumGenerationNonAncientConsumer must not be null"); final BasicConfig basicConfig = context.getConfiguration().getConfigData(BasicConfig.class); From aad49f793347f767375ac213bafd2f7a2092fe88 Mon Sep 17 00:00:00 2001 From: Hendrik Ebbers Date: Thu, 15 Jun 2023 19:30:22 +0200 Subject: [PATCH 42/70] Resource moved to resources (#7091) Signed-off-by: Hendrik Ebbers --- .../swirlds/common/formatting/TextEffect-Colors.png | Bin 1 file changed, 0 insertions(+), 0 deletions(-) rename platform-sdk/swirlds-common/src/main/{java => resources}/com/swirlds/common/formatting/TextEffect-Colors.png (100%) diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/formatting/TextEffect-Colors.png b/platform-sdk/swirlds-common/src/main/resources/com/swirlds/common/formatting/TextEffect-Colors.png similarity index 100% rename from platform-sdk/swirlds-common/src/main/java/com/swirlds/common/formatting/TextEffect-Colors.png rename to platform-sdk/swirlds-common/src/main/resources/com/swirlds/common/formatting/TextEffect-Colors.png From 8a79e31f6fe059e5d78372ad6d929b842302aba1 Mon Sep 17 00:00:00 2001 From: Hendrik Ebbers Date: Thu, 15 Jun 2023 19:31:20 +0200 Subject: [PATCH 43/70] Time interface is facade for OSTime (#7089) Signed-off-by: Hendrik Ebbers --- .../main/java/com/swirlds/base/time/Time.java | 13 ++++++ .../swirlds/base/time/internal}/OSTime.java | 7 ++- .../common/metrics/extensions/BusyTime.java | 5 +-- .../metrics/extensions/CountPerSecond.java | 8 ++-- .../platform/DefaultRunningAverageMetric.java | 3 +- .../platform/DefaultSpeedometerMetric.java | 3 +- .../metrics/platform/SnapshotService.java | 3 +- .../statistics/StatsRunningAverage.java | 3 +- .../common/statistics/StatsSpeedometer.java | 3 +- .../statistics/internal/StatsBuffer.java | 3 +- .../QueueThreadMetricsConfiguration.java | 5 +-- .../com/swirlds/platform/SwirldsPlatform.java | 7 ++- .../DefaultStateManagementComponent.java | 10 ++--- .../gossip/chatter/ChatterGossip.java | 3 +- .../state/SwirldStateManagerImpl.java | 4 +- .../platform/state/signed/SignedState.java | 4 +- .../state/signed/SignedStateSentinel.java | 3 +- .../platform/test/NoOpIntakeCycleStats.java | 4 +- .../chatter/SimulatedChatterFactories.java | 4 +- .../platform/test/consensus/TestIntake.java | 5 +-- .../test/chatter/ChatterCoreTests.java | 4 +- .../components/EventTaskDispatcherTest.java | 4 +- 
.../AsyncPreconsensusEventWriterTests.java | 10 ++--- .../PreconsensusEventFileManagerTests.java | 44 +++++++++---------- .../PreconsensusEventReplayWorkflowTests.java | 4 +- .../SyncPreconsensusEventWriterTests.java | 14 +++--- .../test/state/ConsensusHashManagerTests.java | 30 ++++++------- .../platform/test/state/IssHandlerTests.java | 30 ++++++------- 28 files changed, 121 insertions(+), 119 deletions(-) rename platform-sdk/{swirlds-common/src/main/java/com/swirlds/common/time => swirlds-base/src/main/java/com/swirlds/base/time/internal}/OSTime.java (89%) diff --git a/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/time/Time.java b/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/time/Time.java index ad7b8a24b2a0..4de3c46fdcb8 100644 --- a/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/time/Time.java +++ b/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/time/Time.java @@ -16,6 +16,8 @@ package com.swirlds.base.time; +import com.swirlds.base.time.internal.OSTime; +import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Instant; /** @@ -45,5 +47,16 @@ public interface Time { * * @return the curren time relative to the epoch */ + @NonNull Instant now(); + + /** + * Returns a {@link Time} instance + * + * @return a {@link Time} instance + */ + @NonNull + static Time getCurrent() { + return OSTime.getInstance(); + } } diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/time/OSTime.java b/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/time/internal/OSTime.java similarity index 89% rename from platform-sdk/swirlds-common/src/main/java/com/swirlds/common/time/OSTime.java rename to platform-sdk/swirlds-base/src/main/java/com/swirlds/base/time/internal/OSTime.java index e322b6adb5c1..b6cb315860dc 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/time/OSTime.java +++ b/platform-sdk/swirlds-base/src/main/java/com/swirlds/base/time/internal/OSTime.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2023 Hedera Hashgraph, LLC + * Copyright (C) 2023 Hedera Hashgraph, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,9 +14,10 @@ * limitations under the License. */ -package com.swirlds.common.time; +package com.swirlds.base.time.internal; import com.swirlds.base.time.Time; +import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Instant; /** @@ -31,6 +32,7 @@ private OSTime() {} /** * Get a static instance of a standard time implementation. 
*/ + @NonNull public static Time getInstance() { return instance; } @@ -54,6 +56,7 @@ public long currentTimeMillis() { /** * {@inheritDoc} */ + @NonNull @Override public Instant now() { return Instant.now(); diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/BusyTime.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/BusyTime.java index 6d7f57d70edd..d7975cc4a9e3 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/BusyTime.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/BusyTime.java @@ -21,7 +21,6 @@ import com.swirlds.common.metrics.FunctionGauge; import com.swirlds.common.metrics.Metrics; import com.swirlds.common.time.IntegerEpochTime; -import com.swirlds.common.time.OSTime; import com.swirlds.common.utility.ByteUtils; import com.swirlds.common.utility.StackTrace; import com.swirlds.common.utility.throttle.RateLimiter; @@ -55,10 +54,10 @@ public class BusyTime { private final RateLimiter logLimiter; /** - * The default constructor, uses the {@link OSTime} instance to get the current time + * The default constructor, uses the {@link Time#getCurrent()} instance to get the current time */ public BusyTime() { - this(OSTime.getInstance()); + this(Time.getCurrent()); } /** diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/CountPerSecond.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/CountPerSecond.java index 38900bb7ced0..09ee5af6f0af 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/CountPerSecond.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/extensions/CountPerSecond.java @@ -25,7 +25,6 @@ import com.swirlds.common.metrics.Metric; import com.swirlds.common.metrics.Metrics; import com.swirlds.common.time.IntegerEpochTime; -import com.swirlds.common.time.OSTime; import com.swirlds.common.utility.Units; import java.util.Objects; @@ -43,13 +42,12 @@ public class CountPerSecond { private final IntegerPairAccumulator accumulator; /** - * The default constructor, uses the {@link OSTime} + * The default constructor, uses the {@link Time#getCurrent()} * - * @param config - * the configuration for this metric + * @param config the configuration for this metric */ public CountPerSecond(final Metrics metrics, final CountPerSecond.Config config) { - this(metrics, config, new IntegerEpochTime(OSTime.getInstance())); + this(metrics, config, new IntegerEpochTime(Time.getCurrent())); } /** diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultRunningAverageMetric.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultRunningAverageMetric.java index 9c515dd16c0b..e2a5dfd21b5d 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultRunningAverageMetric.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultRunningAverageMetric.java @@ -20,7 +20,6 @@ import com.swirlds.common.metrics.RunningAverageMetric; import com.swirlds.common.statistics.StatsBuffered; import com.swirlds.common.statistics.StatsRunningAverage; -import com.swirlds.common.time.OSTime; import org.apache.commons.lang3.builder.ToStringBuilder; /** @@ -33,7 +32,7 @@ public class DefaultRunningAverageMetric extends AbstractDistributionMetric impl private final 
StatsRunningAverage runningAverage; public DefaultRunningAverageMetric(final RunningAverageMetric.Config config) { - this(config, OSTime.getInstance()); + this(config, Time.getCurrent()); } /** diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultSpeedometerMetric.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultSpeedometerMetric.java index 2635ebb4eb1f..68fcc0144d87 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultSpeedometerMetric.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/DefaultSpeedometerMetric.java @@ -22,7 +22,6 @@ import com.swirlds.common.metrics.SpeedometerMetric; import com.swirlds.common.statistics.StatsBuffered; import com.swirlds.common.statistics.StatsSpeedometer; -import com.swirlds.common.time.OSTime; import org.apache.commons.lang3.builder.ToStringBuilder; /** @@ -34,7 +33,7 @@ public class DefaultSpeedometerMetric extends AbstractDistributionMetric impleme private final StatsSpeedometer speedometer; public DefaultSpeedometerMetric(final SpeedometerMetric.Config config) { - this(config, OSTime.getInstance()); + this(config, Time.getCurrent()); } /** diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/SnapshotService.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/SnapshotService.java index 5a1b9212b2a3..411388105e7a 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/SnapshotService.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/metrics/platform/SnapshotService.java @@ -23,7 +23,6 @@ import com.swirlds.base.time.Time; import com.swirlds.common.metrics.Metric; import com.swirlds.common.metrics.config.MetricsConfig; -import com.swirlds.common.time.OSTime; import java.time.Duration; import java.util.List; import java.util.Map; @@ -83,7 +82,7 @@ public class SnapshotService implements Startable { */ public SnapshotService( final DefaultMetrics globalMetrics, final ScheduledExecutorService executor, final Duration interval) { - this(globalMetrics, executor, interval, OSTime.getInstance()); + this(globalMetrics, executor, interval, Time.getCurrent()); } // This method is just for testing and will be removed from the public API at some point. 
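A minimal sketch of the pattern this patch applies in the hunks above and below: production call sites move from OSTime.getInstance() to the new Time.getCurrent() facade, while constructors keep accepting a Time so tests can inject a fake clock. The UptimeTracker class is hypothetical and not part of the patch; only Time, Time.getCurrent() and Time#now() come from these diffs.

// Sketch of the injectable-clock pattern used throughout this patch. UptimeTracker is a
// hypothetical example class, not code from the repository.
import com.swirlds.base.time.Time;
import java.time.Duration;
import java.time.Instant;

final class UptimeTracker {
    private final Time time;
    private final Instant start;

    /** Production constructor, mirroring the one-arg constructors rewritten in this patch. */
    UptimeTracker() {
        this(Time.getCurrent());
    }

    /** Test constructor: any Time implementation (e.g. a fake clock) can be injected. */
    UptimeTracker(final Time time) {
        this.time = time;
        this.start = time.now();
    }

    /** Elapsed wall-clock time since construction, according to the injected Time. */
    Duration uptime() {
        return Duration.between(start, time.now());
    }
}

Keeping the Time-accepting constructor is the same injectable-clock approach visible in TestIntake further down this patch, while Time.getCurrent() replaces the direct OSTime dependency in production paths.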
diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsRunningAverage.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsRunningAverage.java index bd82eb2f6044..1b001b9e5d1d 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsRunningAverage.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsRunningAverage.java @@ -18,7 +18,6 @@ import com.swirlds.base.time.Time; import com.swirlds.common.statistics.internal.StatsBuffer; -import com.swirlds.common.time.OSTime; import com.swirlds.logging.LogMarker; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -104,7 +103,7 @@ public StatsRunningAverage() { */ @SuppressWarnings("removal") public StatsRunningAverage(final double halfLife) { - this(halfLife, OSTime.getInstance()); + this(halfLife, Time.getCurrent()); } /** diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsSpeedometer.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsSpeedometer.java index a357eb85001f..803d3d8d9a61 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsSpeedometer.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/StatsSpeedometer.java @@ -20,7 +20,6 @@ import com.swirlds.base.time.Time; import com.swirlds.common.statistics.internal.StatsBuffer; -import com.swirlds.common.time.OSTime; /** * This class measures how many times per second the cycle() method is called. It is recalculated every @@ -109,7 +108,7 @@ public StatsSpeedometer(final double halfLife) { */ @SuppressWarnings("removal") public StatsSpeedometer(final double halfLife, final boolean saveHistory) { - this(halfLife, saveHistory, OSTime.getInstance()); + this(halfLife, saveHistory, Time.getCurrent()); } /** diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/internal/StatsBuffer.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/internal/StatsBuffer.java index bf8d39c75a24..2d6336658a3b 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/internal/StatsBuffer.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/statistics/internal/StatsBuffer.java @@ -19,7 +19,6 @@ import static com.swirlds.common.utility.Units.NANOSECONDS_TO_SECONDS; import com.swirlds.base.time.Time; -import com.swirlds.common.time.OSTime; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.IntBinaryOperator; import java.util.function.IntUnaryOperator; @@ -148,7 +147,7 @@ public class StatsBuffer { * time it's called */ public StatsBuffer(final int maxBins, final double recentSeconds, final double startDelay) { - this(maxBins, recentSeconds, startDelay, OSTime.getInstance()); + this(maxBins, recentSeconds, startDelay, Time.getCurrent()); } public StatsBuffer(final int maxBins, final double recentSeconds, final double startDelay, final Time time) { diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/config/QueueThreadMetricsConfiguration.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/config/QueueThreadMetricsConfiguration.java index f83873cebe21..368705438d2e 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/config/QueueThreadMetricsConfiguration.java +++ 
b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/framework/config/QueueThreadMetricsConfiguration.java @@ -18,7 +18,6 @@ import com.swirlds.base.time.Time; import com.swirlds.common.metrics.Metrics; -import com.swirlds.common.time.OSTime; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Objects; @@ -31,8 +30,8 @@ public class QueueThreadMetricsConfiguration { /** The category to use for metrics */ private String category = Metrics.INTERNAL_CATEGORY; /** The time object to use for metrics */ - private Time time = OSTime.getInstance(); - /** If enabled, the max size metric will be applied to the queue.*/ + private Time time = Time.getCurrent(); + /** If enabled, the max size metric will be applied to the queue. */ private boolean maxSizeMetricEnabled; /** If enabled, the min size metric will be applied to the queue.*/ private boolean minSizeMetricEnabled; diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java index c344c81cc38d..25df5296b3d3 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java @@ -64,7 +64,6 @@ import com.swirlds.common.threading.framework.config.QueueThreadConfiguration; import com.swirlds.common.threading.framework.config.QueueThreadMetricsConfiguration; import com.swirlds.common.threading.manager.ThreadManager; -import com.swirlds.common.time.OSTime; import com.swirlds.common.utility.AutoCloseableWrapper; import com.swirlds.common.utility.Clearable; import com.swirlds.common.utility.LoggingClearables; @@ -316,7 +315,7 @@ public class SwirldsPlatform implements Platform, Startable { final boolean softwareUpgrade) { this.platformContext = Objects.requireNonNull(platformContext, "platformContext"); - final Time time = OSTime.getInstance(); + final Time time = Time.getCurrent(); final DispatchBuilder dispatchBuilder = new DispatchBuilder(platformContext.getConfiguration().getConfigData(DispatchConfiguration.class)); @@ -996,7 +995,7 @@ private EventLinker buildEventLinker(@NonNull final List core = new ChatterCore<>( - OSTime.getInstance(), + Time.getCurrent(), ChatterEvent.class, e -> {}, chatterConfig, diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/TestIntake.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/TestIntake.java index b672f8fb3418..652204f81f13 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/TestIntake.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/main/java/com/swirlds/platform/test/consensus/TestIntake.java @@ -23,7 +23,6 @@ import com.swirlds.common.config.singleton.ConfigurationHolder; import com.swirlds.common.system.NodeId; import com.swirlds.common.system.address.AddressBook; -import com.swirlds.common.time.OSTime; import com.swirlds.platform.Consensus; import com.swirlds.platform.ConsensusImpl; import com.swirlds.platform.components.EventIntake; @@ -70,7 +69,7 @@ public TestIntake(final AddressBook ab) { } public TestIntake(final AddressBook ab, final BiConsumer minGenConsumer) { - this(ab, minGenConsumer, OSTime.getInstance()); + this(ab, minGenConsumer, Time.getCurrent()); } public 
TestIntake(final AddressBook ab, final Time time) { @@ -82,7 +81,7 @@ public TestIntake(final AddressBook ab, final BiConsumer minGenConsu } public TestIntake(final AddressBook ab, final ConsensusConfig consensusConfig) { - this(ab, NOOP_MINGEN, OSTime.getInstance(), consensusConfig); + this(ab, NOOP_MINGEN, Time.getCurrent(), consensusConfig); } /** diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/ChatterCoreTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/ChatterCoreTests.java index 6d1febfdddcc..f5cd8e9e6334 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/ChatterCoreTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/chatter/ChatterCoreTests.java @@ -21,10 +21,10 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import com.swirlds.base.time.Time; import com.swirlds.common.metrics.noop.NoOpMetrics; import com.swirlds.common.system.NodeId; import com.swirlds.common.test.RandomUtils; -import com.swirlds.common.time.OSTime; import com.swirlds.config.api.Configuration; import com.swirlds.platform.event.GossipEvent; import com.swirlds.platform.gossip.chatter.config.ChatterConfig; @@ -55,7 +55,7 @@ void loadFromSignedStateTest() { final Random random = RandomUtils.getRandomPrintSeed(); final ChatterCore chatterCore = new ChatterCore<>( - OSTime.getInstance(), GossipEvent.class, (m) -> {}, chatterConfig, (id, l) -> {}, new NoOpMetrics()); + Time.getCurrent(), GossipEvent.class, (m) -> {}, chatterConfig, (id, l) -> {}, new NoOpMetrics()); chatterCore.newPeerInstance(new NodeId(0L), e -> {}); chatterCore.newPeerInstance(new NodeId(1L), e -> {}); diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskDispatcherTest.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskDispatcherTest.java index a601cd5a80ab..d9149e9f8b67 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskDispatcherTest.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/components/EventTaskDispatcherTest.java @@ -20,10 +20,10 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import com.swirlds.base.time.Time; import com.swirlds.common.system.NodeId; import com.swirlds.common.system.events.BaseEventHashedData; import com.swirlds.common.system.events.BaseEventUnhashedData; -import com.swirlds.common.time.OSTime; import com.swirlds.platform.components.EventCreator; import com.swirlds.platform.components.EventTaskDispatcher; import com.swirlds.platform.event.CreateEventTask; @@ -51,7 +51,7 @@ void test() { final Consumer intake = (Consumer) mock(Consumer.class); final EventTaskDispatcher dispatcher = new EventTaskDispatcher( - OSTime.getInstance(), + Time.getCurrent(), validator, creator, intake, diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/AsyncPreconsensusEventWriterTests.java 
b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/AsyncPreconsensusEventWriterTests.java index 846e3ef76448..3003b54f2874 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/AsyncPreconsensusEventWriterTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/AsyncPreconsensusEventWriterTests.java @@ -26,6 +26,7 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; +import com.swirlds.base.time.Time; import com.swirlds.common.constructable.ConstructableRegistry; import com.swirlds.common.constructable.ConstructableRegistryException; import com.swirlds.common.context.DefaultPlatformContext; @@ -44,7 +45,6 @@ import com.swirlds.common.test.fixtures.FakeTime; import com.swirlds.common.test.fixtures.TestRecycleBin; import com.swirlds.common.test.io.FileManipulation; -import com.swirlds.common.time.OSTime; import com.swirlds.config.api.Configuration; import com.swirlds.platform.event.preconsensus.AsyncPreconsensusEventWriter; import com.swirlds.platform.event.preconsensus.PreconsensusEventFile; @@ -182,7 +182,7 @@ static void verifyStream( } final PreconsensusEventFileManager reader = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); // Verify that the events were written correctly final PreconsensusEventMultiFileIterator eventsIterator = reader.getEventIterator(0, fixDiscontinuities); @@ -277,7 +277,7 @@ void overflowTest(final boolean artificialPauses) throws IOException, Interrupte final PlatformContext platformContext = buildContext(); final PreconsensusEventFileManager fileManager = new PreconsensusEventFileManager( - buildContext(), OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + buildContext(), Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer = new AsyncPreconsensusEventWriter( @@ -471,7 +471,7 @@ void restartSimulationTest(final boolean truncateLastFile) throws InterruptedExc final PlatformContext platformContext = buildContext(); final PreconsensusEventFileManager fileManager1 = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer1 = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer1 = new AsyncPreconsensusEventWriter( @@ -519,7 +519,7 @@ void restartSimulationTest(final boolean truncateLastFile) throws InterruptedExc } final PreconsensusEventFileManager fileManager2 = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer2 = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer2 = new AsyncPreconsensusEventWriter( platformContext, diff --git 
a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java index f509bf398276..df6760324eab 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java @@ -23,6 +23,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; +import com.swirlds.base.time.Time; import com.swirlds.common.config.StateConfig; import com.swirlds.common.context.DefaultPlatformContext; import com.swirlds.common.context.PlatformContext; @@ -36,7 +37,6 @@ import com.swirlds.common.system.NodeId; import com.swirlds.common.test.fixtures.FakeTime; import com.swirlds.common.test.fixtures.TestRecycleBin; -import com.swirlds.common.time.OSTime; import com.swirlds.common.utility.CompareTo; import com.swirlds.config.api.Configuration; import com.swirlds.platform.event.preconsensus.PreconsensusEventFile; @@ -135,7 +135,7 @@ void minimumDecreasesTest() throws IOException { assertThrows( IllegalStateException.class, () -> new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0))); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0))); } @Test @@ -153,7 +153,7 @@ void maximumDecreasesTest() throws IOException { assertThrows( IllegalStateException.class, () -> new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0))); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0))); } @Test @@ -171,7 +171,7 @@ void timestampDecreasesTest() throws IOException { assertThrows( IllegalStateException.class, () -> new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0))); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0))); } @Test @@ -213,7 +213,7 @@ void readFilesInOrderTest() throws IOException { final PlatformContext platformContext = buildContext(); final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); assertIteratorEquality( files.iterator(), manager.getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false)); @@ -274,7 +274,7 @@ void readFilesInOrderGapTest(final boolean permitGaps) throws IOException { if (permitGaps) { // Gaps are allowed. We should see all files except for the one that was skipped. 
final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); assertIteratorEquality( files.iterator(), @@ -284,7 +284,7 @@ void readFilesInOrderGapTest(final boolean permitGaps) throws IOException { assertThrows( IllegalStateException.class, () -> new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0))); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0))); } } @@ -326,7 +326,7 @@ void readFilesFromMiddleTest() throws IOException { final PlatformContext platformContext = buildContext(); final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); // For this test, we want to iterate over files so that we are guaranteed to observe every event // with a generation greater than or equal to the target generation. Choose a generation that falls @@ -405,7 +405,7 @@ void readFilesFromMiddleRepeatingGenerationsTest() throws IOException { final PlatformContext platformContext = buildContext(); final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); // For this test, we want to iterate over files so that we are guaranteed to observe every event // with a generation greater than or equal to the target generation. 
Choose a generation that falls @@ -477,7 +477,7 @@ void readFilesFromHighGenerationTest() throws IOException { final PlatformContext platformContext = buildContext(); final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); // Request a generation higher than all files in the data store final long targetGeneration = files.get(fileCount - 1).getMaximumGeneration() + 1; @@ -492,7 +492,7 @@ void readFilesFromEmptyStreamTest() throws IOException { final PlatformContext platformContext = buildContext(); final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); final Iterator iterator = manager.getFileIterator(1234, false); assertFalse(iterator.hasNext()); @@ -515,7 +515,7 @@ void generateDescriptorsWithManagerTest() throws IOException { Instant timestamp = Instant.now(); final PreconsensusEventFileManager generatingManager = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); for (int i = 0; i < fileCount; i++) { final PreconsensusEventFile file = @@ -534,7 +534,7 @@ void generateDescriptorsWithManagerTest() throws IOException { } final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); assertIteratorEquality( files.iterator(), manager.getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false)); @@ -606,7 +606,7 @@ void incrementalPruningByGenerationTest() throws IOException { // removing the in-memory descriptor without also removing the file on disk final List parsedFiles = new ArrayList<>(); new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)) + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)) .getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false) .forEachRemaining(parsedFiles::add); @@ -647,7 +647,7 @@ void incrementalPruningByGenerationTest() throws IOException { // removing the in-memory descriptor without also removing the file on disk final List parsedFiles = new ArrayList<>(); new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)) + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)) .getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false) .forEachRemaining(parsedFiles::add); @@ -729,7 +729,7 @@ void incrementalPruningByTimestampTest() throws IOException { // removing the in-memory descriptor without also removing the file on disk final List parsedFiles = new ArrayList<>(); new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)) + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)) .getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false) .forEachRemaining(parsedFiles::add); @@ -775,7 +775,7 @@ void 
incrementalPruningByTimestampTest() throws IOException { // removing the in-memory descriptor without also removing the file on disk final List parsedFiles = new ArrayList<>(); new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)) + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)) .getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false) .forEachRemaining(parsedFiles::add); @@ -875,7 +875,7 @@ void startAtFirstFileDiscontinuityInMiddleTest(final boolean startAtSpecificGene final RecycleBin recycleBin = RecycleBin.create(platformContext.getConfiguration(), new NodeId(0)); final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), recycleBin, new NodeId(0)); + new PreconsensusEventFileManager(platformContext, Time.getCurrent(), recycleBin, new NodeId(0)); // Don't try to fix discontinuities, we should see all files assertIteratorEquality( @@ -954,7 +954,7 @@ void startAtFirstFileDiscontinuityInFirstFileTest(final boolean startAtSpecificG final RecycleBin recycleBin = RecycleBin.create(platformContext.getConfiguration(), new NodeId(0)); final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), recycleBin, new NodeId(0)); + new PreconsensusEventFileManager(platformContext, Time.getCurrent(), recycleBin, new NodeId(0)); // Don't try to fix discontinuities, we should see all files assertIteratorEquality( @@ -1049,7 +1049,7 @@ void startAtMiddleFileDiscontinuityInMiddleTest() throws IOException { final RecycleBin recycleBin = RecycleBin.create(platformContext.getConfiguration(), new NodeId(0)); final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), recycleBin, new NodeId(0)); + new PreconsensusEventFileManager(platformContext, Time.getCurrent(), recycleBin, new NodeId(0)); // Don't try to fix discontinuities, we should see all files starting with the one we request assertIteratorEquality(files.iterator(), manager.getFileIterator(startGeneration, false)); @@ -1124,7 +1124,7 @@ void startAtDiscontinuityInMiddleTest() throws IOException { final RecycleBin recycleBin = RecycleBin.create(platformContext.getConfiguration(), new NodeId(0)); final PreconsensusEventFileManager manager = - new PreconsensusEventFileManager(platformContext, OSTime.getInstance(), recycleBin, new NodeId(0)); + new PreconsensusEventFileManager(platformContext, Time.getCurrent(), recycleBin, new NodeId(0)); // Don't try to fix discontinuities, we should see all files starting with the one we request assertIteratorEquality(postDiscontinuityFiles.iterator(), manager.getFileIterator(startGeneration, false)); @@ -1192,7 +1192,7 @@ void startAfterDiscontinuityInMiddleTest() throws IOException { final PlatformContext platformContext = buildContext(); final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); // Iterate without fixing discontinuities. 
assertIteratorEquality(files.iterator(), manager.getFileIterator(startGeneration, false)); diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventReplayWorkflowTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventReplayWorkflowTests.java index c5d65bab35a8..2b310d63247d 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventReplayWorkflowTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventReplayWorkflowTests.java @@ -31,10 +31,10 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import com.swirlds.base.time.Time; import com.swirlds.common.system.platformstatus.PlatformStatus; import com.swirlds.common.threading.framework.QueueThread; import com.swirlds.common.threading.manager.AdHocThreadManager; -import com.swirlds.common.time.OSTime; import com.swirlds.platform.components.EventTaskDispatcher; import com.swirlds.platform.components.state.StateManagementComponent; import com.swirlds.platform.event.EventIntakeTask; @@ -191,7 +191,7 @@ void testBasicReplayWorkflow() throws InterruptedException { replayPreconsensusEvents( TestPlatformContextBuilder.create().build(), AdHocThreadManager.getStaticThreadManager(), - OSTime.getInstance(), + Time.getCurrent(), preconsensusEventFileManager, preconsensusEventWriter, eventTaskDispatcher, diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/SyncPreconsensusEventWriterTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/SyncPreconsensusEventWriterTests.java index a6165507823c..75002337a1d3 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/SyncPreconsensusEventWriterTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/SyncPreconsensusEventWriterTests.java @@ -22,6 +22,7 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; +import com.swirlds.base.time.Time; import com.swirlds.common.constructable.ConstructableRegistry; import com.swirlds.common.constructable.ConstructableRegistryException; import com.swirlds.common.context.DefaultPlatformContext; @@ -34,7 +35,6 @@ import com.swirlds.common.system.NodeId; import com.swirlds.common.test.RandomUtils; import com.swirlds.common.test.fixtures.TestRecycleBin; -import com.swirlds.common.time.OSTime; import com.swirlds.config.api.Configuration; import com.swirlds.platform.event.preconsensus.PreconsensusEventFile; import com.swirlds.platform.event.preconsensus.PreconsensusEventFileManager; @@ -119,7 +119,7 @@ void standardOperationTest() throws IOException, InterruptedException { final PlatformContext platformContext = buildContext(); final PreconsensusEventFileManager fileManager = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer 
sequencer = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer = new SyncPreconsensusEventWriter(platformContext, fileManager); @@ -175,7 +175,7 @@ void stopFlushesEventsTest() throws IOException, InterruptedException { final PlatformContext platformContext = buildContext(); final PreconsensusEventFileManager fileManager = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer = new SyncPreconsensusEventWriter(platformContext, fileManager); @@ -231,7 +231,7 @@ void ancientEventTest() throws IOException, InterruptedException { final PlatformContext platformContext = buildContext(); final PreconsensusEventFileManager fileManager = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer = new SyncPreconsensusEventWriter(platformContext, fileManager); @@ -304,7 +304,7 @@ void overflowTest() throws IOException, InterruptedException { final PlatformContext platformContext = buildContext(); final PreconsensusEventFileManager fileManager = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer = new SyncPreconsensusEventWriter(platformContext, fileManager); @@ -347,7 +347,7 @@ void beginStreamingEventsTest() throws IOException, InterruptedException { final PlatformContext platformContext = buildContext(); final PreconsensusEventFileManager fileManager = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer = new SyncPreconsensusEventWriter(platformContext, fileManager); @@ -405,7 +405,7 @@ void discontinuityTest() throws IOException, InterruptedException { final PlatformContext platformContext = buildContext(); final PreconsensusEventFileManager fileManager = new PreconsensusEventFileManager( - platformContext, OSTime.getInstance(), TestRecycleBin.getInstance(), new NodeId(0)); + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); final PreconsensusEventStreamSequencer sequencer = new PreconsensusEventStreamSequencer(); final PreconsensusEventWriter writer = new SyncPreconsensusEventWriter(platformContext, fileManager); diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/ConsensusHashManagerTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/ConsensusHashManagerTests.java index 4d7283105aaa..f68d9e12a4c3 100644 --- 
a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/ConsensusHashManagerTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/ConsensusHashManagerTests.java @@ -31,6 +31,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; +import com.swirlds.base.time.Time; import com.swirlds.common.config.ConsensusConfig; import com.swirlds.common.config.StateConfig; import com.swirlds.common.crypto.Hash; @@ -38,7 +39,6 @@ import com.swirlds.common.system.address.Address; import com.swirlds.common.system.address.AddressBook; import com.swirlds.common.test.RandomAddressBookGenerator; -import com.swirlds.common.time.OSTime; import com.swirlds.platform.dispatch.DispatchBuilder; import com.swirlds.platform.dispatch.triggers.error.CatastrophicIssTrigger; import com.swirlds.platform.dispatch.triggers.error.SelfIssTrigger; @@ -83,8 +83,8 @@ void validSignaturesAfterHashTest() { .build(); final DispatchBuilder dispatchBuilder = new DispatchBuilder(getDefaultDispatchConfiguration()); - final ConsensusHashManager manager = new ConsensusHashManager( - OSTime.getInstance(), dispatchBuilder, addressBook, consensusConfig, stateConfig); + final ConsensusHashManager manager = + new ConsensusHashManager(Time.getCurrent(), dispatchBuilder, addressBook, consensusConfig, stateConfig); final AtomicBoolean fail = new AtomicBoolean(false); dispatchBuilder.registerObserver(this, SelfIssTrigger.class, (a, b, c) -> fail.set(true)); @@ -182,8 +182,8 @@ void mixedOrderTest() { } final DispatchBuilder dispatchBuilder = new DispatchBuilder(getDefaultDispatchConfiguration()); - final ConsensusHashManager manager = new ConsensusHashManager( - OSTime.getInstance(), dispatchBuilder, addressBook, consensusConfig, stateConfig); + final ConsensusHashManager manager = + new ConsensusHashManager(Time.getCurrent(), dispatchBuilder, addressBook, consensusConfig, stateConfig); final AtomicBoolean fail = new AtomicBoolean(false); final AtomicInteger issCount = new AtomicInteger(0); @@ -322,8 +322,8 @@ void earlyAddTest() { final NodeId selfId = addressBook.getNodeId(0); final DispatchBuilder dispatchBuilder = new DispatchBuilder(getDefaultDispatchConfiguration()); - final ConsensusHashManager manager = new ConsensusHashManager( - OSTime.getInstance(), dispatchBuilder, addressBook, consensusConfig, stateConfig); + final ConsensusHashManager manager = + new ConsensusHashManager(Time.getCurrent(), dispatchBuilder, addressBook, consensusConfig, stateConfig); dispatchBuilder.registerObserver( this, CatastrophicIssTrigger.class, (a, b) -> fail("did not expect catastrophic ISS")); @@ -386,8 +386,8 @@ void lateAddTest() { final NodeId selfId = addressBook.getNodeId(0); final DispatchBuilder dispatchBuilder = new DispatchBuilder(getDefaultDispatchConfiguration()); - final ConsensusHashManager manager = new ConsensusHashManager( - OSTime.getInstance(), dispatchBuilder, addressBook, consensusConfig, stateConfig); + final ConsensusHashManager manager = + new ConsensusHashManager(Time.getCurrent(), dispatchBuilder, addressBook, consensusConfig, stateConfig); dispatchBuilder.registerObserver( this, CatastrophicIssTrigger.class, (a, b) -> fail("did not expect catastrophic ISS")); @@ -436,8 +436,8 @@ void shiftBeforeCompleteTest() { final NodeId selfId = addressBook.getNodeId(0); final DispatchBuilder dispatchBuilder = new DispatchBuilder(getDefaultDispatchConfiguration()); - 
final ConsensusHashManager manager = new ConsensusHashManager( - OSTime.getInstance(), dispatchBuilder, addressBook, consensusConfig, stateConfig); + final ConsensusHashManager manager = + new ConsensusHashManager(Time.getCurrent(), dispatchBuilder, addressBook, consensusConfig, stateConfig); final AtomicInteger issCount = new AtomicInteger(); dispatchBuilder.registerObserver(this, CatastrophicIssTrigger.class, (a, b) -> issCount.getAndIncrement()); @@ -526,8 +526,8 @@ void catastrophicShiftBeforeCompleteTest() { final NodeId selfId = addressBook.getNodeId(0); final DispatchBuilder dispatchBuilder = new DispatchBuilder(getDefaultDispatchConfiguration()); - final ConsensusHashManager manager = new ConsensusHashManager( - OSTime.getInstance(), dispatchBuilder, addressBook, consensusConfig, stateConfig); + final ConsensusHashManager manager = + new ConsensusHashManager(Time.getCurrent(), dispatchBuilder, addressBook, consensusConfig, stateConfig); final AtomicInteger issCount = new AtomicInteger(); dispatchBuilder.registerObserver(this, CatastrophicIssTrigger.class, (a, b) -> issCount.getAndIncrement()); @@ -589,8 +589,8 @@ void bigShiftTest() { final NodeId selfId = addressBook.getNodeId(0); final DispatchBuilder dispatchBuilder = new DispatchBuilder(getDefaultDispatchConfiguration()); - final ConsensusHashManager manager = new ConsensusHashManager( - OSTime.getInstance(), dispatchBuilder, addressBook, consensusConfig, stateConfig); + final ConsensusHashManager manager = + new ConsensusHashManager(Time.getCurrent(), dispatchBuilder, addressBook, consensusConfig, stateConfig); final AtomicInteger issCount = new AtomicInteger(); dispatchBuilder.registerObserver(this, CatastrophicIssTrigger.class, (a, b) -> issCount.getAndIncrement()); diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/IssHandlerTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/IssHandlerTests.java index 665449a14fc7..af761e8ab977 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/IssHandlerTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/state/IssHandlerTests.java @@ -23,10 +23,10 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; +import com.swirlds.base.time.Time; import com.swirlds.common.config.StateConfig; import com.swirlds.common.crypto.Hash; import com.swirlds.common.system.NodeId; -import com.swirlds.common.time.OSTime; import com.swirlds.config.api.Configuration; import com.swirlds.platform.components.common.output.FatalErrorConsumer; import com.swirlds.platform.components.state.output.IssConsumer; @@ -68,7 +68,7 @@ void hashDisagreementFromSelf() { final FatalErrorConsumer fatalErrorConsumer = (msg, t, code) -> shutdownCount.getAndIncrement(); final IssHandler handler = new IssHandler( - OSTime.getInstance(), + Time.getCurrent(), dispatchBuilder, stateConfig, new NodeId(selfId), @@ -115,7 +115,7 @@ void hashDisagreementAlwaysFreeze() { final FatalErrorConsumer fatalErrorConsumer = (msg, t, code) -> shutdownCount.getAndIncrement(); final IssHandler handler = new IssHandler( - OSTime.getInstance(), + Time.getCurrent(), dispatchBuilder, stateConfig, new NodeId(selfId), @@ -168,7 +168,7 @@ void hashDisagreementAlwaysDump() { final FatalErrorConsumer fatalErrorConsumer = (msg, t, code) -> 
shutdownCount.getAndIncrement(); final IssHandler handler = new IssHandler( - OSTime.getInstance(), + Time.getCurrent(), dispatchBuilder, stateConfig, new NodeId(selfId), @@ -217,7 +217,7 @@ void hashDisagreementNoAction() { final FatalErrorConsumer fatalErrorConsumer = (msg, t, code) -> shutdownCount.getAndIncrement(); final IssHandler handler = new IssHandler( - OSTime.getInstance(), + Time.getCurrent(), dispatchBuilder, stateConfig, new NodeId(selfId), @@ -263,7 +263,7 @@ void hashDisagreementFreezeAndDump() { final FatalErrorConsumer fatalErrorConsumer = (msg, t, code) -> shutdownCount.getAndIncrement(); final IssHandler handler = new IssHandler( - OSTime.getInstance(), + Time.getCurrent(), dispatchBuilder, stateConfig, new NodeId(selfId), @@ -317,7 +317,7 @@ void selfIssAutomatedRecovery() { final FatalErrorConsumer fatalErrorConsumer = (msg, t, code) -> shutdownCount.getAndIncrement(); final IssHandler handler = new IssHandler( - OSTime.getInstance(), + Time.getCurrent(), dispatchBuilder, stateConfig, new NodeId(selfId), @@ -360,7 +360,7 @@ void selfIssNoAction() { final FatalErrorConsumer fatalErrorConsumer = (msg, t, code) -> shutdownCount.getAndIncrement(); final IssHandler handler = new IssHandler( - OSTime.getInstance(), + Time.getCurrent(), dispatchBuilder, stateConfig, new NodeId(selfId), @@ -407,7 +407,7 @@ void selfIssAlwaysFreeze() { final FatalErrorConsumer fatalErrorConsumer = (msg, t, code) -> shutdownCount.getAndIncrement(); final IssHandler handler = new IssHandler( - OSTime.getInstance(), + Time.getCurrent(), dispatchBuilder, stateConfig, new NodeId(selfId), @@ -461,7 +461,7 @@ void selfIssAlwaysDump() { final FatalErrorConsumer fatalErrorConsumer = (msg, t, code) -> shutdownCount.getAndIncrement(); final IssHandler handler = new IssHandler( - OSTime.getInstance(), + Time.getCurrent(), dispatchBuilder, stateConfig, new NodeId(selfId), @@ -511,7 +511,7 @@ void catastrophicIssNoAction() { final FatalErrorConsumer fatalErrorConsumer = (msg, t, code) -> shutdownCount.getAndIncrement(); final IssHandler handler = new IssHandler( - OSTime.getInstance(), + Time.getCurrent(), dispatchBuilder, stateConfig, new NodeId(selfId), @@ -558,7 +558,7 @@ void catastrophicIssAlwaysFreeze() { final FatalErrorConsumer fatalErrorConsumer = (msg, t, code) -> shutdownCount.getAndIncrement(); final IssHandler handler = new IssHandler( - OSTime.getInstance(), + Time.getCurrent(), dispatchBuilder, stateConfig, new NodeId(selfId), @@ -612,7 +612,7 @@ void catastrophicIssFreezeOnCatastrophic() { final FatalErrorConsumer fatalErrorConsumer = (msg, t, code) -> shutdownCount.getAndIncrement(); final IssHandler handler = new IssHandler( - OSTime.getInstance(), + Time.getCurrent(), dispatchBuilder, stateConfig, new NodeId(selfId), @@ -666,7 +666,7 @@ void catastrophicIssAlwaysDump() { final FatalErrorConsumer fatalErrorConsumer = (msg, t, code) -> shutdownCount.getAndIncrement(); final IssHandler handler = new IssHandler( - OSTime.getInstance(), + Time.getCurrent(), dispatchBuilder, stateConfig, new NodeId(selfId), @@ -708,7 +708,7 @@ void issConsumerTest() { final Configuration configuration = new TestConfigBuilder().getOrCreateConfig(); final StateConfig stateConfig = configuration.getConfigData(StateConfig.class); final IssHandler issHandler = new IssHandler( - OSTime.getInstance(), + Time.getCurrent(), dispatchBuilder, stateConfig, new NodeId(0L), From 3d05146e44145c71f7535aeb8c7aaa4c01a0c83f Mon Sep 17 00:00:00 2001 From: Lev Povolotsky <16233475+povolev15@users.noreply.github.com> Date: Thu, 15 
Jun 2023 16:38:03 -0400 Subject: [PATCH 44/70] add bi-direction file translator (for migration) (#7115) Signed-off-by: Lev Povolotsky --- .../codec/FileServiceStateTranslator.java | 113 +++++++++ .../src/main/java/module-info.java | 1 + ...HandlerTestBase.java => FileTestBase.java} | 41 +++- .../impl/test/ReadableFileStoreImplTest.java | 3 +- .../impl/test/WritableFileStoreImplTest.java | 3 +- .../codec/FileServiceStateTranslatorTest.java | 224 ++++++++++++++++++ ...ndHandlerTest.java => FileAppendTest.java} | 3 +- ...teHandlerTest.java => FileCreateTest.java} | 3 +- ...teHandlerTest.java => FileDeleteTest.java} | 3 +- ...dlerTest.java => FileGetContentsTest.java} | 3 +- ...oHandlerTest.java => FileGetInfoTest.java} | 3 +- ...lerTest.java => FileSystemDeleteTest.java} | 3 +- ...rTest.java => FileSystemUndeleteTest.java} | 3 +- ...teHandlerTest.java => FileUpdateTest.java} | 3 +- .../src/main/java/module-info.java | 6 +- 15 files changed, 399 insertions(+), 16 deletions(-) create mode 100644 hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/codec/FileServiceStateTranslator.java rename hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/{handlers/FileHandlerTestBase.java => FileTestBase.java} (81%) create mode 100644 hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/codec/FileServiceStateTranslatorTest.java rename hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/{FileAppendHandlerTest.java => FileAppendTest.java} (98%) rename hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/{FileCreateHandlerTest.java => FileCreateTest.java} (99%) rename hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/{FileDeleteHandlerTest.java => FileDeleteTest.java} (98%) rename hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/{FileGetContentsHandlerTest.java => FileGetContentsTest.java} (98%) rename hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/{FileGetInfoHandlerTest.java => FileGetInfoTest.java} (98%) rename hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/{FileSystemDeleteHandlerTest.java => FileSystemDeleteTest.java} (98%) rename hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/{FileSystemUndeleteHandlerTest.java => FileSystemUndeleteTest.java} (98%) rename hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/{FileUpdateHandlerTest.java => FileUpdateTest.java} (99%) diff --git a/hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/codec/FileServiceStateTranslator.java b/hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/codec/FileServiceStateTranslator.java new file mode 100644 index 000000000000..e39ccf60cca8 --- /dev/null +++ b/hedera-node/hedera-file-service-impl/src/main/java/com/hedera/node/app/service/file/impl/codec/FileServiceStateTranslator.java @@ -0,0 +1,113 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.file.impl.codec; + +import static java.util.Objects.requireNonNull; + +import com.hedera.hapi.node.base.FileID; +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.state.file.File; +import com.hedera.node.app.service.file.impl.ReadableFileStoreImpl; +import com.hedera.node.app.service.mono.files.HFileMeta; +import com.hedera.node.app.service.mono.pbj.PbjConverter; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import org.apache.commons.codec.DecoderException; + +/** + * The class is used to convert a {@link com.hedera.node.app.service.mono.files.HFileMeta} content and metadata to a {@link File} and vice versa during the migration process + */ +public class FileServiceStateTranslator { + + /** + * The method converts a {@link com.hedera.node.app.service.mono.files.HederaFs} content and metadata to a {@link File} + * @param fileID old protobuf fileID that will be used to search in old state + * @param hederaFs old state + * @return new protobuf File + */ + @NonNull + public static File stateToPbj( + @NonNull com.hederahashgraph.api.proto.java.FileID fileID, + @NonNull com.hedera.node.app.service.mono.files.HederaFs hederaFs) { + requireNonNull(fileID); + return stateToPbj(hederaFs.cat(fileID), hederaFs.getattr(fileID), fileID); + } + + /** + * The method converts a {@link com.hedera.node.app.service.mono.files.HFileMeta} content and metadata to a {@link File} + * @param data content of the file in old state + * @param metadata metadata of the file in old state + * @param fileID old protobuf fileID that will be used in order to initialize the new protobuf file + * @return new protobuf File + */ + @NonNull + public static File stateToPbj( + @Nullable final byte[] data, + @NonNull final com.hedera.node.app.service.mono.files.HFileMeta metadata, + @NonNull final com.hederahashgraph.api.proto.java.FileID fileID) { + requireNonNull(metadata); + requireNonNull(fileID); + final var fileBuilder = new File.Builder(); + fileBuilder.fileNumber(fileID.getFileNum()); + fileBuilder.expirationTime(metadata.getExpiry()); + if (metadata.getWacl() != null) + fileBuilder.keys(PbjConverter.asPbjKey(metadata.getWacl()).keyList()); + if (data != null) fileBuilder.contents(Bytes.wrap(data)); + fileBuilder.memo(metadata.getMemo()); + fileBuilder.deleted(metadata.isDeleted()); + + return fileBuilder.build(); + } + + /** + * The method converts a {@link File} to a {@link com.hedera.node.app.service.mono.files.HFileMeta} content and metadata + * @param fileID new protobuf fileID that will be used to search in file store + * @param readableFileStore file store that will be used to search for the file + * @return File and metadata pair object + */ + @NonNull + public static FileMetadataAndContent pbjToState( + @NonNull FileID fileID, @NonNull ReadableFileStoreImpl readableFileStore) throws DecoderException { + requireNonNull(fileID); + requireNonNull(readableFileStore); + final var optionalFile = readableFileStore.getFileLeaf(fileID); + return 
pbjToState(optionalFile.orElseThrow(() -> new IllegalArgumentException("File not found"))); + } + + /** + * The method converts a {@link File} to a {@link FileMetadataAndContent} content and metadata + * @param file new protobuf file that will be used + * @return File and metadata pair object + */ + @NonNull + public static FileMetadataAndContent pbjToState(@NonNull File file) throws DecoderException { + requireNonNull(file); + var keys = (file.hasKeys()) + ? com.hedera.node.app.service.mono.legacy.core.jproto.JKey.convertKey( + Key.newBuilder().keyList(file.keys()).build(), 1) + : null; + com.hedera.node.app.service.mono.files.HFileMeta hFileMeta = + new HFileMeta(file.deleted(), keys, file.expirationTime(), file.memo()); + final byte[] data = (file.contents() == null) ? null : file.contents().toByteArray(); + return new FileMetadataAndContent(data, hFileMeta); + } + + @SuppressWarnings("java:S6218") + public record FileMetadataAndContent( + @Nullable byte[] data, @NonNull com.hedera.node.app.service.mono.files.HFileMeta metadata) {} +} diff --git a/hedera-node/hedera-file-service-impl/src/main/java/module-info.java b/hedera-node/hedera-file-service-impl/src/main/java/module-info.java index 46ab1c3bfb77..28898406effc 100644 --- a/hedera-node/hedera-file-service-impl/src/main/java/module-info.java +++ b/hedera-node/hedera-file-service-impl/src/main/java/module-info.java @@ -13,6 +13,7 @@ requires com.swirlds.config; requires org.apache.commons.lang3; requires org.apache.logging.log4j; + requires org.apache.commons.codec; requires static com.github.spotbugs.annotations; provides com.hedera.node.app.service.file.FileService with diff --git a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileHandlerTestBase.java b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/FileTestBase.java similarity index 81% rename from hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileHandlerTestBase.java rename to hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/FileTestBase.java index b4ba4a3bf8d9..0215ecc1115f 100644 --- a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileHandlerTestBase.java +++ b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/FileTestBase.java @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package com.hedera.node.app.service.file.impl.test.handlers; +package com.hedera.node.app.service.file.impl.test; import static com.hedera.node.app.service.mono.pbj.PbjConverter.protoToPbj; import static com.hedera.test.utils.IdUtils.asAccount; @@ -34,6 +34,8 @@ import com.hedera.hapi.node.state.file.File; import com.hedera.node.app.service.file.impl.ReadableFileStoreImpl; import com.hedera.node.app.service.file.impl.WritableFileStoreImpl; +import com.hedera.node.app.service.mono.legacy.core.jproto.JEd25519Key; +import com.hedera.node.app.service.mono.legacy.core.jproto.JKeyList; import com.hedera.node.app.spi.fixtures.state.MapReadableKVState; import com.hedera.node.app.spi.fixtures.state.MapWritableKVState; import com.hedera.node.app.spi.state.ReadableStates; @@ -41,13 +43,14 @@ import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.List; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -public class FileHandlerTestBase { +public class FileTestBase { protected static final String FILES = "FILES"; protected final Key key = A_COMPLEX_KEY; protected final Key anotherKey = B_COMPLEX_KEY; @@ -58,10 +61,19 @@ public class FileHandlerTestBase { protected final Bytes contentsBytes = Bytes.wrap(contents); protected final KeyList keys = A_KEY_LIST.keyList(); + protected final JKeyList jKeyList = new JKeyList(List.of( + new JEd25519Key("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".getBytes()), + new JEd25519Key("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb".getBytes()), + new JEd25519Key("cccccccccccccccccccccccccccccccc".getBytes()))); + protected final KeyList anotherKeys = B_KEY_LIST.keyList(); protected final FileID fileId = FileID.newBuilder().fileNum(1_234L).build(); protected final FileID fileSystemfileId = FileID.newBuilder().fileNum(250L).build(); + protected final com.hederahashgraph.api.proto.java.FileID monoFileID = + com.hederahashgraph.api.proto.java.FileID.newBuilder() + .setFileNum(1_234L) + .build(); protected final Duration WELL_KNOWN_AUTO_RENEW_PERIOD = Duration.newBuilder().seconds(100).build(); protected final Timestamp WELL_KNOWN_EXPIRY = @@ -84,6 +96,10 @@ public class FileHandlerTestBase { protected File fileSystem; + protected File fileWithNoKeysAndMemo; + + protected File fileWithNoContent; + @Mock protected ReadableStates readableStates; @@ -160,6 +176,8 @@ protected void givenValidFile(boolean deleted) { protected void givenValidFile(boolean deleted, boolean withKeys) { file = new File(fileId.fileNum(), expirationTime, withKeys ? keys : null, Bytes.wrap(contents), memo, deleted); + fileWithNoKeysAndMemo = new File(fileId.fileNum(), expirationTime, null, Bytes.wrap(contents), null, deleted); + fileWithNoContent = new File(fileId.fileNum(), expirationTime, withKeys ? 
keys : null, null, memo, deleted); fileSystem = new File( fileSystemfileId.fileNum(), expirationTime, @@ -179,4 +197,23 @@ protected File createFile() { .deleted(true) .build(); } + + protected File createFileEmptyMemoAndKeys() { + return new File.Builder() + .fileNumber(fileId.fileNum()) + .expirationTime(expirationTime) + .contents(Bytes.wrap(contents)) + .deleted(true) + .build(); + } + + protected File createFileWithoutContent() { + return new File.Builder() + .fileNumber(fileId.fileNum()) + .expirationTime(expirationTime) + .keys(keys) + .memo(memo) + .deleted(true) + .build(); + } } diff --git a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/ReadableFileStoreImplTest.java b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/ReadableFileStoreImplTest.java index 0eab1ab7c967..f7f012b8d732 100644 --- a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/ReadableFileStoreImplTest.java +++ b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/ReadableFileStoreImplTest.java @@ -22,13 +22,12 @@ import com.hedera.hapi.node.state.file.File; import com.hedera.node.app.service.file.impl.ReadableFileStoreImpl; -import com.hedera.node.app.service.file.impl.test.handlers.FileHandlerTestBase; import com.hedera.node.app.spi.fixtures.state.MapReadableKVState; import com.hedera.pbj.runtime.io.buffer.Bytes; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -class ReadableFileStoreImplTest extends FileHandlerTestBase { +class ReadableFileStoreImplTest extends FileTestBase { private ReadableFileStoreImpl subject; @BeforeEach diff --git a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/WritableFileStoreImplTest.java b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/WritableFileStoreImplTest.java index b7283c333852..3ea5db9e27a5 100644 --- a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/WritableFileStoreImplTest.java +++ b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/WritableFileStoreImplTest.java @@ -20,14 +20,13 @@ import com.hedera.hapi.node.state.file.File; import com.hedera.node.app.service.file.impl.WritableFileStoreImpl; -import com.hedera.node.app.service.file.impl.test.handlers.FileHandlerTestBase; import java.util.Optional; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class WritableFileStoreImplTest extends FileHandlerTestBase { +class WritableFileStoreImplTest extends FileTestBase { private File file; @Test diff --git a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/codec/FileServiceStateTranslatorTest.java b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/codec/FileServiceStateTranslatorTest.java new file mode 100644 index 000000000000..a744ead4df61 --- /dev/null +++ b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/codec/FileServiceStateTranslatorTest.java @@ -0,0 +1,224 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with 
the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.file.impl.test.codec; + +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.mockito.BDDMockito.given; + +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.state.file.File; +import com.hedera.node.app.service.file.impl.codec.FileServiceStateTranslator; +import com.hedera.node.app.service.file.impl.codec.FileServiceStateTranslator.FileMetadataAndContent; +import com.hedera.node.app.service.file.impl.test.FileTestBase; +import com.hedera.node.app.service.mono.files.HFileMeta; +import com.hedera.node.app.service.mono.utils.MiscUtils; +import org.apache.commons.codec.DecoderException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +public class FileServiceStateTranslatorTest extends FileTestBase { + + @Mock + private com.hedera.node.app.service.mono.files.HederaFs hederaFs; + + @BeforeEach + void setUp() {} + + @Test + void createFileMetadataAndContentFromFile() throws DecoderException { + final var existingFile = readableStore.getFileMetadata(fileId); + assertFalse(existingFile.deleted()); + + final FileMetadataAndContent convertedFile = FileServiceStateTranslator.pbjToState(file); + + assertArrayEquals( + convertedFile.data(), getExpectedMonoFileMetaAndContent().data()); + assertEquals( + convertedFile.metadata().getExpiry(), + getExpectedMonoFileMetaAndContent().metadata().getExpiry()); + assertEquals( + convertedFile.metadata().getMemo(), + getExpectedMonoFileMetaAndContent().metadata().getMemo()); + assertEquals( + MiscUtils.describe(convertedFile.metadata().getWacl()), + MiscUtils.describe( + getExpectedMonoFileMetaAndContent().metadata().getWacl())); + assertEquals( + convertedFile.metadata().isDeleted(), + getExpectedMonoFileMetaAndContent().metadata().isDeleted()); + } + + @Test + void createFileMetadataAndContentFromFileWithEmptyKeysAndMemo() throws DecoderException { + final var existingFile = readableStore.getFileMetadata(fileId); + assertFalse(existingFile.deleted()); + + final FileMetadataAndContent convertedFile = FileServiceStateTranslator.pbjToState(fileWithNoKeysAndMemo); + + assertArrayEquals( + convertedFile.data(), + getExpectedMonoFileMetaAndContentWithEmptyMemoAndKeys().data()); + assertEquals( + convertedFile.metadata().getExpiry(), + getExpectedMonoFileMetaAndContentWithEmptyMemoAndKeys() + .metadata() + .getExpiry()); + assertEquals( + convertedFile.metadata().getMemo(), + getExpectedMonoFileMetaAndContentWithEmptyMemoAndKeys() + .metadata() + .getMemo()); + assertEquals( + MiscUtils.describe(convertedFile.metadata().getWacl()), + MiscUtils.describe(getExpectedMonoFileMetaAndContentWithEmptyMemoAndKeys() + .metadata() + .getWacl())); + assertEquals( + convertedFile.metadata().isDeleted(), + 
getExpectedMonoFileMetaAndContentWithEmptyMemoAndKeys() + .metadata() + .isDeleted()); + } + + @Test + void createFileMetadataAndContentFromFileWithEmptyContentForDeletedFile() throws DecoderException { + + final FileMetadataAndContent convertedFile = FileServiceStateTranslator.pbjToState(fileWithNoContent); + + assertArrayEquals( + convertedFile.data(), + getExpectedMonoFileMetaAndContentEmptyContent().data()); + assertEquals( + convertedFile.metadata().getExpiry(), + getExpectedMonoFileMetaAndContentEmptyContent().metadata().getExpiry()); + assertEquals( + convertedFile.metadata().getMemo(), + getExpectedMonoFileMetaAndContentEmptyContent().metadata().getMemo()); + assertEquals( + MiscUtils.describe(convertedFile.metadata().getWacl()), + MiscUtils.describe(getExpectedMonoFileMetaAndContentEmptyContent() + .metadata() + .getWacl())); + assertEquals( + convertedFile.metadata().isDeleted(), + getExpectedMonoFileMetaAndContentEmptyContent().metadata().isDeleted()); + } + + @Test + void createFileMetadataAndContentFromReadableFileStore() throws DecoderException { + final var existingFile = readableStore.getFileMetadata(fileId); + assertFalse(existingFile.deleted()); + + final FileMetadataAndContent convertedFile = FileServiceStateTranslator.pbjToState(fileId, readableStore); + + assertArrayEquals( + convertedFile.data(), getExpectedMonoFileMetaAndContent().data()); + assertEquals( + convertedFile.metadata().getExpiry(), + getExpectedMonoFileMetaAndContent().metadata().getExpiry()); + assertEquals( + convertedFile.metadata().getMemo(), + getExpectedMonoFileMetaAndContent().metadata().getMemo()); + assertEquals( + MiscUtils.describe(convertedFile.metadata().getWacl()), + MiscUtils.describe( + getExpectedMonoFileMetaAndContent().metadata().getWacl())); + assertEquals( + convertedFile.metadata().isDeleted(), + getExpectedMonoFileMetaAndContent().metadata().isDeleted()); + } + + @Test + void createFileFromMetadataContentAndFileId() { + final byte[] data = contents; + final com.hedera.node.app.service.mono.files.HFileMeta metadata = + new HFileMeta(true, jKeyList, expirationTime, memo); + + final com.hederahashgraph.api.proto.java.FileID fileID = monoFileID; + + final File convertedFile = FileServiceStateTranslator.stateToPbj(data, metadata, fileID); + + assertEquals(createFile(), convertedFile); + } + + @Test + void createFileFromMetadataContentAndFileIdWithEmptyMemoAndKeys() { + final byte[] data = contents; + final com.hedera.node.app.service.mono.files.HFileMeta metadata = new HFileMeta(true, null, expirationTime); + + final com.hederahashgraph.api.proto.java.FileID fileID = monoFileID; + + final File convertedFile = FileServiceStateTranslator.stateToPbj(data, metadata, fileID); + + assertEquals(createFileEmptyMemoAndKeys(), convertedFile); + } + + @Test + void createFileFromMetadataContentAndFileIdWithoutContentForDeletedFile() { + final byte[] data = null; + final com.hedera.node.app.service.mono.files.HFileMeta metadata = + new HFileMeta(true, jKeyList, expirationTime, memo); + + final com.hederahashgraph.api.proto.java.FileID fileID = monoFileID; + + final File convertedFile = FileServiceStateTranslator.stateToPbj(data, metadata, fileID); + + assertEquals(createFileWithoutContent(), convertedFile); + } + + @Test + void createFileFromFileIDAndHederaFs() { + + final com.hederahashgraph.api.proto.java.FileID fileID = monoFileID; + final com.hedera.node.app.service.mono.files.HFileMeta metadata = + new HFileMeta(true, jKeyList, expirationTime, memo); + +
given(hederaFs.cat(fileID)).willReturn(contents); + given(hederaFs.getattr(fileID)).willReturn(metadata); + final File convertedFile = FileServiceStateTranslator.stateToPbj(fileID, hederaFs); + + assertEquals(createFile(), convertedFile); + } + + private FileMetadataAndContent getExpectedMonoFileMetaAndContent() throws DecoderException { + var keys = com.hedera.node.app.service.mono.legacy.core.jproto.JKey.convertKey( + Key.newBuilder().keyList(file.keys()).build(), 1); + com.hedera.node.app.service.mono.files.HFileMeta hFileMeta = + new HFileMeta(file.deleted(), keys, file.expirationTime(), file.memo()); + return new FileMetadataAndContent(file.contents().toByteArray(), hFileMeta); + } + + private FileMetadataAndContent getExpectedMonoFileMetaAndContentWithEmptyMemoAndKeys() { + com.hedera.node.app.service.mono.files.HFileMeta hFileMeta = + new HFileMeta(file.deleted(), null, file.expirationTime(), null); + return new FileMetadataAndContent(file.contents().toByteArray(), hFileMeta); + } + + private FileMetadataAndContent getExpectedMonoFileMetaAndContentEmptyContent() throws DecoderException { + var keys = com.hedera.node.app.service.mono.legacy.core.jproto.JKey.convertKey( + Key.newBuilder().keyList(file.keys()).build(), 1); + com.hedera.node.app.service.mono.files.HFileMeta hFileMeta = + new HFileMeta(file.deleted(), keys, file.expirationTime(), file.memo()); + return new FileMetadataAndContent(null, hFileMeta); + } +} diff --git a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileAppendHandlerTest.java b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileAppendTest.java similarity index 98% rename from hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileAppendHandlerTest.java rename to hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileAppendTest.java index a12e5b865c98..8d8a2b00d7e8 100644 --- a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileAppendHandlerTest.java +++ b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileAppendTest.java @@ -33,6 +33,7 @@ import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.service.file.impl.WritableFileStoreImpl; import com.hedera.node.app.service.file.impl.handlers.FileAppendHandler; +import com.hedera.node.app.service.file.impl.test.FileTestBase; import com.hedera.node.app.spi.validation.AttributeValidator; import com.hedera.node.app.spi.validation.ExpiryMeta; import com.hedera.node.app.spi.validation.ExpiryValidator; @@ -48,7 +49,7 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class FileAppendHandlerTest extends FileHandlerTestBase { +class FileAppendTest extends FileTestBase { private static final FileID WELL_KNOWN_FILE_ID = FileID.newBuilder().fileNum(1L).build(); diff --git a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileCreateHandlerTest.java b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileCreateTest.java similarity index 99% rename from hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileCreateHandlerTest.java rename to 
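// Aside (illustrative sketch, not part of this patch): the tests above exercise both directions of
// FileServiceStateTranslator. Assuming only the signatures shown in this test class
// (pbjToState(File) and stateToPbj(byte[], HFileMeta, FileID)), a round-trip helper could look like
// the following; the class and method names here are hypothetical.
import com.hedera.hapi.node.state.file.File;
import com.hedera.node.app.service.file.impl.codec.FileServiceStateTranslator;
import com.hederahashgraph.api.proto.java.FileID;
import org.apache.commons.codec.DecoderException;

final class TranslatorRoundTripSketch {
    // Converts a PBJ File to the mono-service (HFileMeta + contents) view and back again.
    static File roundTrip(final File pbjFile, final FileID protoFileId) throws DecoderException {
        final var monoView = FileServiceStateTranslator.pbjToState(pbjFile);
        return FileServiceStateTranslator.stateToPbj(monoView.data(), monoView.metadata(), protoFileId);
    }
}
// Per the assertions above, roundTrip(file, monoFileID) is expected to reproduce the original File.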
hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileCreateTest.java index 5f4ccd494287..820205a858d9 100644 --- a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileCreateHandlerTest.java +++ b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileCreateTest.java @@ -47,6 +47,7 @@ import com.hedera.node.app.service.file.impl.WritableFileStoreImpl; import com.hedera.node.app.service.file.impl.handlers.FileCreateHandler; import com.hedera.node.app.service.file.impl.records.CreateFileRecordBuilder; +import com.hedera.node.app.service.file.impl.test.FileTestBase; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; import com.hedera.node.app.spi.validation.AttributeValidator; @@ -66,7 +67,7 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class FileCreateHandlerTest extends FileHandlerTestBase { +class FileCreateTest extends FileTestBase { static final AccountID ACCOUNT_ID_3 = AccountID.newBuilder().accountNum(3L).build(); private static final AccountID AUTO_RENEW_ACCOUNT = AccountID.newBuilder().accountNum(4L).build(); diff --git a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileDeleteHandlerTest.java b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileDeleteTest.java similarity index 98% rename from hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileDeleteHandlerTest.java rename to hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileDeleteTest.java index c6a9a425a5d2..8a61d956e85f 100644 --- a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileDeleteHandlerTest.java +++ b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileDeleteTest.java @@ -42,6 +42,7 @@ import com.hedera.node.app.service.file.impl.ReadableFileStoreImpl; import com.hedera.node.app.service.file.impl.WritableFileStoreImpl; import com.hedera.node.app.service.file.impl.handlers.FileDeleteHandler; +import com.hedera.node.app.service.file.impl.test.FileTestBase; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; import com.hedera.node.app.spi.workflows.HandleContext; @@ -59,7 +60,7 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class FileDeleteHandlerTest extends FileHandlerTestBase { +class FileDeleteTest extends FileTestBase { @Mock private ReadableAccountStore accountStore; diff --git a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetContentsHandlerTest.java b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetContentsTest.java similarity index 98% rename from hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetContentsHandlerTest.java rename to hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetContentsTest.java index 4a9ab4510409..ec48b6cc6753 100644 
--- a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetContentsHandlerTest.java +++ b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetContentsTest.java @@ -42,6 +42,7 @@ import com.hedera.node.app.service.file.ReadableFileStore; import com.hedera.node.app.service.file.impl.ReadableFileStoreImpl; import com.hedera.node.app.service.file.impl.handlers.FileGetContentsHandler; +import com.hedera.node.app.service.file.impl.test.FileTestBase; import com.hedera.node.app.spi.fixtures.state.MapReadableKVState; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.QueryContext; @@ -53,7 +54,7 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class FileGetContentsHandlerTest extends FileHandlerTestBase { +class FileGetContentsTest extends FileTestBase { @Mock private QueryContext context; diff --git a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetInfoHandlerTest.java b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetInfoTest.java similarity index 98% rename from hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetInfoHandlerTest.java rename to hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetInfoTest.java index 3644d1a30dff..f66d08bb3bac 100644 --- a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetInfoHandlerTest.java +++ b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileGetInfoTest.java @@ -45,6 +45,7 @@ import com.hedera.node.app.service.file.ReadableFileStore; import com.hedera.node.app.service.file.impl.ReadableFileStoreImpl; import com.hedera.node.app.service.file.impl.handlers.FileGetInfoHandler; +import com.hedera.node.app.service.file.impl.test.FileTestBase; import com.hedera.node.app.spi.fixtures.state.MapReadableKVState; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.QueryContext; @@ -56,7 +57,7 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class FileGetInfoHandlerTest extends FileHandlerTestBase { +class FileGetInfoTest extends FileTestBase { @Mock(strictness = LENIENT) private QueryContext context; diff --git a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileSystemDeleteHandlerTest.java b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileSystemDeleteTest.java similarity index 98% rename from hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileSystemDeleteHandlerTest.java rename to hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileSystemDeleteTest.java index 84d732357b5c..ea7f216f102a 100644 --- a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileSystemDeleteHandlerTest.java +++ b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileSystemDeleteTest.java @@ -40,6 +40,7 @@ import 
com.hedera.node.app.service.file.impl.ReadableFileStoreImpl; import com.hedera.node.app.service.file.impl.WritableFileStoreImpl; import com.hedera.node.app.service.file.impl.handlers.FileSystemDeleteHandler; +import com.hedera.node.app.service.file.impl.test.FileTestBase; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; import com.hedera.node.app.spi.workflows.HandleContext; @@ -58,7 +59,7 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class FileSystemDeleteHandlerTest extends FileHandlerTestBase { +class FileSystemDeleteTest extends FileTestBase { @Mock private ReadableAccountStore accountStore; diff --git a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileSystemUndeleteHandlerTest.java b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileSystemUndeleteTest.java similarity index 98% rename from hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileSystemUndeleteHandlerTest.java rename to hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileSystemUndeleteTest.java index 1f2a04fa46ec..cba650037007 100644 --- a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileSystemUndeleteHandlerTest.java +++ b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileSystemUndeleteTest.java @@ -40,6 +40,7 @@ import com.hedera.node.app.service.file.impl.ReadableFileStoreImpl; import com.hedera.node.app.service.file.impl.WritableFileStoreImpl; import com.hedera.node.app.service.file.impl.handlers.FileSystemUndeleteHandler; +import com.hedera.node.app.service.file.impl.test.FileTestBase; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; import com.hedera.node.app.spi.workflows.HandleContext; @@ -58,7 +59,7 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class FileSystemUndeleteHandlerTest extends FileHandlerTestBase { +class FileSystemUndeleteTest extends FileTestBase { @Mock private ReadableAccountStore accountStore; diff --git a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileUpdateHandlerTest.java b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileUpdateTest.java similarity index 99% rename from hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileUpdateHandlerTest.java rename to hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileUpdateTest.java index c3c81c8db793..402357b4226d 100644 --- a/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileUpdateHandlerTest.java +++ b/hedera-node/hedera-file-service-impl/src/test/java/com/hedera/node/app/service/file/impl/test/handlers/FileUpdateTest.java @@ -37,6 +37,7 @@ import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.service.file.impl.WritableFileStoreImpl; import com.hedera.node.app.service.file.impl.handlers.FileUpdateHandler; +import 
com.hedera.node.app.service.file.impl.test.FileTestBase; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.spi.validation.AttributeValidator; import com.hedera.node.app.spi.validation.ExpiryMeta; @@ -52,7 +53,7 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class FileUpdateHandlerTest extends FileHandlerTestBase { +class FileUpdateTest extends FileTestBase { private static final FileID WELL_KNOWN_FILE_ID = FileID.newBuilder().fileNum(1L).build(); diff --git a/hedera-node/hedera-mono-service/src/main/java/module-info.java b/hedera-node/hedera-mono-service/src/main/java/module-info.java index 020547dd3c0e..c116353fc00d 100644 --- a/hedera-node/hedera-mono-service/src/main/java/module-info.java +++ b/hedera-node/hedera-mono-service/src/main/java/module-info.java @@ -32,7 +32,8 @@ com.hedera.node.app.service.consensus.impl, com.hedera.node.app, com.hedera.node.app.service.consensus.impl.test, - com.hedera.node.app.service.schedule.impl; + com.hedera.node.app.service.schedule.impl, + com.hedera.node.app.service.file.impl; exports com.hedera.node.app.service.mono.utils to com.hedera.node.app.service.mono.test.fixtures, com.hedera.node.app.service.schedule.impl, @@ -86,7 +87,8 @@ com.hedera.node.app; exports com.hedera.node.app.service.mono.files to com.hedera.node.app.service.mono.test.fixtures, - com.hedera.node.app; + com.hedera.node.app, + com.hedera.node.app.service.file.impl; exports com.hedera.node.app.service.mono.state.virtual.schedule to com.hedera.node.app.service.mono.test.fixtures, com.hedera.node.app.service.schedule.impl, From 5cc13e1ae1ee989db3606c38400e5fb86f684271 Mon Sep 17 00:00:00 2001 From: artemananiev <33361937+artemananiev@users.noreply.github.com> Date: Thu, 15 Jun 2023 14:00:43 -0700 Subject: [PATCH 45/70] 7053: MerkleDb metadata file must not be changed in saved states (#7059) Fixes: https://github.com/hashgraph/hedera-services/issues/7053 Reviewed-by: Ivan Malygin , Oleg Mazurov Signed-off-by: Artem Ananev --- .../java/com/swirlds/merkledb/MerkleDb.java | 22 +++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/platform-sdk/swirlds-jasperdb/src/main/java/com/swirlds/merkledb/MerkleDb.java b/platform-sdk/swirlds-jasperdb/src/main/java/com/swirlds/merkledb/MerkleDb.java index fcecff9dcab0..280744b8f3f5 100644 --- a/platform-sdk/swirlds-jasperdb/src/main/java/com/swirlds/merkledb/MerkleDb.java +++ b/platform-sdk/swirlds-jasperdb/src/main/java/com/swirlds/merkledb/MerkleDb.java @@ -209,13 +209,21 @@ private MerkleDb(final Path storageDir) { this.storageDir = storageDir; this.tableConfigs = loadMetadata(); try { - Files.createDirectories(getSharedDir()); - Files.createDirectories(getTablesDir()); + final Path sharedDir = getSharedDir(); + if (!Files.exists(sharedDir)) { + Files.createDirectories(sharedDir); + } + final Path tablesDir = getTablesDir(); + if (!Files.exists(tablesDir)) { + Files.createDirectories(tablesDir); + } } catch (IOException z) { throw new UncheckedIOException(z); } // If this is a new database, create the metadata file - storeMetadata(); + if (!Files.exists(storageDir.resolve(METADATA_FILENAME))) { + storeMetadata(); + } logger.info(MERKLE_DB.getMarker(), "New MerkleDb instance is created, storageDir={}", storageDir); } @@ -535,7 +543,13 @@ public static MerkleDb restore(final Path source, final Path target) throws IOEx final Path defaultInstancePath = (target != null) ? 
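// Aside (illustrative sketch, not part of this patch): the MerkleDb constructor change above guards
// both directory creation and metadata writing with Files.exists(), so reopening an existing database
// never rewrites files that may already belong to a saved state. A minimal, generic version of the
// same create-if-absent guard using only java.nio (all names below are hypothetical):
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;

final class CreateIfAbsentSketch {
    // Creates the storage directory and seeds a metadata file only when they are not already present.
    static void initOnce(final Path dir, final Path metadataFile, final byte[] freshMetadata) {
        try {
            if (!Files.exists(dir)) {
                Files.createDirectories(dir);
            }
            if (!Files.exists(metadataFile)) {
                Files.write(metadataFile, freshMetadata);
            }
        } catch (final IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}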
target : getDefaultPath(); if (!Files.exists(defaultInstancePath.resolve(METADATA_FILENAME))) { Files.createDirectories(defaultInstancePath); - hardLinkTree(source.resolve(METADATA_FILENAME), defaultInstancePath.resolve(METADATA_FILENAME)); + // For all data files, it's enough to create hard-links from the source dir to the + // target dir. However, hard-linking the metadata file wouldn't work. The target + // MerkleDb instance is mutable, e.g. new tables can be created in it and stored + // in DB metadata. With hard links, changing target metadata would also change the + // source metadata, which is strictly prohibited as existing saved states must + // never be changed. So just copy the metadata file + Files.copy(source.resolve(METADATA_FILENAME), defaultInstancePath.resolve(METADATA_FILENAME)); final Path sharedDirPath = source.resolve(SHARED_DIRNAME); // No shared data yet, so the folder may be empty or even may not exist if (Files.exists(sharedDirPath)) { From 2fbfb9b14ca773615ab36b200703f50bed3fab0b Mon Sep 17 00:00:00 2001 From: Neeharika Sompalli <52669918+Neeharika-Sompalli@users.noreply.github.com> Date: Thu, 15 Jun 2023 20:15:47 -0500 Subject: [PATCH 46/70] Make all TokenService validators not depend on `ConfigProvider` (#7131) Signed-off-by: Neeharika-Sompalli --- .../hedera/node/app/spi/info/NodeInfo.java | 16 ++++ .../impl/handlers/CryptoUpdateHandler.java | 11 ++- .../handlers/TokenAccountWipeHandler.java | 10 +- .../token/impl/handlers/TokenBurnHandler.java | 11 ++- .../token/impl/handlers/TokenMintHandler.java | 3 +- .../impl/validators/AllowanceValidator.java | 19 +--- .../validators/ApproveAllowanceValidator.java | 20 ++-- .../validators/DeleteAllowanceValidator.java | 17 ++-- .../impl/validators/StakingValidator.java | 30 +++--- .../validators/TokenAttributesValidator.java | 12 +-- .../impl/validators/TokenCreateValidator.java | 4 +- .../TokenSupplyChangeOpsValidator.java | 35 +++---- .../CryptoApproveAllowanceHandlerTest.java | 10 +- .../CryptoDeleteAllowanceHandlerTest.java | 9 +- .../handlers/CryptoUpdateHandlerTest.java | 12 +-- .../handlers/TokenAccountWipeHandlerTest.java | 87 ++++++++--------- .../test/handlers/TokenBurnHandlerTest.java | 96 +++++++++---------- .../handlers/TokenCreateHandleParityTest.java | 7 +- .../test/handlers/TokenCreateHandlerTest.java | 4 +- .../handlers/TokenMintHandlerParityTest.java | 7 +- .../test/handlers/TokenMintHandlerTest.java | 13 +-- .../util/CryptoTokenHandlerTestBase.java | 4 +- .../validators/AllowanceValidatorTest.java | 24 +---- .../ApproveAllowanceValidatorTest.java | 24 ++--- .../DeleteAllowanceValidatorTest.java | 39 +++----- .../TokenAttributesValidatorTest.java | 71 ++++---------- .../token/UniqueTokenManagementSpecs.java | 7 +- 27 files changed, 251 insertions(+), 351 deletions(-) diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/info/NodeInfo.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/info/NodeInfo.java index 65b322475b88..4b604cbe7a34 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/info/NodeInfo.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/info/NodeInfo.java @@ -18,6 +18,7 @@ import com.hedera.hapi.node.base.AccountID; import com.swirlds.common.system.address.AddressBook; +import edu.umd.cs.findbugs.annotations.NonNull; /** * Summarizes useful information about the nodes in the {@link AddressBook} from the Platform. 
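// Aside (illustrative sketch, not part of this patch): referring back to the MerkleDb.restore()
// change above, this is why the metadata file is copied while the data files can be hard-linked.
// A hard link is just a second path to the same underlying file, so a write through either path is
// seen by both; a copy is independent. Plain java.nio demonstration (paths are hypothetical):
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

final class LinkVersusCopySketch {
    static void demo(final Path source, final Path linked, final Path copied) throws IOException {
        Files.createLink(linked, source); // shares content with source: mutating one mutates both
        Files.copy(source, copied);       // independent content: safe to change without touching source
    }
}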
In @@ -41,5 +42,20 @@ public interface NodeInfo { * @throws IllegalArgumentException if the book did not contain the id, or was missing an * account for the id */ + @NonNull AccountID accountOf(long nodeId); + + /** + * Returns if the given node id is valid and the address book contains the id. + * @param nodeId the id of interest + * @return true if the given node id is valid. False otherwise. + */ + default boolean isValidId(long nodeId) { + try { + accountOf(nodeId); + return true; + } catch (IllegalArgumentException e) { + return false; + } + } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoUpdateHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoUpdateHandler.java index 1994fa9df8d9..5b63a77f6838 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoUpdateHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoUpdateHandler.java @@ -38,6 +38,7 @@ import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.service.token.impl.validators.StakingValidator; +import com.hedera.node.app.spi.info.NodeInfo; import com.hedera.node.app.spi.validation.EntityType; import com.hedera.node.app.spi.validation.ExpiryMeta; import com.hedera.node.app.spi.workflows.HandleContext; @@ -60,13 +61,17 @@ public class CryptoUpdateHandler extends BaseCryptoHandler implements Transactio private final CryptoSignatureWaivers waivers; private StakingValidator stakingValidator; + private NodeInfo nodeInfo; @Inject public CryptoUpdateHandler( - @NonNull final CryptoSignatureWaivers waivers, @NonNull final StakingValidator stakingValidator) { + @NonNull final CryptoSignatureWaivers waivers, + @NonNull final StakingValidator stakingValidator, + @NonNull final NodeInfo nodeInfo) { this.waivers = requireNonNull(waivers, "The supplied argument 'waivers' must not be null"); this.stakingValidator = requireNonNull(stakingValidator, "The supplied argument 'stakingValidator' must not be null"); + this.nodeInfo = requireNonNull(nodeInfo, "The supplied argument 'nodeInfo' must not be null"); } @Override @@ -265,6 +270,8 @@ private void validateFields( op.stakedId().kind().name(), op.stakedAccountId(), op.stakedNodeId(), - accountStore); + accountStore, + context, + nodeInfo); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAccountWipeHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAccountWipeHandler.java index ede617a90c81..44d5eaf90883 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAccountWipeHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenAccountWipeHandler.java @@ -51,6 +51,7 @@ import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.PreHandleContext; import com.hedera.node.app.spi.workflows.TransactionHandler; +import com.hedera.node.config.data.TokensConfig; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ArrayList; import java.util.LinkedHashSet; @@ -103,6 +104,7 @@ public void handle(@NonNull final 
HandleContext context) throws HandleException final var tokenRelStore = context.writableStore(WritableTokenRelationStore.class); final var nftStore = context.writableStore(WritableNftStore.class); final var expiryValidator = context.expiryValidator(); + final var tokensConfig = context.configuration().getConfigData(TokensConfig.class); // Assign relevant variables final var txn = context.body(); @@ -122,7 +124,8 @@ public void handle(@NonNull final HandleContext context) throws HandleException accountStore, tokenStore, tokenRelStore, - expiryValidator); + expiryValidator, + tokensConfig); final var acct = validated.account(); final var token = validated.token(); @@ -188,13 +191,14 @@ private ValidationResult validateSemantics( @NonNull final ReadableAccountStore accountStore, @NonNull final ReadableTokenStore tokenStore, @NonNull final ReadableTokenRelationStore tokenRelStore, - @NonNull final ExpiryValidator expiryValidator) { + @NonNull final ExpiryValidator expiryValidator, + @NonNull final TokensConfig tokensConfig) { validateTrue(fungibleWipeCount > -1, INVALID_WIPING_AMOUNT); final var account = TokenHandlerHelper.getIfUsable(accountId, accountStore, expiryValidator, INVALID_ACCOUNT_ID); - validator.validateWipe(fungibleWipeCount, nftSerialNums); + validator.validateWipe(fungibleWipeCount, nftSerialNums, tokensConfig); final var token = TokenHandlerHelper.getIfUsable(tokenId, tokenStore); validateTrue(token.wipeKey() != null, ResponseCodeEnum.TOKEN_HAS_NO_WIPE_KEY); diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java index d401316746cb..40a143c289f7 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenBurnHandler.java @@ -43,6 +43,7 @@ import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.PreHandleContext; import com.hedera.node.app.spi.workflows.TransactionHandler; +import com.hedera.node.config.data.TokensConfig; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ArrayList; import java.util.LinkedHashSet; @@ -91,13 +92,16 @@ public void handle(@NonNull final HandleContext context) throws HandleException final var tokenStore = context.writableStore(WritableTokenStore.class); final var tokenRelStore = context.writableStore(WritableTokenRelationStore.class); final var nftStore = context.writableStore(WritableNftStore.class); + final var tokensConfig = context.configuration().getConfigData(TokensConfig.class); + final var txn = context.body(); final var op = txn.tokenBurnOrThrow(); final var tokenId = op.token(); final var fungibleBurnCount = op.amount(); // Wrapping the serial nums this way de-duplicates the serial nums: final var nftSerialNums = new ArrayList<>(new LinkedHashSet<>(op.serialNumbers())); - final var validated = validateSemantics(tokenId, fungibleBurnCount, nftSerialNums, tokenStore, tokenRelStore); + final var validated = + validateSemantics(tokenId, fungibleBurnCount, nftSerialNums, tokenStore, tokenRelStore, tokensConfig); final var token = validated.token(); if (token.tokenType() == TokenType.FUNGIBLE_COMMON) { @@ -149,10 +153,11 @@ private ValidationResult validateSemantics( final long fungibleBurnCount, @NonNull 
final List nftSerialNums, @NonNull final ReadableTokenStore tokenStore, - @NonNull final ReadableTokenRelationStore tokenRelStore) { + @NonNull final ReadableTokenRelationStore tokenRelStore, + @NonNull final TokensConfig tokensConfig) { validateTrue(fungibleBurnCount >= 0, INVALID_TOKEN_BURN_AMOUNT); - validator.validateBurn(fungibleBurnCount, nftSerialNums); + validator.validateBurn(fungibleBurnCount, nftSerialNums, tokensConfig); final var token = TokenHandlerHelper.getIfUsable(tokenId, tokenStore); validateTrue(token.supplyKey() != null, TOKEN_HAS_NO_SUPPLY_KEY); diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java index 6f4082cc77db..79a810038c01 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenMintHandler.java @@ -142,7 +142,8 @@ public void handle(@NonNull final HandleContext context) throws HandleException private void validateSemantics(final HandleContext context) { requireNonNull(context); final var op = context.body().tokenMintOrThrow(); - validator.validateMint(op.amount(), op.metadata()); + final var tokensConfig = context.configuration().getConfigData(TokensConfig.class); + validator.validateMint(op.amount(), op.metadata(), tokensConfig); } /** diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/AllowanceValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/AllowanceValidator.java index 2663daffa2b3..eceb3bd05ee7 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/AllowanceValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/AllowanceValidator.java @@ -29,7 +29,6 @@ import com.hedera.hapi.node.token.NftAllowance; import com.hedera.node.app.service.token.ReadableAccountStore; import com.hedera.node.app.service.token.ReadableNftStore; -import com.hedera.node.config.ConfigProvider; import com.hedera.node.config.data.HederaConfig; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -38,25 +37,11 @@ import javax.inject.Inject; public class AllowanceValidator { - final ConfigProvider configProvider; @Inject - public AllowanceValidator(final ConfigProvider configProvider) { - this.configProvider = configProvider; - } - - /** - * Check if the allowance feature is enabled - * - * @return true if the feature is enabled in {@link HederaConfig} - */ - public boolean isEnabled() { - final var hederaConfig = configProvider.getConfiguration().getConfigData(HederaConfig.class); - return hederaConfig.allowancesIsEnabled(); - } + public AllowanceValidator() {} - protected void validateTotalAllowancesPerTxn(final int totalAllowances) { - final var hederaConfig = configProvider.getConfiguration().getConfigData(HederaConfig.class); + protected void validateTotalAllowancesPerTxn(final int totalAllowances, @NonNull final HederaConfig hederaConfig) { validateFalse( exceedsTxnLimit(totalAllowances, hederaConfig.allowancesMaxTransactionLimit()), MAX_ALLOWANCES_EXCEEDED); diff --git 
a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/ApproveAllowanceValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/ApproveAllowanceValidator.java index b58355217704..0dc29d5aa1c2 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/ApproveAllowanceValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/ApproveAllowanceValidator.java @@ -37,7 +37,7 @@ import com.hedera.node.app.service.token.ReadableTokenRelationStore; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.spi.workflows.HandleContext; -import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.HederaConfig; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.List; import javax.inject.Inject; @@ -50,9 +50,7 @@ public class ApproveAllowanceValidator extends AllowanceValidator { @Inject - public ApproveAllowanceValidator(final ConfigProvider configProvider) { - super(configProvider); - } + public ApproveAllowanceValidator() {} public void validate( @NonNull final HandleContext context, final Account payerAccount, final ReadableAccountStore accountStore) { @@ -60,6 +58,7 @@ public void validate( final var tokenStore = context.readableStore(ReadableTokenStore.class); final var tokenRelStore = context.readableStore(ReadableTokenRelationStore.class); final var nftStore = context.readableStore(ReadableNftStore.class); + final var hederaConfig = context.configuration().getConfigData(HederaConfig.class); final var txn = context.body(); final var op = txn.cryptoApproveAllowanceOrThrow(); @@ -69,10 +68,10 @@ public void validate( final var nftAllowances = op.nftAllowancesOrElse(emptyList()); // feature flag for allowances. Will probably be moved to some other place in app in the future. - validateTrue(isEnabled(), NOT_SUPPORTED); + validateTrue(hederaConfig.allowancesIsEnabled(), NOT_SUPPORTED); // validate total count of allowances - validateAllowanceCount(cryptoAllowances, tokenAllowances, nftAllowances); + validateAllowanceCount(cryptoAllowances, tokenAllowances, nftAllowances, hederaConfig); // validate all allowances validateCryptoAllowances(cryptoAllowances, payerAccount, accountStore); validateFungibleTokenAllowances(tokenAllowances, payerAccount, accountStore, tokenStore, tokenRelStore); @@ -188,14 +187,15 @@ private void validateNftAllowances( } private void validateAllowanceCount( - final List cryptoAllowances, - final List tokenAllowances, - final List nftAllowances) { + @NonNull final List cryptoAllowances, + @NonNull final List tokenAllowances, + @NonNull final List nftAllowances, + @NonNull final HederaConfig hederaConfig) { // each serial number of an NFT is considered as an allowance. // So for Nft allowances aggregated amount is considered for limit calculation. 
final var totalAllowances = cryptoAllowances.size() + tokenAllowances.size() + aggregateApproveNftAllowances(nftAllowances); - validateTotalAllowancesPerTxn(totalAllowances); + validateTotalAllowancesPerTxn(totalAllowances, hederaConfig); } private void validateTokenBasics( diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/DeleteAllowanceValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/DeleteAllowanceValidator.java index 44319236244a..ef3aef150ab4 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/DeleteAllowanceValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/DeleteAllowanceValidator.java @@ -33,7 +33,8 @@ import com.hedera.node.app.service.token.ReadableTokenRelationStore; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.spi.workflows.HandleContext; -import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.HederaConfig; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.HashSet; import java.util.List; import javax.inject.Inject; @@ -42,9 +43,7 @@ @Singleton public class DeleteAllowanceValidator extends AllowanceValidator { @Inject - public DeleteAllowanceValidator(final ConfigProvider configProvider) { - super(configProvider); - } + public DeleteAllowanceValidator() {} /** * Validates all allowances provided in {@link CryptoDeleteAllowanceTransactionBody} @@ -61,11 +60,12 @@ public void validate( final var tokenStore = handleContext.readableStore(ReadableTokenStore.class); final var tokenRelStore = handleContext.readableStore(ReadableTokenRelationStore.class); final var nftStore = handleContext.readableStore(ReadableNftStore.class); + final var hederaConfig = handleContext.configuration().getConfigData(HederaConfig.class); // feature flag for allowances. Will probably be moved to some other place in app in the future. - validateTrue(isEnabled(), NOT_SUPPORTED); + validateTrue(hederaConfig.allowancesIsEnabled(), NOT_SUPPORTED); - validateAllowancesCount(nftAllowances); + validateAllowancesCount(nftAllowances, hederaConfig); validateNftDeleteAllowances(nftAllowances, payerAccount, accountStore, tokenStore, tokenRelStore, nftStore); } @@ -116,11 +116,12 @@ private void validateDeleteSerialNums( validateSerialNums(serialNums, tokenId, nftStore); } - private void validateAllowancesCount(final List nftAllowances) { + private void validateAllowancesCount( + @NonNull final List nftAllowances, @NonNull final HederaConfig hederaConfig) { // each serial number of an NFT is considered as an allowance. // So for Nft allowances aggregated amount is considered for transaction limit calculation. // Number of serials will not be counted for allowance on account. 
- validateTotalAllowancesPerTxn(aggregateNftDeleteAllowances(nftAllowances)); + validateTotalAllowancesPerTxn(aggregateNftDeleteAllowances(nftAllowances), hederaConfig); } /** diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/StakingValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/StakingValidator.java index f92a46c8f7b9..b41a14b29015 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/StakingValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/StakingValidator.java @@ -23,9 +23,9 @@ import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.AccountID; -import com.hedera.node.app.service.mono.context.NodeInfo; import com.hedera.node.app.service.token.ReadableAccountStore; -import com.hedera.node.config.ConfigProvider; +import com.hedera.node.app.spi.info.NodeInfo; +import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.config.data.StakingConfig; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -37,31 +37,29 @@ */ @Singleton public class StakingValidator { - private NodeInfo nodeInfo; - private ConfigProvider configProvider; - @Inject - public StakingValidator(NodeInfo nodeInfo, ConfigProvider configProvider) { - this.nodeInfo = nodeInfo; - this.configProvider = configProvider; - } + public StakingValidator() {} /** * Validates staked id if present + * * @param hasDeclineRewardChange if the transaction body has decline reward field to be updated - * @param stakedIdKind staked id kind (account or node) - * @param stakedAccountIdInOp staked account id - * @param stakedNodeIdInOp staked node id - * @param accountStore readable account store + * @param stakedIdKind staked id kind (account or node) + * @param stakedAccountIdInOp staked account id + * @param stakedNodeIdInOp staked node id + * @param accountStore readable account store + * @param context */ public void validateStakedId( @NonNull final boolean hasDeclineRewardChange, @NonNull final String stakedIdKind, @Nullable final AccountID stakedAccountIdInOp, @Nullable final Long stakedNodeIdInOp, - @NonNull ReadableAccountStore accountStore) { + @NonNull ReadableAccountStore accountStore, + @NonNull final HandleContext context, + @NonNull final NodeInfo nodeInfo) { final var hasStakingId = stakedAccountIdInOp != null || stakedNodeIdInOp != null; - final var stakingConfig = configProvider.getConfiguration().getConfigData(StakingConfig.class); + final var stakingConfig = context.configuration().getConfigData(StakingConfig.class); // If staking is not enabled, then can't update staked id validateFalse(!stakingConfig.isEnabled() && (hasStakingId || hasDeclineRewardChange), STAKING_NOT_ENABLED); @@ -75,7 +73,7 @@ public void validateStakedId( if (stakedIdKind.equals("STAKED_ACCOUNT_ID")) { validateTrue(accountStore.getAccountById(requireNonNull(stakedAccountIdInOp)) != null, INVALID_STAKING_ID); } else if (stakedIdKind.equals("STAKED_NODE_ID")) { - validateTrue(nodeInfo.isValidId((requireNonNull(stakedNodeIdInOp).longValue())), INVALID_STAKING_ID); + validateTrue(nodeInfo.isValidId((requireNonNull(stakedNodeIdInOp))), INVALID_STAKING_ID); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenAttributesValidator.java 
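// Aside (illustrative sketch, not part of this patch): the new NodeInfo.isValidId default method
// turns accountOf()'s "throws IllegalArgumentException" contract into a boolean, which is what lets
// the refactored StakingValidator above check a STAKED_NODE_ID without a try/catch. A hypothetical
// caller-side guard, assuming the same validateTrue/INVALID_STAKING_ID helpers used in this change set:
import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_STAKING_ID;
import static com.hedera.node.app.spi.workflows.HandleException.validateTrue;

import com.hedera.node.app.spi.info.NodeInfo;

final class StakedNodeIdGuardSketch {
    // Throws HandleException(INVALID_STAKING_ID) when the node id is not in the address book.
    static void requireKnownNode(final NodeInfo nodeInfo, final long stakedNodeId) {
        validateTrue(nodeInfo.isValidId(stakedNodeId), INVALID_STAKING_ID);
    }
}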
b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenAttributesValidator.java index dcf20df71c49..465a133ff6a7 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenAttributesValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenAttributesValidator.java @@ -35,7 +35,6 @@ import com.hedera.hapi.node.base.Key; import com.hedera.hapi.node.base.KeyList; import com.hedera.hapi.node.base.ResponseCodeEnum; -import com.hedera.node.config.ConfigProvider; import com.hedera.node.config.data.TokensConfig; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -49,21 +48,17 @@ */ @Singleton public class TokenAttributesValidator { - private final ConfigProvider configProvider; public static final Key IMMUTABILITY_SENTINEL_KEY = Key.newBuilder().keyList(KeyList.DEFAULT).build(); @Inject - public TokenAttributesValidator(@NonNull final ConfigProvider configProvider) { - this.configProvider = configProvider; - } + public TokenAttributesValidator() {} /** * Validates the token symbol, if it is exists and is not empty or not too long. * @param symbol the token symbol to validate */ - public void validateTokenSymbol(@Nullable final String symbol) { - final var tokensConfig = configProvider.getConfiguration().getConfigData(TokensConfig.class); + public void validateTokenSymbol(@Nullable final String symbol, @NonNull final TokensConfig tokensConfig) { tokenStringCheck(symbol, tokensConfig.maxSymbolUtf8Bytes(), MISSING_TOKEN_SYMBOL, TOKEN_SYMBOL_TOO_LONG); } @@ -71,8 +66,7 @@ public void validateTokenSymbol(@Nullable final String symbol) { * Validates the token name, if it is exists and is not empty or not too long. 
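// Aside (illustrative sketch, not part of this patch): after this refactor the token-string checks
// are pure functions of the TokensConfig the caller passes in, rather than of an injected
// ConfigProvider. A hypothetical caller pulling the config from its HandleContext, mirroring how
// TokenCreateValidator invokes these methods (class and local names below are illustrative):
import com.hedera.node.app.service.token.impl.validators.TokenAttributesValidator;
import com.hedera.node.app.spi.workflows.HandleContext;
import com.hedera.node.config.data.TokensConfig;

final class TokenStringChecksSketch {
    static void check(
            final HandleContext context,
            final TokenAttributesValidator validator,
            final String symbol,
            final String name) {
        final var tokensConfig = context.configuration().getConfigData(TokensConfig.class);
        validator.validateTokenSymbol(symbol, tokensConfig); // MISSING_TOKEN_SYMBOL / TOKEN_SYMBOL_TOO_LONG
        validator.validateTokenName(name, tokensConfig);     // MISSING_TOKEN_NAME / TOKEN_NAME_TOO_LONG
    }
}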
* @param name the token name to validate */ - public void validateTokenName(@Nullable final String name) { - final var tokensConfig = configProvider.getConfiguration().getConfigData(TokensConfig.class); + public void validateTokenName(@Nullable final String name, @NonNull final TokensConfig tokensConfig) { tokenStringCheck(name, tokensConfig.maxTokenNameUtf8Bytes(), MISSING_TOKEN_NAME, TOKEN_NAME_TOO_LONG); } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java index f8ec76952993..a8335bdcbf12 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java @@ -119,8 +119,8 @@ public void validate( } context.attributeValidator().validateMemo(op.memo()); - tokenAttributesValidator.validateTokenSymbol(op.symbol()); - tokenAttributesValidator.validateTokenName(op.name()); + tokenAttributesValidator.validateTokenSymbol(op.symbol(), config); + tokenAttributesValidator.validateTokenName(op.name(), config); tokenAttributesValidator.checkKeys( op.hasAdminKey(), op.adminKey(), diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java index d7c0b2796937..cd6b707a9279 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenSupplyChangeOpsValidator.java @@ -25,12 +25,10 @@ import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; import static com.hedera.node.app.spi.workflows.PreCheckException.validateFalsePreCheck; import static com.hedera.node.app.spi.workflows.PreCheckException.validateTruePreCheck; -import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.ResponseCodeEnum; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; -import com.hedera.node.config.ConfigProvider; import com.hedera.node.config.data.TokensConfig; import com.hedera.pbj.runtime.io.buffer.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; @@ -43,12 +41,8 @@ * Token Burn operations in handle */ public class TokenSupplyChangeOpsValidator { - private final ConfigProvider configProvider; - @Inject - public TokenSupplyChangeOpsValidator(@NonNull final ConfigProvider configProvider) { - this.configProvider = requireNonNull(configProvider); - } + public TokenSupplyChangeOpsValidator() {} /** * Validate the transaction data for a token mint operation @@ -57,11 +51,11 @@ public TokenSupplyChangeOpsValidator(@NonNull final ConfigProvider configProvide * @param metaDataList the list of metadata for the NFTs to mint * @throws HandleException if the transaction data is invalid */ - public void validateMint(final long fungibleCount, final List metaDataList) { + public void validateMint( + final long fungibleCount, final List metaDataList, final TokensConfig tokensConfig) { final 
var numNfts = metaDataList.size(); - validateCommon(fungibleCount, numNfts, TokensConfig::nftsMaxBatchSizeMint); + validateCommon(fungibleCount, numNfts, TokensConfig::nftsMaxBatchSizeMint, tokensConfig); - final var tokensConfig = configProvider.getConfiguration().getConfigData(TokensConfig.class); final var maxNftMetadataBytes = tokensConfig.nftsMaxMetadataBytes(); if (fungibleCount <= 0 && numNfts > 0) { validateMetaData(metaDataList, maxNftMetadataBytes); @@ -75,8 +69,11 @@ public void validateMint(final long fungibleCount, final List metaDataLis * @param nftSerialNums the list of NFT serial numbers to burn * @throws HandleException if the transaction data is invalid */ - public void validateBurn(final long fungibleCount, final List nftSerialNums) { - validateCommon(fungibleCount, nftSerialNums.size(), TokensConfig::nftsMaxBatchSizeBurn); + public void validateBurn( + final long fungibleCount, + @NonNull final List nftSerialNums, + @NonNull final TokensConfig tokensConfig) { + validateCommon(fungibleCount, nftSerialNums.size(), TokensConfig::nftsMaxBatchSizeBurn, tokensConfig); } /** @@ -124,8 +121,11 @@ public static void verifyTokenInstanceAmounts( * @param nftSerialNums the list of NFT serial numbers to wipe * @throws HandleException if the transaction data is invalid */ - public void validateWipe(final long fungibleCount, final List nftSerialNums) { - validateCommon(fungibleCount, nftSerialNums.size(), TokensConfig::nftsMaxBatchSizeWipe); + public void validateWipe( + final long fungibleCount, + @NonNull final List nftSerialNums, + @NonNull final TokensConfig tokensConfig) { + validateCommon(fungibleCount, nftSerialNums.size(), TokensConfig::nftsMaxBatchSizeWipe, tokensConfig); } /** @@ -136,9 +136,10 @@ public void validateWipe(final long fungibleCount, final List nftSerialNum * @param batchSizeGetter The function to get the corresponding batch size for the token operation. 
*/ private void validateCommon( - final long fungibleCount, final int nftCount, @NonNull final ToIntFunction batchSizeGetter) { - final var tokensConfig = configProvider.getConfiguration().getConfigData(TokensConfig.class); - + final long fungibleCount, + final int nftCount, + @NonNull final ToIntFunction batchSizeGetter, + final TokensConfig tokensConfig) { // Get needed configurations final var nftsAreEnabled = tokensConfig.nftsAreEnabled(); final var maxNftBatchOpSize = batchSizeGetter.applyAsInt(tokensConfig); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoApproveAllowanceHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoApproveAllowanceHandlerTest.java index d87d734b3840..b851ce81f8a4 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoApproveAllowanceHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoApproveAllowanceHandlerTest.java @@ -34,7 +34,6 @@ import com.hedera.hapi.node.token.NftAllowance; import com.hedera.hapi.node.token.TokenAllowance; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.config.VersionedConfigImpl; import com.hedera.node.app.service.token.impl.*; import com.hedera.node.app.service.token.impl.ReadableAccountStoreImpl; import com.hedera.node.app.service.token.impl.handlers.CryptoApproveAllowanceHandler; @@ -44,7 +43,6 @@ import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; -import com.hedera.node.config.ConfigProvider; import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import java.util.List; import org.junit.jupiter.api.BeforeEach; @@ -56,9 +54,6 @@ @ExtendWith(MockitoExtension.class) class CryptoApproveAllowanceHandlerTest extends CryptoTokenHandlerTestBase { - @Mock(strictness = Strictness.LENIENT) - private ConfigProvider configProvider; - @Mock(strictness = Strictness.LENIENT) private HandleContext handleContext; @@ -68,8 +63,8 @@ class CryptoApproveAllowanceHandlerTest extends CryptoTokenHandlerTestBase { public void setUp() { super.setUp(); refreshWritableStores(); - final var validator = new ApproveAllowanceValidator(configProvider); - givenStoresAndConfig(configProvider, handleContext); + final var validator = new ApproveAllowanceValidator(); + givenStoresAndConfig(handleContext); subject = new CryptoApproveAllowanceHandler(validator); } @@ -344,7 +339,6 @@ void checksIfAllowancesExceedLimit() { configuration = new HederaTestConfigBuilder() .withValue("hedera.allowances.maxAccountLimit", 2) .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); given(handleContext.configuration()).willReturn(configuration); final var txn = cryptoApproveAllowanceTransaction( diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteAllowanceHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteAllowanceHandlerTest.java index d90fb0bb97b2..3479d7213944 100644 --- 
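// Aside (illustrative sketch, not part of this patch): the test diffs that follow all use the same
// pattern now that the validators are config-free -- build a Configuration with targeted overrides
// via HederaTestConfigBuilder and stub it on the mocked HandleContext. A condensed example,
// assuming handleContext is a Mockito mock (the helper name is hypothetical):
import static org.mockito.BDDMockito.given;

import com.hedera.node.app.spi.workflows.HandleContext;
import com.hedera.node.config.testfixtures.HederaTestConfigBuilder;
import com.swirlds.config.api.Configuration;

final class TestConfigStubSketch {
    static void stubAllowanceLimit(final HandleContext handleContext) {
        final Configuration configuration = new HederaTestConfigBuilder()
                .withValue("hedera.allowances.maxAccountLimit", 2)
                .getOrCreateConfig();
        given(handleContext.configuration()).willReturn(configuration);
    }
}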
a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteAllowanceHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoDeleteAllowanceHandlerTest.java @@ -38,7 +38,6 @@ import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; -import com.hedera.node.config.ConfigProvider; import java.util.List; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -48,9 +47,6 @@ @ExtendWith(MockitoExtension.class) class CryptoDeleteAllowanceHandlerTest extends CryptoTokenHandlerTestBase { - @Mock(strictness = LENIENT) - private ConfigProvider configProvider; - @Mock(strictness = LENIENT) private HandleContext handleContext; @@ -59,13 +55,12 @@ class CryptoDeleteAllowanceHandlerTest extends CryptoTokenHandlerTestBase { @BeforeEach public void setUp() { super.setUp(); - final var deleteAllowanceValidator = new DeleteAllowanceValidator(configProvider); + final var deleteAllowanceValidator = new DeleteAllowanceValidator(); subject = new CryptoDeleteAllowanceHandler(deleteAllowanceValidator); refreshWritableStores(); - givenStoresAndConfig(configProvider, handleContext); + givenStoresAndConfig(handleContext); given(handleContext.configuration()).willReturn(configuration); - given(configProvider.getConfiguration()).willReturn(versionedConfig); } @Test diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoUpdateHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoUpdateHandlerTest.java index 494179d2a75f..92d2e152ae72 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoUpdateHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoUpdateHandlerTest.java @@ -54,7 +54,6 @@ import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.config.VersionedConfigImpl; import com.hedera.node.app.service.mono.config.HederaNumbers; -import com.hedera.node.app.service.mono.context.NodeInfo; import com.hedera.node.app.service.mono.context.properties.GlobalDynamicProperties; import com.hedera.node.app.service.mono.context.properties.PropertySource; import com.hedera.node.app.service.token.impl.CryptoSignatureWaiversImpl; @@ -64,6 +63,7 @@ import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoHandlerTestBase; import com.hedera.node.app.service.token.impl.validators.StakingValidator; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; +import com.hedera.node.app.spi.info.NodeInfo; import com.hedera.node.app.spi.validation.AttributeValidator; import com.hedera.node.app.spi.validation.ExpiryValidator; import com.hedera.node.app.spi.workflows.HandleContext; @@ -116,7 +116,6 @@ class CryptoUpdateHandlerTest extends CryptoHandlerTestBase { private final long updateAccountNum = 32132L; private final AccountID updateAccountId = AccountID.newBuilder().accountNum(updateAccountNum).build(); - private final Key opKey = B_COMPLEX_KEY; private Account updateAccount; private Configuration configuration; @@ -135,8 +134,8 @@ public void setUp() { attributeValidator = new 
StandardizedAttributeValidator(consensusSecondNow, compositeProps, dynamicProperties); expiryValidator = new StandardizedExpiryValidator( System.out::println, attributeValidator, consensusSecondNow, hederaNumbers, configProvider); - stakingValidator = new StakingValidator(nodeInfo, configProvider); - subject = new CryptoUpdateHandler(waivers, stakingValidator); + stakingValidator = new StakingValidator(); + subject = new CryptoUpdateHandler(waivers, stakingValidator, nodeInfo); } @Test @@ -332,8 +331,7 @@ void rejectsStakedIdIfStakingDisabled() { final var config = new HederaTestConfigBuilder() .withValue("staking.isEnabled", false) .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(config, 1)); - + given(handleContext.configuration()).willReturn(config); assertThatThrownBy(() -> subject.handle(handleContext)) .isInstanceOf(HandleException.class) .has(responseCode(STAKING_NOT_ENABLED)); @@ -348,7 +346,7 @@ void rejectsDeclineRewardUpdateIfStakingDisabled() { final var config = new HederaTestConfigBuilder() .withValue("staking.isEnabled", false) .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(config, 1)); + given(handleContext.configuration()).willReturn(config); assertThatThrownBy(() -> subject.handle(handleContext)) .isInstanceOf(HandleException.class) diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAccountWipeHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAccountWipeHandlerTest.java index ad754a3e22bd..3036b3c6449d 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAccountWipeHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenAccountWipeHandlerTest.java @@ -44,7 +44,6 @@ import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.notNull; import static org.mockito.BDDMockito.given; -import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.mock; import com.hedera.hapi.node.base.AccountID; @@ -75,10 +74,10 @@ import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; -import com.hedera.node.config.ConfigProvider; -import com.hedera.node.config.VersionedConfiguration; -import com.hedera.node.config.data.TokensConfig; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import com.swirlds.config.api.Configuration; import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -89,11 +88,20 @@ class TokenAccountWipeHandlerTest extends ParityTestBase { private static final AccountID ACCOUNT_4680 = BaseCryptoHandler.asAccount(4680); private static final AccountID TREASURY_ACCOUNT_9876 = BaseCryptoHandler.asAccount(9876); private static final TokenID TOKEN_531 = BaseTokenHandler.asToken(531); - private final ConfigProvider configProvider = mock(ConfigProvider.class); - - private final TokenSupplyChangeOpsValidator validator = new TokenSupplyChangeOpsValidator(configProvider); + private final TokenSupplyChangeOpsValidator validator = new 
TokenSupplyChangeOpsValidator(); private final TokenAccountWipeHandler subject = new TokenAccountWipeHandler(validator); + private Configuration configuration; + + @BeforeEach + public void setUp() { + super.setUp(); + configuration = new HederaTestConfigBuilder() + .withValue("tokens.nfts.areEnabled", true) + .withValue("tokens.nfts.maxBatchSizeWipe", 100) + .getOrCreateConfig(); + } + @Nested class PureChecks { @SuppressWarnings("DataFlowIssue") @@ -222,7 +230,7 @@ void nullArgsThrows() { @Test void invalidFungibleAmount() { - mockConfig(); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, -1); final var context = mockContext(txn); @@ -233,7 +241,7 @@ void invalidFungibleAmount() { @Test void accountDoesntExist() { - mockConfig(); + // Both stores are intentionally empty writableAccountStore = newWritableStoreWithAccounts(); writableTokenStore = newWritableStoreWithTokens(); @@ -247,8 +255,10 @@ void accountDoesntExist() { @Test void fungibleAmountExceedsBatchSize() { - final var maxBatchSize = 5; - mockConfig(maxBatchSize, true); + configuration = new HederaTestConfigBuilder() + .withValue("tokens.nfts.areEnabled", true) + .withValue("tokens.nfts.maxBatchSizeWipe", 5) + .getOrCreateConfig(); mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -258,7 +268,7 @@ void fungibleAmountExceedsBatchSize() { .accountNumber(TREASURY_ACCOUNT_9876.accountNumOrThrow()) .build()); writableTokenStore = newWritableStoreWithTokens(); - final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, maxBatchSize + 1); + final var txn = newWipeTxn(ACCOUNT_4680, TOKEN_531, 6); final var context = mockContext(txn); assertThatThrownBy(() -> subject.handle(context)) @@ -268,7 +278,10 @@ void fungibleAmountExceedsBatchSize() { @Test void nftAmountExceedsBatchSize() { - mockConfig(2, true); + configuration = new HederaTestConfigBuilder() + .withValue("tokens.nfts.areEnabled", true) + .withValue("tokens.nfts.maxBatchSizeWipe", 2) + .getOrCreateConfig(); mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -288,7 +301,7 @@ void nftAmountExceedsBatchSize() { @Test void tokenIdNotFound() { - mockConfig(); + mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -308,7 +321,7 @@ void tokenIdNotFound() { @Test void tokenIsDeleted() { - mockConfig(); + mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -331,7 +344,7 @@ void tokenIsDeleted() { @Test void tokenIsPaused() { - mockConfig(); + mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -354,7 +367,7 @@ void tokenIsPaused() { @Test void tokenDoesntHaveWipeKey() { - mockConfig(); + mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -379,7 +392,7 @@ void tokenDoesntHaveWipeKey() { @Test void accountRelDoesntExist() { - mockConfig(); + mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -401,7 +414,7 @@ void accountRelDoesntExist() { @Test void givenAccountIsTreasury() { - mockConfig(); + mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -422,7 +435,7 @@ void givenAccountIsTreasury() { @Test void fungibleAmountNegatesSupply() { - mockConfig(); + mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -449,7 +462,7 @@ void 
fungibleAmountNegatesSupply() { @Test void fungibleAmountNegatesBalance() { - mockConfig(); + mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -478,7 +491,7 @@ void fungibleAmountNegatesBalance() { @Test void fungibleAmountBurnedWithLeftoverAccountBalance() { - mockConfig(); + mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -515,7 +528,7 @@ void fungibleAmountBurnedWithLeftoverAccountBalance() { @Test void fungibleAmountBurnedWithNoLeftoverAccountBalance() { - mockConfig(); + mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -552,7 +565,7 @@ void fungibleAmountBurnedWithNoLeftoverAccountBalance() { @Test void nftSerialNumDoesntExist() { - mockConfig(); + mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -576,7 +589,7 @@ void nftSerialNumDoesntExist() { @Test void nftNotOwnedByAccount() { - mockConfig(); + mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -606,7 +619,7 @@ void nftNotOwnedByAccount() { @Test void numNftSerialsNegatesSupply() { - mockConfig(); + mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -633,7 +646,7 @@ void numNftSerialsNegatesSupply() { @Test void nftSerialNumsIsEmpty() { - mockConfig(); + mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -661,7 +674,6 @@ void nftSerialNumsIsEmpty() { void nftSerialsWipedWithLeftoverNftSerials() { // i.e. leftover NFT serials remaining with the owning account - mockConfig(); mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -740,7 +752,7 @@ void nftSerialsWipedWithLeftoverNftSerials() { @Test void nftSerialsWipedWithNoLeftoverNftSerials() { - mockConfig(); + mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -821,7 +833,6 @@ void nftSerialsWipedWithNoLeftoverNftSerials() { void duplicateNftSerials() { // This is a success case, and should be identical to the case without no duplicates above - mockConfig(); mockOkExpiryValidator(); writableAccountStore = newWritableStoreWithAccounts( Account.newBuilder() @@ -946,26 +957,12 @@ private HandleContext mockContext(TransactionBody txn) { given(context.writableStore(WritableTokenStore.class)).willReturn(writableTokenStore); given(context.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); given(context.writableStore(WritableNftStore.class)).willReturn(writableNftStore); + given(context.configuration()).willReturn(configuration); given(context.expiryValidator()).willReturn(validator); return context; } - - private void mockConfig() { - mockConfig(100, true); - } - - private void mockConfig(final int maxBatchSize, final boolean nftsEnabled) { - final var mockTokensConfig = mock(TokensConfig.class); - lenient().when(mockTokensConfig.nftsAreEnabled()).thenReturn(nftsEnabled); - lenient().when(mockTokensConfig.nftsMaxBatchSizeWipe()).thenReturn(maxBatchSize); - - final var mockConfig = mock(VersionedConfiguration.class); - lenient().when(mockConfig.getConfigData(TokensConfig.class)).thenReturn(mockTokensConfig); - - given(configProvider.getConfiguration()).willReturn(mockConfig); - } } private TransactionBody newWipeTxn(AccountID accountId, TokenID token, long fungibleAmount, Long... 
nftSerialNums) { diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java index 64a795b387b9..fa637ac4bbfe 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenBurnHandlerTest.java @@ -43,7 +43,6 @@ import static org.hamcrest.Matchers.contains; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.BDDMockito.given; -import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.mock; import com.hedera.hapi.node.base.AccountID; @@ -75,11 +74,11 @@ import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; -import com.hedera.node.config.ConfigProvider; -import com.hedera.node.config.VersionedConfiguration; -import com.hedera.node.config.data.TokensConfig; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import com.swirlds.config.api.Configuration; import java.util.Map; import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -89,10 +88,18 @@ class TokenBurnHandlerTest extends ParityTestBase { private static final AccountID ACCOUNT_1339 = BaseCryptoHandler.asAccount(1339); private static final TokenID TOKEN_123 = BaseTokenHandler.asToken(123); - - private final ConfigProvider configProvider = mock(ConfigProvider.class); - private TokenSupplyChangeOpsValidator validator = new TokenSupplyChangeOpsValidator(configProvider); + private TokenSupplyChangeOpsValidator validator = new TokenSupplyChangeOpsValidator(); private final TokenBurnHandler subject = new TokenBurnHandler(validator); + private Configuration configuration; + + @BeforeEach + public void setUp() { + super.setUp(); + configuration = new HederaTestConfigBuilder() + .withValue("tokens.nfts.areEnabled", true) + .withValue("tokens.nfts.maxBatchSizeBurn", 100) + .getOrCreateConfig(); + } @Nested class PureChecks { @@ -211,7 +218,7 @@ void nullArg() { @Test void invalidFungibleAmount() { - mockConfig(); + final var txn = newBurnTxn(TOKEN_123, -1); final var context = mockContext(txn); @@ -222,7 +229,7 @@ void invalidFungibleAmount() { @Test void tokenIdNotFound() { - mockConfig(); + writableTokenStore = newWritableStoreWithTokens(); final var txn = newBurnTxn(BaseTokenHandler.asToken(999), 1); final var context = mockContext(txn); @@ -234,7 +241,7 @@ void tokenIdNotFound() { @Test void tokenIsDeleted() { - mockConfig(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() .tokenNumber(TOKEN_123.tokenNum()) .tokenType(TokenType.FUNGIBLE_COMMON) @@ -253,7 +260,7 @@ void tokenIsDeleted() { @Test void tokenIsPaused() { - mockConfig(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() .tokenNumber(TOKEN_123.tokenNum()) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) @@ -271,7 +278,7 @@ void tokenIsPaused() { @Test void tokenDoesntHaveSupplyKey() { - mockConfig(); + final var totalFungibleSupply = 5; writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() 
.tokenNumber(TOKEN_123.tokenNum()) @@ -295,7 +302,7 @@ void tokenDoesntHaveSupplyKey() { @Test void tokenTreasuryRelDoesntExist() { - mockConfig(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() .tokenNumber(TOKEN_123.tokenNum()) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) @@ -314,8 +321,11 @@ void tokenTreasuryRelDoesntExist() { @Test void fungibleAmountExceedsBatchSize() { - mockConfig(1, true); - validator = new TokenSupplyChangeOpsValidator(configProvider); + configuration = new HederaTestConfigBuilder() + .withValue("tokens.nfts.areEnabled", true) + .withValue("tokens.nfts.maxBatchSizeBurn", 1) + .getOrCreateConfig(); + validator = new TokenSupplyChangeOpsValidator(); final var txn = newBurnTxn(TOKEN_123, 2); final var context = mockContext(txn); @@ -327,7 +337,7 @@ void fungibleAmountExceedsBatchSize() { @Test void fungibleTokenTreasuryAccountDoesntExist() { - mockConfig(); + // Intentionally has no treasury account: writableAccountStore = newWritableStoreWithAccounts(); writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() @@ -352,7 +362,7 @@ void fungibleTokenTreasuryAccountDoesntExist() { @Test void fungibleAmountExceedsSupply() { - mockConfig(); + final var totalFungibleSupply = 5; writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() .tokenNumber(TOKEN_123.tokenNum()) @@ -376,7 +386,6 @@ void fungibleAmountExceedsSupply() { @Test void fungibleAmountExceedsBalance() { - mockConfig(); writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() .tokenNumber(TOKEN_123.tokenNum()) .tokenType(TokenType.FUNGIBLE_COMMON) @@ -402,7 +411,6 @@ void fungibleAmountExceedsBalance() { @Test void fungibleAmountBurnedWithLeftoverTreasuryBalance() { - mockConfig(); writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() .accountNumber(ACCOUNT_1339.accountNumOrThrow()) .numberTreasuryTitles(1) @@ -438,7 +446,7 @@ void fungibleAmountBurnedWithLeftoverTreasuryBalance() { @Test void fungibleAmountBurnedWithZeroTreasuryBalance() { - mockConfig(); + writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() .accountNumber(ACCOUNT_1339.accountNumOrThrow()) .numberTreasuryTitles(1) @@ -476,8 +484,11 @@ void fungibleAmountBurnedWithZeroTreasuryBalance() { @Test void nftsGivenButNotEnabled() { - mockConfig(100, false); - validator = new TokenSupplyChangeOpsValidator(configProvider); + configuration = new HederaTestConfigBuilder() + .withValue("tokens.nfts.areEnabled", false) + .withValue("tokens.nfts.maxBatchSizeBurn", 100) + .getOrCreateConfig(); + validator = new TokenSupplyChangeOpsValidator(); final var txn = newBurnTxn(TOKEN_123, 0, 1L); final var context = mockContext(txn); @@ -489,8 +500,11 @@ void nftsGivenButNotEnabled() { @Test void nftSerialCountExceedsBatchSize() { - mockConfig(1, true); - validator = new TokenSupplyChangeOpsValidator(configProvider); + configuration = new HederaTestConfigBuilder() + .withValue("tokens.nfts.areEnabled", true) + .withValue("tokens.nfts.maxBatchSizeBurn", 1) + .getOrCreateConfig(); + validator = new TokenSupplyChangeOpsValidator(); final var txn = newBurnTxn(TOKEN_123, 0, 1L, 2L); final var context = mockContext(txn); @@ -502,7 +516,7 @@ void nftSerialCountExceedsBatchSize() { @Test void invalidNftSerial() { - mockConfig(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() .tokenNumber(TOKEN_123.tokenNum()) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) @@ -525,7 +539,7 @@ void invalidNftSerial() { @Test void nftSerialNotFound() { - mockConfig(); + writableTokenStore 
= newWritableStoreWithTokens(Token.newBuilder() .tokenNumber(TOKEN_123.tokenNum()) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) @@ -551,7 +565,7 @@ void nftSerialNotFound() { @Test void nftSerialNumsEmpty() { - mockConfig(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() .tokenNumber(TOKEN_123.tokenNum()) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) @@ -574,7 +588,7 @@ void nftSerialNumsEmpty() { @Test void nftNotOwnedByTreasury() { - mockConfig(); + writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() .tokenNumber(TOKEN_123.tokenNum()) .tokenType(TokenType.NON_FUNGIBLE_UNIQUE) @@ -606,7 +620,7 @@ void nftNotOwnedByTreasury() { @Test void nftTreasuryAccountDoesntExist() { - mockConfig(); + // Intentionally has no treasury account: writableAccountStore = newWritableStoreWithAccounts(); writableTokenStore = newWritableStoreWithTokens(Token.newBuilder() @@ -638,7 +652,7 @@ void nftTreasuryAccountDoesntExist() { @Test void numNftSerialsExceedsNftSupply() { - mockConfig(); + writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() .accountNumber(ACCOUNT_1339.accountNumOrThrow()) .numberTreasuryTitles(1) @@ -681,7 +695,7 @@ void numNftSerialsExceedsNftSupply() { @Test void nftSerialsBurnedWithLeftoverTreasuryBalance() { - mockConfig(); + writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() .accountNumber(ACCOUNT_1339.accountNumOrThrow()) .numberTreasuryTitles(1) @@ -741,7 +755,7 @@ void nftSerialsBurnedWithLeftoverTreasuryBalance() { @Test void nftSerialsBurnedWithNoLeftoverTreasuryBalance() { - mockConfig(); + writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() .accountNumber(ACCOUNT_1339.accountNumOrThrow()) .numberTreasuryTitles(1) @@ -803,7 +817,7 @@ void nftSerialsBurnedWithNoLeftoverTreasuryBalance() { @Test void duplicateNftSerials() { // This is a success case, and should be identical to the case without no duplicates above - mockConfig(); + writableAccountStore = newWritableStoreWithAccounts(Account.newBuilder() .accountNumber(ACCOUNT_1339.accountNumOrThrow()) .numberTreasuryTitles(1) @@ -869,24 +883,10 @@ private HandleContext mockContext(TransactionBody txn) { given(context.writableStore(WritableTokenStore.class)).willReturn(writableTokenStore); given(context.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); given(context.writableStore(WritableNftStore.class)).willReturn(writableNftStore); + given(context.configuration()).willReturn(configuration); return context; } - - private void mockConfig() { - mockConfig(100, true); - } - - private void mockConfig(final int maxBatchSize, final boolean nftsEnabled) { - final var mockTokensConfig = mock(TokensConfig.class); - lenient().when(mockTokensConfig.nftsAreEnabled()).thenReturn(nftsEnabled); - lenient().when(mockTokensConfig.nftsMaxBatchSizeBurn()).thenReturn(maxBatchSize); - - final var mockConfig = mock(VersionedConfiguration.class); - lenient().when(mockConfig.getConfigData(TokensConfig.class)).thenReturn(mockTokensConfig); - - given(configProvider.getConfiguration()).willReturn(mockConfig); - } } private TransactionBody newBurnTxn(TokenID token, long fungibleAmount, Long... 
nftSerialNums) { diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandleParityTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandleParityTest.java index 80d1d14358d4..85ef842cbc34 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandleParityTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandleParityTest.java @@ -70,18 +70,13 @@ import com.hedera.node.app.service.token.impl.validators.TokenCreateValidator; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; import com.hedera.node.app.spi.workflows.PreCheckException; -import com.hedera.node.config.ConfigProvider; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) class TokenCreateHandleParityTest { - @Mock - private ConfigProvider configProvider; - private ReadableAccountStore accountStore; private TokenCreateHandler subject; private CustomFeesValidator customFeesValidator; @@ -90,7 +85,7 @@ class TokenCreateHandleParityTest { @BeforeEach void setUp() { - tokenFieldsValidator = new TokenAttributesValidator(configProvider); + tokenFieldsValidator = new TokenAttributesValidator(); customFeesValidator = new CustomFeesValidator(); tokenCreateValidator = new TokenCreateValidator(tokenFieldsValidator); accountStore = SigReqAdapterUtils.wellKnownAccountStoreAt(); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandlerTest.java index 1eb0a40a5e8c..1e9f27117f69 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenCreateHandlerTest.java @@ -127,11 +127,11 @@ public void setUp() { super.setUp(); refreshWritableStores(); recordBuilder = new SingleTransactionRecordBuilder(consensusInstant); - tokenFieldsValidator = new TokenAttributesValidator(configProvider); + tokenFieldsValidator = new TokenAttributesValidator(); customFeesValidator = new CustomFeesValidator(); tokenCreateValidator = new TokenCreateValidator(tokenFieldsValidator); subject = new TokenCreateHandler(customFeesValidator, tokenCreateValidator); - givenStoresAndConfig(configProvider, handleContext); + givenStoresAndConfig(handleContext); } @Test diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerParityTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerParityTest.java index 2eecb284bd10..c533f3e649e1 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerParityTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerParityTest.java 
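(The hunks above and below this point all apply the same refactor: handler and validator tests stop mocking ConfigProvider and instead build a real Configuration with HederaTestConfigBuilder and stub it onto HandleContext.configuration(), while the code under test reads its TokensConfig from that context. The following is a minimal illustrative sketch of that test pattern, not part of the patch itself; the class name FooHandlerTest is hypothetical, and the config keys shown are the ones used in the hunks above.)

import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;

import com.hedera.node.app.spi.workflows.HandleContext;
import com.hedera.node.config.data.TokensConfig;
import com.hedera.node.config.testfixtures.HederaTestConfigBuilder;
import com.swirlds.config.api.Configuration;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

// Hypothetical test class illustrating the pattern; it is not added by this patch.
class FooHandlerTest {
    private HandleContext handleContext;
    private Configuration configuration;

    @BeforeEach
    void setUp() {
        handleContext = mock(HandleContext.class);
        // Build a real Configuration via the test fixtures instead of mocking
        // ConfigProvider/VersionedConfiguration as the old tests did.
        configuration = new HederaTestConfigBuilder()
                .withValue("tokens.nfts.areEnabled", true)
                .withValue("tokens.nfts.maxBatchSizeWipe", 100)
                .getOrCreateConfig();
        // Handlers and validators now read configuration from the HandleContext.
        given(handleContext.configuration()).willReturn(configuration);
    }

    @Test
    void codeUnderTestReadsConfigFromContext() {
        // Equivalent to what the refactored validators do internally.
        final TokensConfig tokensConfig =
                handleContext.configuration().getConfigData(TokensConfig.class);
        assertTrue(tokensConfig.nftsAreEnabled());
    }
}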
@@ -33,24 +33,19 @@ import com.hedera.node.app.service.token.impl.validators.TokenSupplyChangeOpsValidator; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; import com.hedera.node.app.spi.workflows.PreCheckException; -import com.hedera.node.config.ConfigProvider; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) class TokenMintHandlerParityTest extends ParityTestBase { - @Mock - private ConfigProvider configProvider; - private TokenSupplyChangeOpsValidator validator; private TokenMintHandler subject; @BeforeEach void setup() { - validator = new TokenSupplyChangeOpsValidator(configProvider); + validator = new TokenSupplyChangeOpsValidator(); subject = new TokenMintHandler(validator); } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerTest.java index 8d3e5f627bf8..394c30321e31 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerTest.java @@ -27,7 +27,6 @@ import com.hedera.hapi.node.base.TransactionID; import com.hedera.hapi.node.token.TokenMintTransactionBody; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.config.VersionedConfigImpl; import com.hedera.node.app.records.SingleTransactionRecordBuilder; import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.handlers.TokenMintHandler; @@ -38,7 +37,6 @@ import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; -import com.hedera.node.config.ConfigProvider; import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import com.hedera.pbj.runtime.io.buffer.Bytes; import java.time.Instant; @@ -48,9 +46,6 @@ import org.mockito.Mock; class TokenMintHandlerTest extends CryptoTokenHandlerTestBase { - @Mock(strictness = Mock.Strictness.LENIENT) - private ConfigProvider configProvider; - @Mock(strictness = Mock.Strictness.LENIENT) private HandleContext handleContext; @@ -64,8 +59,8 @@ class TokenMintHandlerTest extends CryptoTokenHandlerTestBase { public void setUp() { super.setUp(); refreshWritableStores(); - givenStoresAndConfig(configProvider, handleContext); - subject = new TokenMintHandler(new TokenSupplyChangeOpsValidator(configProvider)); + givenStoresAndConfig(handleContext); + subject = new TokenMintHandler(new TokenSupplyChangeOpsValidator()); recordBuilder = new SingleTransactionRecordBuilder(consensusNow); } @@ -75,7 +70,6 @@ void rejectsNftMintsWhenNftsNotEnabled() { configuration = new HederaTestConfigBuilder() .withValue("tokens.nfts.areEnabled", false) .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); given(handleContext.configuration()).willReturn(configuration); assertThatThrownBy(() -> subject.handle(handleContext)) @@ -184,7 +178,6 @@ void propagatesErrorOnBadMetadata() { configuration = new HederaTestConfigBuilder() 
.withValue("tokens.nfts.maxMetadataBytes", 1) .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); given(handleContext.configuration()).willReturn(configuration); assertThatThrownBy(() -> subject.handle(handleContext)) @@ -199,7 +192,6 @@ void propagatesErrorOnMaxBatchSizeReached() { configuration = new HederaTestConfigBuilder() .withValue("tokens.nfts.maxBatchSizeMint", 1) .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); given(handleContext.configuration()).willReturn(configuration); assertThatThrownBy(() -> subject.handle(handleContext)) @@ -214,7 +206,6 @@ void validatesMintingResourcesLimit() { configuration = new HederaTestConfigBuilder() .withValue("tokens.nfts.maxAllowedMints", 1) .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); given(handleContext.configuration()).willReturn(configuration); assertThatThrownBy(() -> subject.handle(handleContext)) diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java index 258756a2075d..5962f82c58f8 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java @@ -67,7 +67,6 @@ import com.hedera.node.app.spi.state.WritableStates; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.PreHandleContext; -import com.hedera.node.config.ConfigProvider; import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import com.hedera.pbj.runtime.io.buffer.Bytes; import com.swirlds.common.utility.CommonUtils; @@ -677,9 +676,8 @@ protected CustomFee withRoyaltyFee(final RoyaltyFee royaltyFee) { .build(); } - protected void givenStoresAndConfig(final ConfigProvider configProvider, final HandleContext handleContext) { + protected void givenStoresAndConfig(final HandleContext handleContext) { configuration = new HederaTestConfigBuilder().getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); given(handleContext.configuration()).willReturn(configuration); given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); given(handleContext.readableStore(ReadableAccountStore.class)).willReturn(readableAccountStore); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/AllowanceValidatorTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/AllowanceValidatorTest.java index 794c74d3fb3b..4851e0a4de71 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/AllowanceValidatorTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/AllowanceValidatorTest.java @@ -26,28 +26,21 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.state.token.Account; import 
com.hedera.hapi.node.token.NftAllowance; -import com.hedera.node.app.config.VersionedConfigImpl; import com.hedera.node.app.service.token.impl.ReadableAccountStoreImpl; import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoTokenHandlerTestBase; import com.hedera.node.app.service.token.impl.validators.AllowanceValidator; import com.hedera.node.app.spi.workflows.HandleException; -import com.hedera.node.config.ConfigProvider; -import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import java.util.ArrayList; import java.util.List; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) class AllowanceValidatorTest extends CryptoTokenHandlerTestBase { private AllowanceValidator subject; - @Mock - private ConfigProvider configProvider; - @BeforeEach public void setUp() { super.setUp(); @@ -59,7 +52,7 @@ public void setUp() { given(readableStates.get(ACCOUNTS)).willReturn(readableAccounts); readableAccountStore = new ReadableAccountStoreImpl(readableStates); - subject = new AllowanceValidator(configProvider); + subject = new AllowanceValidator(); } @Test @@ -84,21 +77,6 @@ void aggregatedListCorrectly() { assertThat(aggregateApproveNftAllowances(list)).isEqualTo(4); } - @Test - void checksFlagIfEnabled() { - final var trueConfig = new HederaTestConfigBuilder() - .withValue("hedera.allowances.isEnabled", true) - .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(trueConfig, 1)); - assertThat(subject.isEnabled()).isTrue(); - - final var falseConfig = new HederaTestConfigBuilder() - .withValue("hedera.allowances.isEnabled", false) - .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(falseConfig, 1)); - assertThat(subject.isEnabled()).isFalse(); - } - @Test void validatesAllowancesLimit() { assertThatNoException().isThrownBy(() -> AllowanceValidator.validateAllowanceLimit(ownerAccount, 100)); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/ApproveAllowanceValidatorTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/ApproveAllowanceValidatorTest.java index 52b3685747af..83071ccd1335 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/ApproveAllowanceValidatorTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/ApproveAllowanceValidatorTest.java @@ -30,12 +30,10 @@ import com.hedera.hapi.node.token.NftAllowance; import com.hedera.hapi.node.token.TokenAllowance; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.config.VersionedConfigImpl; import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoTokenHandlerTestBase; import com.hedera.node.app.service.token.impl.validators.ApproveAllowanceValidator; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; -import com.hedera.node.config.ConfigProvider; import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import java.util.List; import org.junit.jupiter.api.BeforeEach; @@ -45,17 +43,14 @@ class ApproveAllowanceValidatorTest extends CryptoTokenHandlerTestBase { 
private ApproveAllowanceValidator subject; - @Mock(strictness = LENIENT) - private ConfigProvider configProvider; - @Mock(strictness = LENIENT) private HandleContext handleContext; @BeforeEach public void setUp() { super.setUp(); - givenStoresAndConfig(configProvider, handleContext); - subject = new ApproveAllowanceValidator(configProvider); + givenStoresAndConfig(handleContext); + subject = new ApproveAllowanceValidator(); } @Test @@ -65,8 +60,7 @@ void notSupportedFails() { final var configuration = new HederaTestConfigBuilder() .withValue("hedera.allowances.isEnabled", false) .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - + given(handleContext.configuration()).willReturn(configuration); assertThatThrownBy(() -> subject.validate(handleContext, account, readableAccountStore)) .isInstanceOf(HandleException.class) .has(responseCode(NOT_SUPPORTED)); @@ -80,8 +74,7 @@ void returnsValidationOnceFailed() { final var configuration = new HederaTestConfigBuilder() .withValue("hedera.allowances.maxTransactionLimit", 1) .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - + given(handleContext.configuration()).willReturn(configuration); assertThatThrownBy(() -> subject.validate(handleContext, account, readableAccountStore)) .isInstanceOf(HandleException.class) .has(responseCode(MAX_ALLOWANCES_EXCEEDED)); @@ -344,7 +337,7 @@ void validateSerialsExistence() { .serialNumbers(List.of(1L, 2L, 3L)) .build())); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + given(handleContext.configuration()).willReturn(configuration); assertThatThrownBy(() -> subject.validate(handleContext, account, readableAccountStore)) .isInstanceOf(HandleException.class) .has(responseCode(INVALID_TOKEN_NFT_SERIAL_NUMBER)); @@ -364,7 +357,7 @@ void validateNegativeSerials() { .serialNumbers(List.of(1L, 2L, -3L)) .build())); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + given(handleContext.configuration()).willReturn(configuration); assertThatThrownBy(() -> subject.validate(handleContext, account, readableAccountStore)) .isInstanceOf(HandleException.class) .has(responseCode(INVALID_TOKEN_NFT_SERIAL_NUMBER)); @@ -384,7 +377,7 @@ void validatesAndFiltersRepeatedSerials() { .serialNumbers(List.of(1L, 2L, 2L, 1L)) .build())); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + given(handleContext.configuration()).willReturn(configuration); assertThatNoException().isThrownBy(() -> subject.validate(handleContext, account, readableAccountStore)); } @@ -396,8 +389,7 @@ void validatesTotalAllowancesInTxn() { final var configuration = new HederaTestConfigBuilder() .withValue("hedera.allowances.maxTransactionLimit", 1) .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - + given(handleContext.configuration()).willReturn(configuration); assertThatThrownBy(() -> subject.validate(handleContext, account, readableAccountStore)) .isInstanceOf(HandleException.class) .has(responseCode(MAX_ALLOWANCES_EXCEEDED)); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/DeleteAllowanceValidatorTest.java 
b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/DeleteAllowanceValidatorTest.java index bbb0d24538f5..fabe3d1cc946 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/DeleteAllowanceValidatorTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/DeleteAllowanceValidatorTest.java @@ -30,12 +30,10 @@ import com.hedera.hapi.node.token.CryptoDeleteAllowanceTransactionBody; import com.hedera.hapi.node.token.NftRemoveAllowance; import com.hedera.hapi.node.transaction.TransactionBody; -import com.hedera.node.app.config.VersionedConfigImpl; import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoTokenHandlerTestBase; import com.hedera.node.app.service.token.impl.validators.DeleteAllowanceValidator; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; -import com.hedera.node.config.ConfigProvider; import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import java.util.List; import org.junit.jupiter.api.BeforeEach; @@ -48,17 +46,14 @@ class DeleteAllowanceValidatorTest extends CryptoTokenHandlerTestBase { private DeleteAllowanceValidator subject; - @Mock(strictness = LENIENT) - private ConfigProvider configProvider; - @Mock(strictness = LENIENT) private HandleContext handleContext; @BeforeEach public void setUp() { super.setUp(); - givenStoresAndConfig(configProvider, handleContext); - subject = new DeleteAllowanceValidator(configProvider); + givenStoresAndConfig(handleContext); + subject = new DeleteAllowanceValidator(); } @Test @@ -67,8 +62,7 @@ void notSupportedFails() { final var configuration = new HederaTestConfigBuilder() .withValue("hedera.allowances.isEnabled", false) .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - + given(handleContext.configuration()).willReturn(configuration); final var nftAllowances = txn.cryptoDeleteAllowance().nftAllowancesOrElse(emptyList()); assertThatThrownBy(() -> subject.validate(handleContext, nftAllowances, account, readableAccountStore)) .isInstanceOf(HandleException.class) @@ -79,8 +73,7 @@ void notSupportedFails() { void rejectsMissingToken() { final var missingToken = TokenID.newBuilder().tokenNum(10000).build(); final var txn = cryptoDeleteAllowanceTransaction(payerId, ownerId, missingToken, List.of(1L, 2L)); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - + given(handleContext.configuration()).willReturn(configuration); final var nftAllowances = txn.cryptoDeleteAllowance().nftAllowancesOrElse(emptyList()); assertThatThrownBy(() -> subject.validate(handleContext, nftAllowances, account, readableAccountStore)) .isInstanceOf(HandleException.class) @@ -90,8 +83,7 @@ void rejectsMissingToken() { @Test void failsForFungibleToken() { final var txn = cryptoDeleteAllowanceTransaction(payerId, ownerId, fungibleTokenId, List.of(1L, 2L)); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - + given(handleContext.configuration()).willReturn(configuration); final var nftAllowances = txn.cryptoDeleteAllowance().nftAllowancesOrElse(emptyList()); assertThatThrownBy(() -> subject.validate(handleContext, nftAllowances, account, readableAccountStore)) .isInstanceOf(HandleException.class) @@ -102,8 +94,7 @@ void 
failsForFungibleToken() { void validatesIfOwnerExists() { final var missingOwner = AccountID.newBuilder().accountNum(10000).build(); final var txn = cryptoDeleteAllowanceTransaction(payerId, missingOwner, nonFungibleTokenId, List.of(1L, 2L)); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - + given(handleContext.configuration()).willReturn(configuration); final var nftAllowances = txn.cryptoDeleteAllowance().nftAllowancesOrElse(emptyList()); assertThatThrownBy(() -> subject.validate(handleContext, nftAllowances, account, readableAccountStore)) .isInstanceOf(HandleException.class) @@ -113,8 +104,7 @@ void validatesIfOwnerExists() { @Test void considersPayerIfOwnerMissing() { final var txn = cryptoDeleteAllowanceTransaction(payerId, null, nonFungibleTokenId, List.of(1L, 2L)); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - + given(handleContext.configuration()).willReturn(configuration); final var nftAllowances = txn.cryptoDeleteAllowance().nftAllowancesOrElse(emptyList()); assertThatNoException() .isThrownBy(() -> subject.validate(handleContext, nftAllowances, account, readableAccountStore)); @@ -124,8 +114,7 @@ void considersPayerIfOwnerMissing() { @Test void failsIfTokenNotAssociatedToAccount() { final var txn = cryptoDeleteAllowanceTransaction(payerId, spenderId, nonFungibleTokenId, List.of(1L, 2L)); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - + given(handleContext.configuration()).willReturn(configuration); final var nftAllowances = txn.cryptoDeleteAllowance().nftAllowancesOrElse(emptyList()); assertThatThrownBy(() -> subject.validate(handleContext, nftAllowances, account, readableAccountStore)) .isInstanceOf(HandleException.class) @@ -139,8 +128,7 @@ void validatesTotalAllowancesInTxn() { final var configuration = new HederaTestConfigBuilder() .withValue("hedera.allowances.maxTransactionLimit", 1) .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - + given(handleContext.configuration()).willReturn(configuration); final var nftAllowances = txn.cryptoDeleteAllowance().nftAllowancesOrElse(emptyList()); assertThatThrownBy(() -> subject.validate(handleContext, nftAllowances, account, readableAccountStore)) .isInstanceOf(HandleException.class) @@ -150,8 +138,7 @@ void validatesTotalAllowancesInTxn() { @Test void happyPath() { final var txn = cryptoDeleteAllowanceTransaction(payerId, ownerId, nonFungibleTokenId, List.of(1L, 2L)); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - + given(handleContext.configuration()).willReturn(configuration); final var nftAllowances = txn.cryptoDeleteAllowance().nftAllowancesOrElse(emptyList()); assertThatNoException() @@ -161,8 +148,7 @@ void happyPath() { @Test void validateSerialsExistence() { final var txn = cryptoDeleteAllowanceTransaction(payerId, ownerId, nonFungibleTokenId, List.of(1L, 2L, 100L)); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - + given(handleContext.configuration()).willReturn(configuration); final var nftAllowances = txn.cryptoDeleteAllowance().nftAllowancesOrElse(emptyList()); assertThatThrownBy(() -> subject.validate(handleContext, nftAllowances, account, readableAccountStore)) @@ -190,8 +176,7 @@ void aggregatesSerialsCorrectly() { @Test void validatesNegativeSerialsAreNotValid() { final var txn = 
cryptoDeleteAllowanceTransaction(payerId, ownerId, nonFungibleTokenId, List.of(1L, -2L)); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - + given(handleContext.configuration()).willReturn(configuration); final var nftAllowances = txn.cryptoDeleteAllowance().nftAllowancesOrElse(emptyList()); assertThatThrownBy(() -> subject.validate(handleContext, nftAllowances, account, readableAccountStore)) diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/TokenAttributesValidatorTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/TokenAttributesValidatorTest.java index 0df749f46043..eba761446fd7 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/TokenAttributesValidatorTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/TokenAttributesValidatorTest.java @@ -32,89 +32,65 @@ import static com.hedera.test.utils.KeyUtils.A_COMPLEX_KEY; import static org.assertj.core.api.Assertions.assertThatNoException; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.mockito.BDDMockito.given; import com.hedera.hapi.node.base.Key; -import com.hedera.node.app.config.VersionedConfigImpl; import com.hedera.node.app.service.token.impl.validators.TokenAttributesValidator; import com.hedera.node.app.spi.workflows.HandleException; -import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.TokensConfig; import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) class TokenAttributesValidatorTest { - @Mock - private ConfigProvider configProvider; - private TokenAttributesValidator subject; + private TokensConfig tokensConfig; @BeforeEach void setUp() { - subject = new TokenAttributesValidator(configProvider); - } - - @Test - void failsForZeroLengthSymbol() { + subject = new TokenAttributesValidator(); final var configuration = new HederaTestConfigBuilder() + .withValue("tokens.maxTokenNameUtf8Bytes", "10") .withValue("tokens.maxSymbolUtf8Bytes", "10") .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + tokensConfig = configuration.getConfigData(TokensConfig.class); + } - assertThatThrownBy(() -> subject.validateTokenSymbol("")) + @Test + void failsForZeroLengthSymbol() { + assertThatThrownBy(() -> subject.validateTokenSymbol("", tokensConfig)) .isInstanceOf(HandleException.class) .has(responseCode(MISSING_TOKEN_SYMBOL)); } @Test void failsForNullSymbol() { - final var configuration = new HederaTestConfigBuilder() - .withValue("tokens.maxSymbolUtf8Bytes", "10") - .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - - assertThatThrownBy(() -> subject.validateTokenSymbol(null)) + assertThatThrownBy(() -> subject.validateTokenSymbol(null, tokensConfig)) .isInstanceOf(HandleException.class) .has(responseCode(MISSING_TOKEN_SYMBOL)); } @Test void failsForVeryLongSymbol() { - final var configuration = new HederaTestConfigBuilder() - .withValue("tokens.maxSymbolUtf8Bytes", "10") 
- .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - assertThatThrownBy(() -> subject.validateTokenSymbol( - "12345678901234567890123456789012345678901234567890123456789012345678901234567890")) + "12345678901234567890123456789012345678901234567890123456789012345678901234567890", + tokensConfig)) .isInstanceOf(HandleException.class) .has(responseCode(TOKEN_SYMBOL_TOO_LONG)); } @Test void failsForZeroByteInSymbol() { - final var configuration = new HederaTestConfigBuilder() - .withValue("tokens.maxSymbolUtf8Bytes", "10") - .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - - assertThatThrownBy(() -> subject.validateTokenSymbol("\0")) + assertThatThrownBy(() -> subject.validateTokenSymbol("\0", tokensConfig)) .isInstanceOf(HandleException.class) .has(responseCode(INVALID_ZERO_BYTE_IN_STRING)); } @Test void failsForZeroByteInName() { - final var configuration = new HederaTestConfigBuilder() - .withValue("tokens.maxTokenNameUtf8Bytes", "10") - .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - - assertThatThrownBy(() -> subject.validateTokenName("\0")) + assertThatThrownBy(() -> subject.validateTokenName("\0", tokensConfig)) .isInstanceOf(HandleException.class) .has(responseCode(INVALID_ZERO_BYTE_IN_STRING)); } @@ -124,34 +100,25 @@ void failsForZeroLengthName() { final var configuration = new HederaTestConfigBuilder() .withValue("tokens.maxTokenNameUtf8Bytes", "10") .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + final var tokensConfig = configuration.getConfigData(TokensConfig.class); - assertThatThrownBy(() -> subject.validateTokenName("")) + assertThatThrownBy(() -> subject.validateTokenName("", tokensConfig)) .isInstanceOf(HandleException.class) .has(responseCode(MISSING_TOKEN_NAME)); } @Test void failsForNullName() { - final var configuration = new HederaTestConfigBuilder() - .withValue("tokens.maxTokenNameUtf8Bytes", "10") - .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - - assertThatThrownBy(() -> subject.validateTokenName(null)) + assertThatThrownBy(() -> subject.validateTokenName(null, tokensConfig)) .isInstanceOf(HandleException.class) .has(responseCode(MISSING_TOKEN_NAME)); } @Test void failsForVeryLongName() { - final var configuration = new HederaTestConfigBuilder() - .withValue("tokens.maxTokenNameUtf8Bytes", "10") - .getOrCreateConfig(); - given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); - assertThatThrownBy(() -> subject.validateTokenName( - "12345678901234567890123456789012345678901234567890123456789012345678901234567890")) + "12345678901234567890123456789012345678901234567890123456789012345678901234567890", + tokensConfig)) .isInstanceOf(HandleException.class) .has(responseCode(TOKEN_NAME_TOO_LONG)); } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/token/UniqueTokenManagementSpecs.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/token/UniqueTokenManagementSpecs.java index 07e4d2b75989..0f6c6c3b0351 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/token/UniqueTokenManagementSpecs.java +++ 
b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/token/UniqueTokenManagementSpecs.java @@ -17,6 +17,7 @@ package com.hedera.services.bdd.suites.token; import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.onlyDefaultHapiSpec; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getReceipt; @@ -638,7 +639,7 @@ private HapiSpec mintUniqueTokenWorksWithRepeatedMetadata() { } private HapiSpec wipeHappyPath() { - return defaultHapiSpec("WipeHappyPath") + return onlyDefaultHapiSpec("WipeHappyPath") .given( newKeyNamed(SUPPLY_KEY), newKeyNamed(WIPE_KEY), @@ -654,13 +655,15 @@ private HapiSpec wipeHappyPath() { .treasury(TOKEN_TREASURY) .wipeKey(WIPE_KEY), tokenAssociate(ACCOUNT, NFT), + getTokenInfo(NFT).logged(), mintToken(NFT, List.of(ByteString.copyFromUtf8("memo"), ByteString.copyFromUtf8(MEMO_2))), + getTokenInfo(NFT).logged(), cryptoTransfer(movingUnique(NFT, 2L).between(TOKEN_TREASURY, ACCOUNT))) .when(wipeTokenAccount(NFT, ACCOUNT, List.of(2L)).via(WIPE_TXN)) .then( getAccountInfo(ACCOUNT).hasOwnedNfts(0), getAccountInfo(TOKEN_TREASURY).hasOwnedNfts(1), - getTokenInfo(NFT).hasTotalSupply(1), + getTokenInfo(NFT).hasTotalSupply(1).logged(), getTokenNftInfo(NFT, 2).hasCostAnswerPrecheck(INVALID_NFT_ID), getTokenNftInfo(NFT, 1).hasSerialNum(1), wipeTokenAccount(NFT, ACCOUNT, List.of(1L)).hasKnownStatus(ACCOUNT_DOES_NOT_OWN_WIPED_NFT)); From dbf8492cea7e79816a08f94b653e564ac2ed37c2 Mon Sep 17 00:00:00 2001 From: Kim Rader Date: Fri, 16 Jun 2023 07:58:36 -0700 Subject: [PATCH 47/70] TransactionGetFastRecord and CryptoGetStakers handlers (#7150) Signed-off-by: Kim Rader --- .../app/workflows/query/QueryDispatcher.java | 4 +- .../app/workflows/query/QueryHandlers.java | 2 + .../query/QueryWorkflowInjectionModule.java | 1 + .../workflows/query/QueryDispatcherTest.java | 14 ++- .../NetworkAdminServiceInjectionModule.java | 3 + .../impl/handlers/NetworkAdminHandlers.java | 9 ++ ...etworkTransactionGetFastRecordHandler.java | 73 ++++++++++++++++ ...rkTransactionGetFastRecordHandlerTest.java | 86 +++++++++++++++++++ .../handlers/CryptoGetStakersHandler.java | 6 +- .../handlers/TokenGetNftInfosHandler.java | 1 + .../handlers/CryptoGetStakersHandlerTest.java | 85 ++++++++++++++++++ 11 files changed, 278 insertions(+), 6 deletions(-) create mode 100644 hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetFastRecordHandler.java create mode 100644 hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetFastRecordHandlerTest.java create mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetStakersHandlerTest.java diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryDispatcher.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryDispatcher.java index 1bbdb0400ee7..c53f2048511a 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryDispatcher.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryDispatcher.java @@ -32,7 +32,6 @@ public class QueryDispatcher { private static final 
String QUERY_NOT_SET = "Query not set"; - private static final String GET_FAST_RECORD_IS_NOT_SUPPORTED = "TransactionGetFastRecord is not supported"; private final QueryHandlers handlers; @@ -79,6 +78,7 @@ public QueryHandler getHandler(@NonNull final Query query) { case NETWORK_GET_EXECUTION_TIME -> handlers.networkGetExecutionTimeHandler(); case TRANSACTION_GET_RECEIPT -> handlers.networkTransactionGetReceiptHandler(); case TRANSACTION_GET_RECORD -> handlers.networkTransactionGetRecordHandler(); + case TRANSACTION_GET_FAST_RECORD -> handlers.networkTransactionGetFastRecordHandler(); case SCHEDULE_GET_INFO -> handlers.scheduleGetInfoHandler(); @@ -87,8 +87,6 @@ public QueryHandler getHandler(@NonNull final Query query) { case TOKEN_GET_NFT_INFO -> handlers.tokenGetNftInfoHandler(); case TOKEN_GET_NFT_INFOS -> handlers.tokenGetNftInfosHandler(); - case TRANSACTION_GET_FAST_RECORD -> throw new UnsupportedOperationException( - GET_FAST_RECORD_IS_NOT_SUPPORTED); case UNSET -> throw new UnsupportedOperationException(QUERY_NOT_SET); }; } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryHandlers.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryHandlers.java index 5837bdac59ef..07a8165e425f 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryHandlers.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryHandlers.java @@ -28,6 +28,7 @@ import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkGetByKeyHandler; import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkGetExecutionTimeHandler; import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkGetVersionInfoHandler; +import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkTransactionGetFastRecordHandler; import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkTransactionGetReceiptHandler; import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkTransactionGetRecordHandler; import com.hedera.node.app.service.schedule.impl.handlers.ScheduleGetInfoHandler; @@ -65,6 +66,7 @@ public record QueryHandlers( @NonNull NetworkGetVersionInfoHandler networkGetVersionInfoHandler, @NonNull NetworkTransactionGetReceiptHandler networkTransactionGetReceiptHandler, @NonNull NetworkTransactionGetRecordHandler networkTransactionGetRecordHandler, + @NonNull NetworkTransactionGetFastRecordHandler networkTransactionGetFastRecordHandler, @NonNull ScheduleGetInfoHandler scheduleGetInfoHandler, @NonNull TokenGetInfoHandler tokenGetInfoHandler, @NonNull TokenGetAccountNftInfosHandler tokenGetAccountNftInfosHandler, diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryWorkflowInjectionModule.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryWorkflowInjectionModule.java index 0eb24f860351..8d46a6c311b4 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryWorkflowInjectionModule.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/query/QueryWorkflowInjectionModule.java @@ -89,6 +89,7 @@ static QueryHandlers provideQueryHandlers( networkHandlers.networkGetVersionInfoHandler(), networkHandlers.networkTransactionGetReceiptHandler(), networkHandlers.networkTransactionGetRecordHandler(), + networkHandlers.networkTransactionGetFastRecordHandler(), scheduleHandlers.scheduleGetInfoHandler(), tokenHandlers.tokenGetInfoHandler(), 
tokenHandlers.tokenGetAccountNftInfosHandler(), diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryDispatcherTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryDispatcherTest.java index e528e9ad0090..877e30be4d93 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryDispatcherTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryDispatcherTest.java @@ -41,6 +41,7 @@ import com.hedera.hapi.node.token.TokenGetNftInfosQuery; import com.hedera.hapi.node.transaction.GetByKeyQuery; import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.TransactionGetFastRecordQuery; import com.hedera.hapi.node.transaction.TransactionGetReceiptQuery; import com.hedera.hapi.node.transaction.TransactionGetRecordQuery; import com.hedera.node.app.service.consensus.impl.handlers.ConsensusGetTopicInfoHandler; @@ -55,6 +56,7 @@ import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkGetByKeyHandler; import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkGetExecutionTimeHandler; import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkGetVersionInfoHandler; +import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkTransactionGetFastRecordHandler; import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkTransactionGetReceiptHandler; import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkTransactionGetRecordHandler; import com.hedera.node.app.service.schedule.impl.handlers.ScheduleGetInfoHandler; @@ -136,6 +138,9 @@ class QueryDispatcherTest { @Mock private NetworkTransactionGetReceiptHandler networkTransactionGetReceiptHandler; + @Mock + private NetworkTransactionGetFastRecordHandler networkTransactionGetFastRecordHandler; + @Mock private NetworkTransactionGetRecordHandler networkTransactionGetRecordHandler; @@ -180,6 +185,7 @@ void setup() { networkGetVersionInfoHandler, networkTransactionGetReceiptHandler, networkTransactionGetRecordHandler, + networkTransactionGetFastRecordHandler, scheduleGetInfoHandler, tokenGetInfoHandler, tokenGetAccountNftInfosHandler, @@ -361,6 +367,12 @@ private static Stream getDispatchParameters() { .transactionGetRecord( TransactionGetRecordQuery.newBuilder().build()) .build(), - (Function) QueryHandlers::networkTransactionGetRecordHandler)); + (Function) QueryHandlers::networkTransactionGetRecordHandler), + Arguments.of( + Query.newBuilder() + .transactionGetFastRecord(TransactionGetFastRecordQuery.newBuilder() + .build()) + .build(), + (Function) QueryHandlers::networkTransactionGetFastRecordHandler)); } } diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/NetworkAdminServiceInjectionModule.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/NetworkAdminServiceInjectionModule.java index 0c5c4cf3cd9d..1a087668c851 100644 --- a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/NetworkAdminServiceInjectionModule.java +++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/NetworkAdminServiceInjectionModule.java @@ -21,6 +21,7 @@ import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkGetByKeyHandler; import 
com.hedera.node.app.service.networkadmin.impl.handlers.NetworkGetExecutionTimeHandler; import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkGetVersionInfoHandler; +import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkTransactionGetFastRecordHandler; import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkTransactionGetReceiptHandler; import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkTransactionGetRecordHandler; import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkUncheckedSubmitHandler; @@ -46,5 +47,7 @@ public interface NetworkAdminServiceInjectionModule { NetworkTransactionGetRecordHandler networkTransactionGetRecordHandler(); + NetworkTransactionGetFastRecordHandler networkTransactionGetFastRecordHandler(); + NetworkUncheckedSubmitHandler networkUncheckedSubmitHandler(); } diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkAdminHandlers.java b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkAdminHandlers.java index cd4d25a7e3cb..0dee9c70c8bf 100644 --- a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkAdminHandlers.java +++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkAdminHandlers.java @@ -42,6 +42,8 @@ public class NetworkAdminHandlers { private final NetworkTransactionGetRecordHandler networkTransactionGetRecordHandler; + private final NetworkTransactionGetFastRecordHandler networkTransactionGetFastRecordHandler; + private final NetworkUncheckedSubmitHandler networkUncheckedSubmitHandler; /** @@ -56,6 +58,7 @@ public NetworkAdminHandlers( @NonNull final NetworkGetVersionInfoHandler networkGetVersionInfoHandler, @NonNull final NetworkTransactionGetReceiptHandler networkTransactionGetReceiptHandler, @NonNull final NetworkTransactionGetRecordHandler networkTransactionGetRecordHandler, + @NonNull final NetworkTransactionGetFastRecordHandler networkTransactionGetFastRecordHandler, @NonNull final NetworkUncheckedSubmitHandler networkUncheckedSubmitHandler) { this.freezeHandler = requireNonNull(freezeHandler, "freezeHandler must not be null"); this.networkGetAccountDetailsHandler = @@ -69,6 +72,8 @@ public NetworkAdminHandlers( networkTransactionGetReceiptHandler, "networkTransactionGetReceiptHandler must not be null"); this.networkTransactionGetRecordHandler = requireNonNull( networkTransactionGetRecordHandler, "networkTransactionGetRecordHandler must not be null"); + this.networkTransactionGetFastRecordHandler = requireNonNull( + networkTransactionGetFastRecordHandler, "networkTransactionGetFastRecordHandler must not be null"); this.networkUncheckedSubmitHandler = requireNonNull(networkUncheckedSubmitHandler, "networkUncheckedSubmitHandler must not be null"); } @@ -109,4 +114,8 @@ public NetworkTransactionGetRecordHandler networkTransactionGetRecordHandler() { public NetworkUncheckedSubmitHandler networkUncheckedSubmitHandler() { return networkUncheckedSubmitHandler; } + + public NetworkTransactionGetFastRecordHandler networkTransactionGetFastRecordHandler() { + return networkTransactionGetFastRecordHandler; + } } diff --git a/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetFastRecordHandler.java 
b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetFastRecordHandler.java new file mode 100644 index 000000000000..2896bdaf0d73 --- /dev/null +++ b/hedera-node/hedera-network-admin-service-impl/src/main/java/com/hedera/node/app/service/networkadmin/impl/handlers/NetworkTransactionGetFastRecordHandler.java @@ -0,0 +1,73 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.networkadmin.impl.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; +import static java.util.Objects.requireNonNull; + +import com.hedera.hapi.node.base.QueryHeader; +import com.hedera.hapi.node.base.ResponseHeader; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.hapi.node.transaction.TransactionGetFastRecordResponse; +import com.hedera.node.app.spi.workflows.FreeQueryHandler; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.app.spi.workflows.QueryContext; +import edu.umd.cs.findbugs.annotations.NonNull; +import javax.inject.Inject; +import javax.inject.Singleton; + +/** + * This class contains all workflow-related functionality regarding TRANSACTION_GET_FAST_RECORD. + *
<p>
+ * This network service call is not supported. Because protobufs promise backwards compatibility, + * we cannot remove it. However, it should not be used. + */ +@Singleton +public class NetworkTransactionGetFastRecordHandler extends FreeQueryHandler { + @Inject + public NetworkTransactionGetFastRecordHandler() { + // Exists for injection + } + + @Override + public QueryHeader extractHeader(@NonNull final Query query) { + requireNonNull(query); + return query.transactionGetFastRecordOrThrow().header(); + } + + @Override + public Response createEmptyResponse(@NonNull final ResponseHeader header) { + requireNonNull(header); + final var response = TransactionGetFastRecordResponse.newBuilder().header(header); + return Response.newBuilder().transactionGetFastRecord(response).build(); + } + + @Override + public void validate(@NonNull final QueryContext context) throws PreCheckException { + requireNonNull(context); + throw new PreCheckException(NOT_SUPPORTED); + } + + @Override + public Response findResponse(@NonNull final QueryContext context, @NonNull final ResponseHeader header) { + // this code should never be executed, as validate() should fail before we get here + requireNonNull(context); + requireNonNull(header); + throw new UnsupportedOperationException(NOT_SUPPORTED.toString()); + } +} diff --git a/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetFastRecordHandlerTest.java b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetFastRecordHandlerTest.java new file mode 100644 index 000000000000..153752ea08de --- /dev/null +++ b/hedera-node/hedera-network-admin-service-impl/src/test/java/com/hedera/node/app/service/networkadmin/impl/test/handlers/NetworkTransactionGetFastRecordHandlerTest.java @@ -0,0 +1,86 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.networkadmin.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.hedera.hapi.node.base.QueryHeader; +import com.hedera.hapi.node.base.ResponseHeader; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.hapi.node.transaction.TransactionGetFastRecordQuery; +import com.hedera.hapi.node.transaction.TransactionGetFastRecordResponse; +import com.hedera.node.app.service.networkadmin.impl.handlers.NetworkTransactionGetFastRecordHandler; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.app.spi.workflows.QueryContext; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class NetworkTransactionGetFastRecordHandlerTest { + @Mock + private QueryContext context; + + private NetworkTransactionGetFastRecordHandler subject; + + @BeforeEach + void setUp() { + subject = new NetworkTransactionGetFastRecordHandler(); + } + + @Test + void extractsHeader() { + final var data = TransactionGetFastRecordQuery.newBuilder() + .header(QueryHeader.newBuilder().build()) + .build(); + final var query = Query.newBuilder().transactionGetFastRecord(data).build(); + final var header = subject.extractHeader(query); + final var op = query.transactionGetFastRecordOrThrow(); + assertThat(op.header()).isEqualTo(header); + } + + @Test + void createsEmptyResponse() { + final var responseHeader = ResponseHeader.newBuilder().build(); + final var response = subject.createEmptyResponse(responseHeader); + final var expectedResponse = Response.newBuilder() + .transactionGetFastRecord( + TransactionGetFastRecordResponse.newBuilder().header(responseHeader)) + .build(); + assertThat(expectedResponse).isEqualTo(response); + } + + @Test + void validateThrowsPreCheck() { + assertThatThrownBy(() -> subject.validate(context)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(NOT_SUPPORTED)); + } + + @Test + void findResponseThrowsUnsupported() { + final var responseHeader = ResponseHeader.newBuilder().build(); + assertThatThrownBy(() -> subject.findResponse(context, responseHeader)) + .isInstanceOf(UnsupportedOperationException.class); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetStakersHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetStakersHandler.java index 29e709bd0b62..7265b216f563 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetStakersHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/CryptoGetStakersHandler.java @@ -16,6 +16,7 @@ package com.hedera.node.app.service.token.impl.handlers; +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; import static java.util.Objects.requireNonNull; import com.hedera.hapi.node.base.HederaFunctionality; @@ -58,13 +59,14 @@ public Response createEmptyResponse(@NonNull final 
ResponseHeader header) { @Override public void validate(@NonNull final QueryContext context) throws PreCheckException { requireNonNull(context); - throw new UnsupportedOperationException("Not implemented"); + throw new PreCheckException(NOT_SUPPORTED); } @Override public Response findResponse(@NonNull final QueryContext context, @NonNull final ResponseHeader header) { + // this code never runs, since validate fails every time requireNonNull(context); requireNonNull(header); - throw new UnsupportedOperationException("Not implemented"); + throw new UnsupportedOperationException(NOT_SUPPORTED.toString()); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfosHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfosHandler.java index da73ec318943..1696c622aef6 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfosHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfosHandler.java @@ -67,6 +67,7 @@ public void validate(@NonNull final QueryContext context) throws PreCheckExcepti @Override public Response findResponse(@NonNull final QueryContext context, @NonNull final ResponseHeader header) { + // this code never runs, since validate fails every time requireNonNull(context); requireNonNull(header); throw new UnsupportedOperationException(NOT_SUPPORTED.toString()); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetStakersHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetStakersHandlerTest.java new file mode 100644 index 000000000000..97b102eaa945 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/CryptoGetStakersHandlerTest.java @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.NOT_SUPPORTED; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.hedera.hapi.node.base.QueryHeader; +import com.hedera.hapi.node.base.ResponseHeader; +import com.hedera.hapi.node.token.CryptoGetStakersQuery; +import com.hedera.hapi.node.token.CryptoGetStakersResponse; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.node.app.service.token.impl.handlers.CryptoGetStakersHandler; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.app.spi.workflows.QueryContext; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class CryptoGetStakersHandlerTest { + @Mock + private QueryContext context; + + private CryptoGetStakersHandler subject; + + @BeforeEach + void setUp() { + subject = new CryptoGetStakersHandler(); + } + + @Test + void extractsHeader() { + final var data = CryptoGetStakersQuery.newBuilder() + .header(QueryHeader.newBuilder().build()) + .build(); + final var query = Query.newBuilder().cryptoGetProxyStakers(data).build(); + final var header = subject.extractHeader(query); + final var op = query.cryptoGetProxyStakersOrThrow(); + assertThat(op.header()).isEqualTo(header); + } + + @Test + void createsEmptyResponse() { + final var responseHeader = ResponseHeader.newBuilder().build(); + final var response = subject.createEmptyResponse(responseHeader); + final var expectedResponse = Response.newBuilder() + .cryptoGetProxyStakers(CryptoGetStakersResponse.newBuilder().header(responseHeader)) + .build(); + assertThat(expectedResponse).isEqualTo(response); + } + + @Test + void validateThrowsPreCheck() { + assertThatThrownBy(() -> subject.validate(context)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(NOT_SUPPORTED)); + } + + @Test + void findResponseThrowsUnsupported() { + final var responseHeader = ResponseHeader.newBuilder().build(); + assertThatThrownBy(() -> subject.findResponse(context, responseHeader)) + .isInstanceOf(UnsupportedOperationException.class); + } +} From 65beb14af942c3ce6b0caafdd14179965847672d Mon Sep 17 00:00:00 2001 From: Cody Littley <56973212+cody-littley@users.noreply.github.com> Date: Fri, 16 Jun 2023 11:24:40 -0500 Subject: [PATCH 48/70] 06688 pces emergency reconnect3 (#7127) Signed-off-by: Cody Littley --- .../java/com/swirlds/platform/Browser.java | 23 +++++++- .../com/swirlds/platform/SwirldsPlatform.java | 33 ++++++----- .../PreconsensusEventFileManager.java | 14 +++++ .../recovery/EmergencyRecoveryManager.java | 9 +++ .../emergencyfile/EmergencyRecoveryFile.java | 2 +- .../PreconsensusEventFileManagerTests.java | 57 +++++++++++++++++++ 6 files changed, 121 insertions(+), 17 deletions(-) diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java index 2f2050b70796..96531b0cdc91 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java +++ 
b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java @@ -54,6 +54,7 @@ import com.swirlds.common.internal.ApplicationDefinition; import com.swirlds.common.io.config.RecycleBinConfig; import com.swirlds.common.io.config.TemporaryFileConfig; +import com.swirlds.common.io.utility.RecycleBin; import com.swirlds.common.merkle.synchronization.config.ReconnectConfig; import com.swirlds.common.metrics.Metrics; import com.swirlds.common.metrics.MetricsProvider; @@ -119,6 +120,7 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.io.UncheckedIOException; import java.net.SocketException; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; @@ -649,6 +651,14 @@ private Collection createLocalPlatforms( // the name of this swirld final String swirldName = appDefinition.getSwirldName(); final SoftwareVersion appVersion = appMain.getSoftwareVersion(); + + final RecycleBin recycleBin; + try { + recycleBin = RecycleBin.create(configuration, nodeId); + } catch (IOException e) { + throw new UncheckedIOException("unable to create recycle bin", e); + } + // We can't send a "real" dispatch, since the dispatcher will not have been started by the // time this class is used. final EmergencyRecoveryManager emergencyRecoveryManager = new EmergencyRecoveryManager( @@ -667,6 +677,15 @@ private Collection createLocalPlatforms( final boolean softwareUpgrade = BootstrapUtils.detectSoftwareUpgrade(appVersion, loadedSignedState.getNullable()); + if (softwareUpgrade) { + try { + logger.info(STARTUP.getMarker(), "Clearing recycle bin as part of software upgrade workflow."); + recycleBin.clear(); + } catch (final IOException e) { + throw new UncheckedIOException("Failed to clear recycle bin", e); + } + } + // Initialize the address book from the configuration and platform saved state. 
final AddressBookInitializer addressBookInitializer = new AddressBookInitializer( appVersion, @@ -685,6 +704,7 @@ private Collection createLocalPlatforms( final SwirldsPlatform platform = new SwirldsPlatform( platformContext, crypto.get(nodeId), + recycleBin, initialAddressBook, nodeId, mainClassName, @@ -692,8 +712,7 @@ private Collection createLocalPlatforms( appVersion, appMain::newState, loadedSignedState, - emergencyRecoveryManager, - softwareUpgrade); + emergencyRecoveryManager); platforms.add(platform); new InfoMember(infoSwirld, instanceNumber, platform); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java index 25df5296b3d3..488ef58c9bb1 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SwirldsPlatform.java @@ -290,6 +290,7 @@ public class SwirldsPlatform implements Platform, Startable { * @param platformContext the context for this platform * @param crypto an object holding all the public/private key pairs and the CSPRNG state for this * member + * @param recycleBin used to delete files that may be useful for later debugging * @param initialAddressBook the address book listing all members in the community * @param id the ID number for this member (if this computer has multiple members in one * swirld) @@ -299,11 +300,11 @@ public class SwirldsPlatform implements Platform, Startable { * @param genesisStateBuilder used to construct a genesis state if no suitable state from disk can be found * @param loadedSignedState used to initialize the loaded state * @param emergencyRecoveryManager used in emergency recovery. 
- * @param softwareUpgrade if true this is a software upgrade, if false then this is just a restart */ SwirldsPlatform( @NonNull final PlatformContext platformContext, @NonNull final Crypto crypto, + @NonNull final RecycleBin recycleBin, @NonNull final AddressBook initialAddressBook, @NonNull final NodeId id, @NonNull final String mainClassName, @@ -311,8 +312,7 @@ public class SwirldsPlatform implements Platform, Startable { @NonNull final SoftwareVersion appVersion, @NonNull final Supplier genesisStateBuilder, @NonNull final ReservedSignedState loadedSignedState, - @NonNull final EmergencyRecoveryManager emergencyRecoveryManager, - final boolean softwareUpgrade) { + @NonNull final EmergencyRecoveryManager emergencyRecoveryManager) { this.platformContext = Objects.requireNonNull(platformContext, "platformContext"); final Time time = Time.getCurrent(); @@ -350,14 +350,7 @@ public class SwirldsPlatform implements Platform, Startable { registerAddressBookMetrics(metrics, initialAddressBook, selfId); - try { - recycleBin = RecycleBin.create(platformContext.getConfiguration(), selfId); - if (softwareUpgrade) { - recycleBin.clear(); - } - } catch (final IOException e) { - throw new UncheckedIOException("Failed to initialize recycle bin", e); - } + this.recycleBin = Objects.requireNonNull(recycleBin); this.consensusMetrics = new ConsensusMetricsImpl(this.selfId, metrics); @@ -380,7 +373,7 @@ public class SwirldsPlatform implements Platform, Startable { final AppCommunicationComponent appCommunicationComponent = wiring.wireAppCommunicationComponent(notificationEngine); - preconsensusEventFileManager = buildPreconsensusEventFileManager(); + preconsensusEventFileManager = buildPreconsensusEventFileManager(emergencyRecoveryManager); preconsensusEventWriter = components.add(buildPreconsensusEventWriter(preconsensusEventFileManager)); stateManagementComponent = wiring.wireStateManagementComponent( @@ -993,9 +986,21 @@ private EventLinker buildEventLinker(@NonNull final List 0) { + final PreconsensusEventFile file = files.removeLast(); + file.deleteFile(databaseDirectory, recycleBin); + } + totalFileByteCount = 0; + updateFileSizeMetrics(); + } + /** * Update metrics with the latest data on file size. */ diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/EmergencyRecoveryManager.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/EmergencyRecoveryManager.java index 4dcf44c1a632..401d4858cd05 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/EmergencyRecoveryManager.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/EmergencyRecoveryManager.java @@ -48,6 +48,15 @@ public EmergencyRecoveryManager( emergencyStateRequired = emergencyRecoveryFile != null; } + /** + * Returns whether an emergency recovery file was present at node boot time. + * + * @return {@code true} if an emergency recovery file was present, {@code false} otherwise + */ + public boolean isEmergencyRecoveryFilePresent() { + return emergencyRecoveryFile != null; + } + /** * Returns whether an emergency state is required to start the node. The state can be loaded from disk or acquired * via an emergency reconnect. 
diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/EmergencyRecoveryFile.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/EmergencyRecoveryFile.java index b99c5da5a68d..420db4e972b6 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/EmergencyRecoveryFile.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/recovery/emergencyfile/EmergencyRecoveryFile.java @@ -34,7 +34,7 @@ */ public record EmergencyRecoveryFile(@NonNull Recovery recovery) { private static final String OUTPUT_FILENAME = "emergencyRecovery.yaml"; - private static final String INPUT_FILENAME = + public static final String INPUT_FILENAME = ConfigurationHolder.getConfigData(StateConfig.class).emergencyStateFileName(); /** diff --git a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java index df6760324eab..d48e31489f6e 100644 --- a/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java +++ b/platform-sdk/swirlds-unit-tests/core/swirlds-platform-test/src/test/java/com/swirlds/platform/test/event/preconsensus/PreconsensusEventFileManagerTests.java @@ -1203,4 +1203,61 @@ void startAfterDiscontinuityInMiddleTest() throws IOException { // Iterating again without fixing discontinuities should still work. assertIteratorEquality(files.iterator(), manager.getFileIterator(startGeneration, false)); } + + @Test + @DisplayName("clear() Test") + void clearTest() throws IOException { + final Random random = getRandomPrintSeed(); + + final int fileCount = 100; + + final List files = new ArrayList<>(); + + // Intentionally pick values close to wrapping around the 3 digit to 4 digit sequence number. + // This will cause the files not to line up alphabetically, and this is a scenario that the + // code should be able to handle. 
+ final long firstSequenceNumber = random.nextLong(950, 1000); + + final long maxDelta = random.nextLong(10, 20); + long minimumGeneration = random.nextLong(0, 1000); + final long nonExistentGeneration = minimumGeneration - 1; + long maximumGeneration = random.nextLong(minimumGeneration, minimumGeneration + maxDelta); + Instant timestamp = Instant.now(); + + for (long sequenceNumber = firstSequenceNumber; + sequenceNumber < firstSequenceNumber + fileCount; + sequenceNumber++) { + + final PreconsensusEventFile file = PreconsensusEventFile.of( + sequenceNumber, minimumGeneration, maximumGeneration, timestamp, fileDirectory, false); + + minimumGeneration = random.nextLong(minimumGeneration, maximumGeneration + 1); + maximumGeneration = + Math.max(maximumGeneration, random.nextLong(minimumGeneration, minimumGeneration + maxDelta)); + timestamp = timestamp.plusMillis(random.nextInt(1, 100_000)); + + files.add(file); + createDummyFile(file); + } + + final PlatformContext platformContext = buildContext(); + + final PreconsensusEventFileManager manager = new PreconsensusEventFileManager( + platformContext, Time.getCurrent(), TestRecycleBin.getInstance(), new NodeId(0)); + + assertIteratorEquality( + files.iterator(), manager.getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false)); + + assertIteratorEquality( + files.iterator(), manager.getFileIterator(files.get(0).getMaximumGeneration(), false)); + + // attempt to start a non-existent generation + assertIteratorEquality(files.iterator(), manager.getFileIterator(nonExistentGeneration, false)); + + manager.clear(); + + assertIteratorEquality( + Collections.emptyIterator(), + manager.getFileIterator(PreconsensusEventFileManager.NO_MINIMUM_GENERATION, false)); + } } From 3dfd36b88442ec299abb61a4eb7e6ce667d2f5e1 Mon Sep 17 00:00:00 2001 From: Iris Simon <122310714+iwsimon@users.noreply.github.com> Date: Fri, 16 Jun 2023 12:24:51 -0400 Subject: [PATCH 49/70] Implement TokenGetNftInfoHandler (#7149) Signed-off-by: Iris Simon --- .../impl/handlers/TokenGetNftInfoHandler.java | 79 ++++- .../handlers/TokenGetNftInfoHandlerTest.java | 305 ++++++++++++++++++ 2 files changed, 382 insertions(+), 2 deletions(-) create mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetNftInfoHandlerTest.java diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfoHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfoHandler.java index 575d10771822..86eed7d34ce2 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfoHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenGetNftInfoHandler.java @@ -16,18 +16,31 @@ package com.hedera.node.app.service.token.impl.handlers; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_NFT_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_NFT_SERIAL_NUMBER; +import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; +import static com.hedera.hapi.node.base.ResponseType.COST_ANSWER; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateFalsePreCheck; +import static 
com.hedera.node.app.spi.workflows.PreCheckException.validateTruePreCheck; import static java.util.Objects.requireNonNull; +import com.hedera.hapi.node.base.AccountID; import com.hedera.hapi.node.base.HederaFunctionality; import com.hedera.hapi.node.base.QueryHeader; import com.hedera.hapi.node.base.ResponseHeader; +import com.hedera.hapi.node.token.NftID; import com.hedera.hapi.node.token.TokenGetNftInfoResponse; +import com.hedera.hapi.node.token.TokenNftInfo; import com.hedera.hapi.node.transaction.Query; import com.hedera.hapi.node.transaction.Response; +import com.hedera.node.app.service.token.ReadableNftStore; import com.hedera.node.app.spi.workflows.PaidQueryHandler; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.QueryContext; +import com.hedera.node.config.data.LedgerConfig; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Optional; import javax.inject.Inject; import javax.inject.Singleton; @@ -58,13 +71,75 @@ public Response createEmptyResponse(@NonNull final ResponseHeader header) { @Override public void validate(@NonNull final QueryContext context) throws PreCheckException { requireNonNull(context); - throw new UnsupportedOperationException("Not implemented"); + final var query = context.query(); + final var nftStore = context.createStore(ReadableNftStore.class); + final var op = query.tokenGetNftInfoOrThrow(); + final var nftId = op.nftIDOrThrow(); + validateTruePreCheck(nftId.hasTokenID(), INVALID_TOKEN_ID); + validateTruePreCheck(nftId.serialNumber() > 0, INVALID_TOKEN_NFT_SERIAL_NUMBER); + + final var nft = nftStore.get(nftId.tokenID(), nftId.serialNumber()); + validateFalsePreCheck(nft == null, INVALID_NFT_ID); } @Override public Response findResponse(@NonNull final QueryContext context, @NonNull final ResponseHeader header) { requireNonNull(context); requireNonNull(header); - throw new UnsupportedOperationException("Not implemented"); + final var query = context.query(); + final var config = context.configuration().getConfigData(LedgerConfig.class); + final var nftStore = context.createStore(ReadableNftStore.class); + final var op = query.tokenGetNftInfoOrThrow(); + final var response = TokenGetNftInfoResponse.newBuilder(); + final var nftId = op.nftIDOrElse(NftID.DEFAULT); + + final var responseType = op.headerOrElse(QueryHeader.DEFAULT).responseType(); + response.header(header); + if (header.nodeTransactionPrecheckCode() == OK && responseType != COST_ANSWER) { + final var optionalInfo = infoForNft(nftId, nftStore, config); + if (optionalInfo.isPresent()) { + response.nft(optionalInfo.get()); + } else { + response.header(ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(INVALID_NFT_ID) + .cost(0)); // from mono service, need to validate in the future + } + } + + return Response.newBuilder().tokenGetNftInfo(response).build(); + } + /** + * Returns the {@link TokenNftInfo} for the given {@link NftID} if it exists. 
+ * + * @param nftId + * the {@link NftID} to get the {@link TokenNftInfo} for + * @param readableNftStore + * the {@link ReadableNftStore} to get the {@link TokenNftInfo} from + * @param config + * the {@link LedgerConfig} to get the ledger ID from + * @return the {@link TokenNftInfo} for the given {@link NftID} if it exists + */ + private Optional infoForNft( + @NonNull final NftID nftId, + @NonNull final ReadableNftStore readableNftStore, + @NonNull final LedgerConfig config) { + requireNonNull(nftId); + requireNonNull(readableNftStore); + requireNonNull(config); + + final var nft = readableNftStore.get(nftId.tokenID(), nftId.serialNumber()); + if (nft == null) { + return Optional.empty(); + } else { + final var info = TokenNftInfo.newBuilder() + .ledgerId(config.id()) + .nftID(nftId) + .accountID(AccountID.newBuilder().accountNum(nft.ownerNumber())) + .creationTime(nft.mintTime()) + .metadata(nft.metadata()) + .spenderId(AccountID.newBuilder().accountNum(nft.spenderNumber())) + .build(); + return Optional.of(info); + } } } diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetNftInfoHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetNftInfoHandlerTest.java new file mode 100644 index 000000000000..c263680481ba --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenGetNftInfoHandlerTest.java @@ -0,0 +1,305 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseType.ANSWER_ONLY; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static org.assertj.core.api.Assertions.assertThatCode; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mock.Strictness.LENIENT; +import static org.mockito.Mockito.when; + +import com.hedera.hapi.node.base.QueryHeader; +import com.hedera.hapi.node.base.ResponseCodeEnum; +import com.hedera.hapi.node.base.ResponseHeader; +import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.state.common.UniqueTokenId; +import com.hedera.hapi.node.state.token.Nft; +import com.hedera.hapi.node.token.NftID; +import com.hedera.hapi.node.token.TokenGetNftInfoQuery; +import com.hedera.hapi.node.token.TokenGetNftInfoResponse; +import com.hedera.hapi.node.token.TokenNftInfo; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.node.app.service.token.ReadableNftStore; +import com.hedera.node.app.service.token.impl.ReadableNftStoreImpl; +import com.hedera.node.app.service.token.impl.handlers.TokenGetNftInfoHandler; +import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoTokenHandlerTestBase; +import com.hedera.node.app.spi.fixtures.state.MapReadableKVState; +import com.hedera.node.app.spi.workflows.PreCheckException; +import com.hedera.node.app.spi.workflows.QueryContext; +import com.hedera.node.config.converter.BytesConverter; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import com.hedera.pbj.runtime.io.buffer.Bytes; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class TokenGetNftInfoHandlerTest extends CryptoTokenHandlerTestBase { + + @Mock(strictness = LENIENT) + private QueryContext context; + + private TokenGetNftInfoHandler subject; + + @BeforeEach + public void setUp() { + super.setUp(); + subject = new TokenGetNftInfoHandler(); + } + + @Test + void extractsHeader() { + final var query = createTokenGetNftInfoQuery(uniqueTokenIdSl1); + final var header = subject.extractHeader(query); + final var op = query.tokenGetNftInfoOrThrow(); + assertEquals(op.header(), header); + } + + @Test + void createsEmptyResponse() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.FAIL_FEE) + .build(); + final var response = subject.createEmptyResponse(responseHeader); + final var expectedResponse = Response.newBuilder() + .tokenGetNftInfo(TokenGetNftInfoResponse.newBuilder().header(responseHeader)) + .build(); + assertEquals(expectedResponse, response); + } + + @Test + void validatesQueryWhenValidNft() { + final var query = createTokenGetNftInfoQuery(uniqueTokenIdSl1); + given(context.query()).willReturn(query); + given(context.createStore(ReadableNftStore.class)).willReturn(readableNftStore); + + assertThatCode(() -> subject.validate(context)).doesNotThrowAnyException(); + } + + @Test + void validatesQueryIfInvalidNft() { + final var state = 
MapReadableKVState.builder(NFTS).build(); + given(readableStates.get(NFTS)).willReturn(state); + final var store = new ReadableNftStoreImpl(readableStates); + + final var query = createTokenGetNftInfoQuery(uniqueTokenIdSl1); + when(context.query()).thenReturn(query); + when(context.createStore(ReadableNftStore.class)).thenReturn(store); + + assertThatThrownBy(() -> subject.validate(context)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_NFT_ID)); + } + + @Test + void validatesQueryIfInvalidNftTokenId() { + final var query = createTokenGetNftInfoQueryInvalidTokenId(uniqueTokenIdSl1); + when(context.query()).thenReturn(query); + when(context.createStore(ReadableNftStore.class)).thenReturn(readableNftStore); + + assertThatThrownBy(() -> subject.validate(context)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_TOKEN_ID)); + } + + @Test + void validatesQueryIfInvalidNftSerialNumb() { + final var query = createTokenGetNftInfoQueryInvalidSerialNum(uniqueTokenIdSl1); + when(context.query()).thenReturn(query); + when(context.createStore(ReadableNftStore.class)).thenReturn(readableNftStore); + + assertThatThrownBy(() -> subject.validate(context)) + .isInstanceOf(PreCheckException.class) + .has(responseCode(ResponseCodeEnum.INVALID_TOKEN_NFT_SERIAL_NUMBER)); + } + + @Test + void validatesQueryIfInvalidNftInTrans() { + final var state = MapReadableKVState.builder(NFTS).build(); + given(readableStates.get(NFTS)).willReturn(state); + final var store = new ReadableNftStoreImpl(readableStates); + + final var query = createEmptyTokenGetNftInfoQuery(); + when(context.query()).thenReturn(query); + when(context.createStore(ReadableNftStore.class)).thenReturn(store); + + assertThrows(NullPointerException.class, () -> subject.validate(context)); + } + + @Test + void getsResponseIfFailedResponse() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.FAIL_FEE) + .build(); + + final var query = createTokenGetNftInfoQuery(uniqueTokenIdSl1); + when(context.query()).thenReturn(query); + when(context.createStore(ReadableNftStore.class)).thenReturn(readableNftStore); + + final var config = new HederaTestConfigBuilder() + .withValue("tokens.maxRelsPerInfoQuery", 1000) + .getOrCreateConfig(); + given(context.configuration()).willReturn(config); + + final var response = subject.findResponse(context, responseHeader); + final var op = response.tokenGetNftInfoOrThrow(); + assertEquals(ResponseCodeEnum.FAIL_FEE, op.header().nodeTransactionPrecheckCode()); + } + + @Test + void getsResponseIfInvalidNft() { + final var state = MapReadableKVState.builder(NFTS).build(); + given(readableStates.get(NFTS)).willReturn(state); + final var store = new ReadableNftStoreImpl(readableStates); + + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.OK) + .build(); + + final var query = createTokenGetNftInfoQuery(uniqueTokenIdSl1); + when(context.query()).thenReturn(query); + when(context.createStore(ReadableNftStore.class)).thenReturn(store); + + final var config = new HederaTestConfigBuilder() + .withValue("tokens.maxRelsPerInfoQuery", 1000) + .getOrCreateConfig(); + given(context.configuration()).willReturn(config); + + final var response = subject.findResponse(context, responseHeader); + final var op = response.tokenGetNftInfoOrThrow(); + assertNull(op.nft()); + assertEquals(ResponseCodeEnum.INVALID_NFT_ID, op.header().nodeTransactionPrecheckCode()); + } + + 
@Test + void getsResponseIfOkResponse() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.OK) + .build(); + final var expectedInfo = getExpectedInfo(); + + nftSl1 = nftSl1.copyBuilder() + .spenderNumber(spenderId.accountNum()) + .mintTime(consensusTimestamp) + .metadata(Bytes.wrap(evmAddress)) + .build(); + final var state = MapReadableKVState.builder(NFTS) + .value(uniqueTokenIdSl1, nftSl1) + .build(); + given(readableStates.get(NFTS)).willReturn(state); + final var store = new ReadableNftStoreImpl(readableStates); + + checkResponse(responseHeader, expectedInfo, store); + } + + @Test + void getsResponseIfOkWithAnswerOnlyHead() { + final var responseHeader = ResponseHeader.newBuilder() + .nodeTransactionPrecheckCode(ResponseCodeEnum.OK) + .responseType(ANSWER_ONLY) + .build(); + final var expectedInfo = getExpectedInfo(); + nftSl1 = nftSl1.copyBuilder() + .spenderNumber(spenderId.accountNum()) + .mintTime(consensusTimestamp) + .metadata(Bytes.wrap(evmAddress)) + .build(); + final var state = MapReadableKVState.builder(NFTS) + .value(uniqueTokenIdSl1, nftSl1) + .build(); + given(readableStates.get(NFTS)).willReturn(state); + final var store = new ReadableNftStoreImpl(readableStates); + + checkResponse(responseHeader, expectedInfo, store); + } + + private void checkResponse( + final ResponseHeader responseHeader, final TokenNftInfo expectedInfo, ReadableNftStore ReadableNftStore) { + final var query = createTokenGetNftInfoQuery(uniqueTokenIdSl1); + when(context.query()).thenReturn(query); + when(context.createStore(ReadableNftStore.class)).thenReturn(ReadableNftStore); + + final var config = + new HederaTestConfigBuilder().withValue("ledger.id", "0x03").getOrCreateConfig(); + given(context.configuration()).willReturn(config); + + final var response = subject.findResponse(context, responseHeader); + final var nftInfoResponse = response.tokenGetNftInfoOrThrow(); + assertEquals(ResponseCodeEnum.OK, nftInfoResponse.header().nodeTransactionPrecheckCode()); + assertEquals(expectedInfo, nftInfoResponse.nft()); + } + + private TokenNftInfo getExpectedInfo() { + return TokenNftInfo.newBuilder() + .ledgerId(new BytesConverter().convert("0x03")) + .nftID(NftID.newBuilder() + .tokenID(TokenID.newBuilder().tokenNum(uniqueTokenIdSl1.tokenTypeNumber())) + .serialNumber(uniqueTokenIdSl1.serialNumber())) + .accountID(ownerId) + .creationTime(consensusTimestamp) + .metadata(Bytes.wrap(evmAddress)) + .spenderId(spenderId) + .build(); + } + + private Query createTokenGetNftInfoQuery(final UniqueTokenId uniqueTokenId) { + final var data = TokenGetNftInfoQuery.newBuilder() + .nftID(NftID.newBuilder() + .tokenID(TokenID.newBuilder().tokenNum(uniqueTokenId.tokenTypeNumber())) + .serialNumber(uniqueTokenId.serialNumber())) + .header(QueryHeader.newBuilder().build()) + .build(); + + return Query.newBuilder().tokenGetNftInfo(data).build(); + } + + private Query createTokenGetNftInfoQueryInvalidTokenId(final UniqueTokenId uniqueTokenId) { + final var data = TokenGetNftInfoQuery.newBuilder() + .nftID(NftID.newBuilder().serialNumber(uniqueTokenId.serialNumber())) + .header(QueryHeader.newBuilder().build()) + .build(); + + return Query.newBuilder().tokenGetNftInfo(data).build(); + } + + private Query createTokenGetNftInfoQueryInvalidSerialNum(final UniqueTokenId uniqueTokenId) { + final var data = TokenGetNftInfoQuery.newBuilder() + .nftID(NftID.newBuilder() + .tokenID(TokenID.newBuilder().tokenNum(uniqueTokenId.tokenTypeNumber())) + .serialNumber(-1L)) + 
.header(QueryHeader.newBuilder().build()) + .build(); + + return Query.newBuilder().tokenGetNftInfo(data).build(); + } + + private Query createEmptyTokenGetNftInfoQuery() { + final var data = TokenGetNftInfoQuery.newBuilder() + .header(QueryHeader.newBuilder().build()) + .build(); + + return Query.newBuilder().tokenGetNftInfo(data).build(); + } +} From 0acfc23fbefb538a2fa0b4d44f72465ca9416193 Mon Sep 17 00:00:00 2001 From: artemananiev <33361937+artemananiev@users.noreply.github.com> Date: Fri, 16 Jun 2023 10:20:15 -0700 Subject: [PATCH 50/70] 7087: ISS in account store after migration to disk (#7134) Fixes: https://github.com/hashgraph/hedera-services/pull/7134 Reviewed-by: Michael Tinker , Oleg Mazurov Signed-off-by: Artem Ananev --- .../service/mono/state/virtual/entities/OnDiskAccount.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/virtual/entities/OnDiskAccount.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/virtual/entities/OnDiskAccount.java index 83f70f12425b..5f1c84eb7bfe 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/virtual/entities/OnDiskAccount.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/virtual/entities/OnDiskAccount.java @@ -110,6 +110,11 @@ public OnDiskAccount(final OnDiskAccount that) { this.hbarAllowances = that.hbarAllowances; this.fungibleAllowances = that.fungibleAllowances; this.nftOperatorApprovals = that.nftOperatorApprovals; + this.firstStorageKeyNonZeroBytes = that.firstStorageKeyNonZeroBytes; + if (that.firstStorageKey != null) { + this.firstStorageKey = new int[that.firstStorageKey.length]; + System.arraycopy(that.firstStorageKey, 0, this.firstStorageKey, 0, that.firstStorageKey.length); + } System.arraycopy(that.ints, 0, this.ints, 0, IntValues.COUNT); System.arraycopy(that.longs, 0, this.longs, 0, LongValues.COUNT); } From e9a4f7d26cf3b1fc3aedd53e40de2a89b45eb73e Mon Sep 17 00:00:00 2001 From: Joseph Sinclair <121976561+jsync-swirlds@users.noreply.github.com> Date: Fri, 16 Jun 2023 11:53:12 -0700 Subject: [PATCH 51/70] Change `JKey` methods to throw `InvalidKeyException` and add `equals` and `hashCode` (#7143) Signed-off-by: Joseph Sinclair --- .../handle/AdaptedMonoProcessLogic.java | 4 +- .../codec/FileServiceStateTranslator.java | 6 +- .../codec/FileServiceStateTranslatorTest.java | 14 +- .../hedera/node/app/service/mono/Utils.java | 7 +- .../txns/UpdateTopicResourceUsage.java | 4 +- .../mono/ledger/accounts/AliasManager.java | 8 +- .../service/mono/legacy/core/jproto/JKey.java | 98 +++++---- .../app/service/mono/pbj/PbjConverter.java | 4 +- .../precompile/codec/TokenCreateWrapper.java | 4 +- .../impl/TokenCreatePrecompile.java | 8 +- .../consensus/TopicUpdateTransitionLogic.java | 8 +- .../crypto/CryptoUpdateTransitionLogic.java | 4 +- .../txns/file/FileUpdateTransitionLogic.java | 4 +- .../validation/ContextOptionValidator.java | 8 +- .../mono/txns/validation/PureValidation.java | 4 +- .../app/service/mono/utils/MiscUtils.java | 8 +- .../context/primitives/StateViewTest.java | 4 +- .../fees/calculation/FeeCalcUtilsTest.java | 3 +- .../UpdateMerkleTopicResourceUsageTest.java | 8 +- .../ledger/accounts/AliasManagerTest.java | 12 +- .../properties/AccountPropertyTest.java | 3 +- .../mono/legacy/core/jproto/JKeyListTest.java | 5 +- .../core/jproto/JKeySerializerTest.java | 6 +- 
.../mono/legacy/core/jproto/JKeyTest.java | 12 +- .../legacy/core/jproto/JThresholdKeyTest.java | 7 +- .../JKeyAdditionalTypeSupportTest.java | 13 +- .../BackedSystemAccountsCreatorTest.java | 4 +- .../BlocklistAccountCreatorTest.java | 4 +- .../HfsSystemFilesManagerTest.java | 4 +- .../MigrationRecordsManagerTest.java | 10 +- .../contracts/StaticEntityAccessTest.java | 4 +- .../store/contracts/WorldLedgersTest.java | 6 +- .../precompile/SyntheticTxnFactoryTest.java | 4 +- .../codec/TokenCreateWrapperTest.java | 4 +- .../MerkleTopicUpdateTransitionLogicTest.java | 116 +++++----- .../txns/crypto/AutoCreationLogicTest.java | 10 +- .../CryptoCreateTransitionLogicTest.java | 14 +- .../file/FileUpdateTransitionLogicTest.java | 4 +- .../app/service/mono/utils/MiscUtilsTest.java | 4 +- .../accounts/MerkleAccountFactory.java | 5 +- .../hedera/test/factories/keys/KeyTree.java | 4 +- .../scenarios/BadPayerScenarios.java | 16 +- .../ConsensusCreateTopicScenarios.java | 10 +- .../ConsensusDeleteTopicScenarios.java | 23 +- .../ConsensusSubmitMessageScenarios.java | 21 +- .../ConsensusUpdateTopicScenarios.java | 20 +- .../scenarios/ContractCreateScenarios.java | 40 ++-- .../scenarios/ContractDeleteScenarios.java | 23 +- .../scenarios/ContractUpdateScenarios.java | 20 +- .../scenarios/CryptoAllowanceScenarios.java | 22 +- .../scenarios/CryptoCreateScenarios.java | 18 +- .../scenarios/CryptoDeleteScenarios.java | 18 +- .../scenarios/CryptoTransferScenarios.java | 78 +++---- .../scenarios/CryptoUpdateScenarios.java | 30 +-- .../scenarios/FileAppendScenarios.java | 12 +- .../scenarios/FileCreateScenarios.java | 14 +- .../scenarios/FileDeleteScenarios.java | 27 ++- .../scenarios/FileUpdateScenarios.java | 14 +- .../scenarios/ScheduleCreateScenarios.java | 208 +++++++++++------- .../scenarios/ScheduleDeleteScenarios.java | 31 ++- .../scenarios/ScheduleSignScenarios.java | 28 ++- .../scenarios/SystemDeleteScenarios.java | 40 ++-- .../scenarios/SystemUndeleteScenarios.java | 16 +- .../scenarios/TokenAssociateScenarios.java | 44 ++-- .../scenarios/TokenBurnScenarios.java | 16 +- .../scenarios/TokenCreateScenarios.java | 54 ++--- .../scenarios/TokenDeleteScenarios.java | 34 ++- .../scenarios/TokenDissociateScenarios.java | 35 ++- .../TokenFeeScheduleUpdateScenarios.java | 36 ++- .../scenarios/TokenFreezeScenarios.java | 19 +- .../scenarios/TokenKycGrantScenarios.java | 13 +- .../scenarios/TokenKycRevokeScenarios.java | 33 ++- .../scenarios/TokenMintScenarios.java | 28 ++- .../scenarios/TokenPauseScenarios.java | 13 +- .../scenarios/TokenUnfreezeScenarios.java | 25 ++- .../scenarios/TokenUnpauseScenarios.java | 13 +- .../scenarios/TokenUpdateScenarios.java | 115 ++++++---- .../scenarios/TokenWipeScenarios.java | 28 ++- .../scenarios/TxnHandlingScenario.java | 16 +- .../test/factories/topics/TopicFactory.java | 3 +- .../ScheduleDeleteHandlerParityTest.java | 4 +- .../handlers/ScheduleDeleteHandlerTest.java | 4 +- 82 files changed, 1039 insertions(+), 686 deletions(-) diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/AdaptedMonoProcessLogic.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/AdaptedMonoProcessLogic.java index ee1c45c6b0de..9e0634baec60 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/AdaptedMonoProcessLogic.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/AdaptedMonoProcessLogic.java @@ -38,13 +38,13 @@ import 
com.swirlds.common.system.transaction.ConsensusTransaction; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.security.InvalidKeyException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import javax.inject.Inject; import javax.inject.Singleton; -import org.apache.commons.codec.DecoderException; @Singleton public class AdaptedMonoProcessLogic implements ProcessLogic { @@ -120,7 +120,7 @@ private List extract(@Nullable final Collection asHederaKey(final Key key) { return Optional.empty(); } return Optional.of(fcKey); - } catch (DecoderException ignore) { + } catch (InvalidKeyException ignore) { return Optional.empty(); } } @@ -61,10 +61,11 @@ public static Optional asHederaKey(@Nullable final com.hedera.hapi.no return Optional.empty(); } return Optional.of(fcKey); - } catch (DecoderException ignore) { + } catch (InvalidKeyException ignore) { return Optional.empty(); } } + /** * Converts a set of {@link com.hedera.hapi.node.base.Key} to a set of {@link HederaKey} * diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/fees/calculation/consensus/txns/UpdateTopicResourceUsage.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/fees/calculation/consensus/txns/UpdateTopicResourceUsage.java index d193336d926b..a672c28d06f0 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/fees/calculation/consensus/txns/UpdateTopicResourceUsage.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/fees/calculation/consensus/txns/UpdateTopicResourceUsage.java @@ -31,9 +31,9 @@ import com.hederahashgraph.api.proto.java.TransactionBody; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.security.InvalidKeyException; import javax.inject.Inject; import javax.inject.Singleton; -import org.apache.commons.codec.DecoderException; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -86,7 +86,7 @@ public FeeData usageGivenExplicit( merkleTopic.hasAutoRenewAccountId(), expiry, txnBody.getConsensusUpdateTopic()); - } catch (final DecoderException illegal) { + } catch (final InvalidKeyException illegal) { log.warn("Usage estimation unexpectedly failed for {}!", txnBody, illegal); throw new InvalidTxBodyException(illegal); } diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/ledger/accounts/AliasManager.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/ledger/accounts/AliasManager.java index d71f9a3cd089..ecdc8f764009 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/ledger/accounts/AliasManager.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/ledger/accounts/AliasManager.java @@ -37,6 +37,7 @@ import com.hederahashgraph.api.proto.java.Key; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.security.InvalidKeyException; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; @@ -45,7 +46,6 @@ import java.util.function.UnaryOperator; import javax.inject.Inject; import javax.inject.Singleton; -import org.apache.commons.codec.DecoderException; import org.apache.logging.log4j.LogManager; import 
org.apache.logging.log4j.Logger; import org.hyperledger.besu.datatypes.Address; @@ -183,7 +183,7 @@ public void rebuildAliasesMap( numEOAliases.incrementAndGet(); } } catch (final InvalidProtocolBufferException - | DecoderException + | InvalidKeyException | IllegalArgumentException ignore) { // any expected exception means no eth mapping } @@ -259,7 +259,7 @@ public static byte[] keyAliasToEVMAddress(final ByteString alias) { final Key key = Key.parseFrom(alias); final JKey jKey = JKey.mapKey(key); return tryAddressRecovery(jKey, ADDRESS_RECOVERY_FN); - } catch (InvalidProtocolBufferException | DecoderException | IllegalArgumentException ignore) { + } catch (InvalidProtocolBufferException | InvalidKeyException | IllegalArgumentException ignore) { // any expected exception means no eth mapping return null; } @@ -274,7 +274,7 @@ public static byte[] keyAliasToEVMAddress(@NonNull final Bytes alias) { final Key key = Key.parseFrom(ret); final JKey jKey = JKey.mapKey(key); return tryAddressRecovery(jKey, ADDRESS_RECOVERY_FN); - } catch (InvalidProtocolBufferException | DecoderException | IllegalArgumentException ignore) { + } catch (InvalidProtocolBufferException | InvalidKeyException | IllegalArgumentException ignore) { // any expected exception means no eth mapping return null; } diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKey.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKey.java index 3e79526aa8e5..286c9388419a 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKey.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKey.java @@ -16,8 +16,6 @@ package com.hedera.node.app.service.mono.legacy.core.jproto; -import static java.util.Collections.emptyList; - import com.google.protobuf.ByteString; import com.hedera.hapi.node.base.ContractID; import com.hedera.node.app.spi.key.HederaKey; @@ -29,10 +27,11 @@ import edu.umd.cs.findbugs.annotations.NonNull; import java.io.ByteArrayInputStream; import java.io.IOException; +import java.security.InvalidKeyException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Objects; -import org.apache.commons.codec.DecoderException; /** Maps to proto Key. 
*/ public abstract class JKey implements HederaKey { @@ -59,9 +58,9 @@ public static boolean denotesImmutableEntity(@NonNull final JKey key) { * * @param key the proto Key to be converted * @return the generated JKey instance - * @throws DecoderException on an inconvertible given key + * @throws InvalidKeyException If the key is not a valid key type */ - public static JKey mapKey(Key key) throws DecoderException { + public static JKey mapKey(Key key) throws InvalidKeyException { return convertKey(key, 1); } @@ -70,9 +69,8 @@ public static JKey mapKey(Key key) throws DecoderException { * * @param key the proto Key to be converted * @return the generated JKey instance - * @throws DecoderException on an inconvertible given key */ - public static JKey mapKey(@NonNull final com.hedera.hapi.node.base.Key key) throws DecoderException { + public static JKey mapKey(@NonNull final com.hedera.hapi.node.base.Key key) throws InvalidKeyException { return convertKey(key, 1); } @@ -83,11 +81,11 @@ public static JKey mapKey(@NonNull final com.hedera.hapi.node.base.Key key) thro * @param key the current proto Key to be converted * @param depth current level that is to be verified. The first level has a value of 1. * @return the converted JKey instance - * @throws DecoderException on an inconvertible given key + * @throws InvalidKeyException If the key is not a valid key type or exceeds the allowable depth of nesting. */ - public static JKey convertKey(Key key, int depth) throws DecoderException { + public static JKey convertKey(Key key, int depth) throws InvalidKeyException { if (depth > MAX_KEY_DEPTH) { - throw new DecoderException("Exceeding max expansion depth of " + MAX_KEY_DEPTH); + throw new InvalidKeyException("Exceeding max expansion depth of " + MAX_KEY_DEPTH); } if (!(key.hasThresholdKey() || key.hasKeyList())) { @@ -120,12 +118,12 @@ public static JKey convertKey(Key key, int depth) throws DecoderException { * @param key the current proto Key to be converted * @param depth current level that is to be verified. The first level has a value of 1. * @return the converted JKey instance - * @throws DecoderException on an inconvertible given key + * @throws InvalidKeyException If the key is not a valid key type or exceeds the allowable depth of nesting. 
*/ public static JKey convertKey(@NonNull final com.hedera.hapi.node.base.Key key, final int depth) - throws DecoderException { + throws InvalidKeyException { if (depth > MAX_KEY_DEPTH) { - throw new DecoderException("Exceeding max expansion depth of " + MAX_KEY_DEPTH); + throw new InvalidKeyException("Exceeding max expansion depth of " + MAX_KEY_DEPTH); } if (!(key.hasThresholdKey() || key.hasKeyList())) { @@ -134,7 +132,7 @@ public static JKey convertKey(@NonNull final com.hedera.hapi.node.base.Key key, if (key.hasThresholdKey()) { final var thresholdKey = key.thresholdKeyOrThrow(); - List tKeys = thresholdKey.keys().keysOrElse(emptyList()); + List tKeys = thresholdKey.keys().keysOrElse(Collections.emptyList()); List jkeys = new ArrayList<>(); for (var aKey : tKeys) { JKey res = convertKey(aKey, depth + 1); @@ -160,9 +158,9 @@ public static JKey convertKey(@NonNull final com.hedera.hapi.node.base.Key key, * * @param key proto Key to be converted * @return the converted JKey instance - * @throws DecoderException on an inconvertible given key + * @throws InvalidKeyException If the key is not a valid key type */ - private static JKey convertBasic(Key key) throws DecoderException { + private static JKey convertBasic(Key key) throws InvalidKeyException { JKey rv; if (!key.getEd25519().isEmpty()) { byte[] pubKeyBytes = key.getEd25519().toByteArray(); @@ -185,7 +183,7 @@ private static JKey convertBasic(Key key) throws DecoderException { } else if (!key.getDelegatableContractId().getEvmAddress().isEmpty()) { rv = new JDelegatableContractAliasKey(key.getDelegatableContractId()); } else { - throw new DecoderException("Key type not implemented: key=" + key); + throw new InvalidKeyException("Key type not implemented: key=" + key); } return rv; @@ -196,9 +194,9 @@ private static JKey convertBasic(Key key) throws DecoderException { * * @param key proto Key to be converted * @return the converted JKey instance - * @throws DecoderException on an inconvertible given key + * @throws InvalidKeyException If the key is not a valid key type */ - private static JKey convertBasic(final com.hedera.hapi.node.base.Key key) throws DecoderException { + private static JKey convertBasic(final com.hedera.hapi.node.base.Key key) throws InvalidKeyException { final var oneOf = key.key(); return switch (oneOf.kind()) { case ED25519 -> { @@ -229,7 +227,7 @@ private static JKey convertBasic(final com.hedera.hapi.node.base.Key key) throws .build(); yield new JContractIDKey(proto); } else { - throw new DecoderException("Unable to decode contract key=" + key); + throw new InvalidKeyException("Unable to decode contract key=" + key); } } case DELEGATABLE_CONTRACT_ID -> { @@ -244,10 +242,10 @@ private static JKey convertBasic(final com.hedera.hapi.node.base.Key key) throws .build(); yield new JDelegatableContractIDKey(proto); } else { - throw new DecoderException("Unable to decode contract key=" + key); + throw new InvalidKeyException("Unable to decode contract key=" + key); } } - default -> throw new DecoderException("Key type not implemented: key=" + key); + default -> throw new InvalidKeyException("Key type not implemented: key=" + key); }; } @@ -256,9 +254,9 @@ private static JKey convertBasic(final com.hedera.hapi.node.base.Key key) throws * * @param jkey JKey object to be converted * @return the converted proto Key instance - * @throws DecoderException on an inconvertible given key + * @throws InvalidKeyException If the key is not a valid key type */ - static Key convertJKeyBasic(JKey jkey) throws DecoderException { + 
static Key convertJKeyBasic(JKey jkey) throws InvalidKeyException { Key rv; if (jkey.hasEd25519Key()) { rv = Key.newBuilder() @@ -294,9 +292,11 @@ static Key convertJKeyBasic(JKey jkey) throws DecoderException { jkey.getDelegatableContractAliasKey().getContractID()) .build(); } else { - throw new DecoderException("Key type not implemented: key=" + jkey); + // Warning: Do Not allow anything that calls toString, equals, or hashCode on JKey here. + // Object.toString calls hashCode, and equals and hashCode both call this method + // so you would create an infinite recursion. + throw new InvalidKeyException("Key type not implemented."); } - return rv; } @@ -306,14 +306,13 @@ static Key convertJKeyBasic(JKey jkey) throws DecoderException { * @param jkey the current JKey to be converted * @param depth current level that is to be verified. The first level has a value of 1. * @return the converted proto Key instance - * @throws DecoderException on an inconvertible given key + * @throws InvalidKeyException If the key is not a valid key type or exceeds the allowable depth of nesting. */ - public static Key convertJKey(JKey jkey, int depth) throws DecoderException { + public static Key convertJKey(JKey jkey, int depth) throws InvalidKeyException { if (depth > MAX_KEY_DEPTH) { - throw new DecoderException("Exceeding max expansion depth of " + MAX_KEY_DEPTH); + throw new InvalidKeyException("Exceeding max expansion depth of " + MAX_KEY_DEPTH); } - - if (!(jkey.hasThresholdKey() || jkey.hasKeyList())) { + if (!(jkey.hasThresholdKey() || jkey.hasKeyList() || jkey.isEmpty())) { return convertJKeyBasic(jkey); } else if (jkey.hasThresholdKey()) { List jKeys = jkey.getThresholdKey().getKeys().getKeysList(); @@ -327,8 +326,8 @@ public static Key convertJKey(JKey jkey, int depth) throws DecoderException { Key result = Key.newBuilder() .setThresholdKey(ThresholdKey.newBuilder().setKeys(keys).setThreshold(thd)) .build(); - return (result); - } else { + return result; + } else if (jkey.hasKeyList()) { List jKeys = jkey.getKeyList().getKeysList(); List tkeys = new ArrayList<>(); for (JKey aKey : jKeys) { @@ -336,8 +335,9 @@ public static Key convertJKey(JKey jkey, int depth) throws DecoderException { tkeys.add(res); } KeyList keys = KeyList.newBuilder().addAllKeys(tkeys).build(); - Key result = Key.newBuilder().setKeyList(keys).build(); - return (result); + return Key.newBuilder().setKeyList(keys).build(); + } else { + return Key.newBuilder().build(); } } @@ -355,14 +355,38 @@ public static boolean equalUpToDecodability(JKey a, JKey b) { return Objects.equals(aKey, bKey); } + @Override + public boolean equals(final Object other) { + if (this == other) { + return true; + } + if (other == null || getClass() != other.getClass()) { + return false; + } + try { + return Objects.equals(mapJKey(this), mapJKey((JKey) other)); + } catch (InvalidKeyException ignore) { + return false; + } + } + + @Override + public int hashCode() { + try { + return Objects.hashCode(mapJKey(this)); + } catch (InvalidKeyException ignore) { + return Integer.MIN_VALUE; + } + } + /** * Maps a JKey instance to a proto Key instance. 
* * @param jkey the JKey to be converted * @return the converted proto Key instance - * @throws DecoderException on an inconvertible given key + * @throws InvalidKeyException If the key is not a valid key type */ - public static Key mapJKey(JKey jkey) throws DecoderException { + public static Key mapJKey(JKey jkey) throws InvalidKeyException { return convertJKey(jkey, 1); } diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/pbj/PbjConverter.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/pbj/PbjConverter.java index 5ef5270e51a0..81804e12e010 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/pbj/PbjConverter.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/pbj/PbjConverter.java @@ -55,9 +55,9 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.lang.reflect.InvocationTargetException; +import java.security.InvalidKeyException; import java.util.Objects; import java.util.Optional; -import org.apache.commons.codec.DecoderException; public final class PbjConverter { public static @NonNull AccountID toPbj(@NonNull com.hederahashgraph.api.proto.java.AccountID accountID) { @@ -1284,7 +1284,7 @@ public static com.hedera.hapi.node.base.Key asPbjKey(@NonNull final JKey jKey) { requireNonNull(jKey); try { return toPbj(JKey.mapJKey(jKey)); - } catch (DecoderException e) { + } catch (InvalidKeyException e) { throw new RuntimeException(e); } } diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/contracts/precompile/codec/TokenCreateWrapper.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/contracts/precompile/codec/TokenCreateWrapper.java index 043d2d722982..052c86dc9668 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/contracts/precompile/codec/TokenCreateWrapper.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/contracts/precompile/codec/TokenCreateWrapper.java @@ -30,9 +30,9 @@ import com.hederahashgraph.api.proto.java.RoyaltyFee; import com.hederahashgraph.api.proto.java.TokenID; import java.math.BigInteger; +import java.security.InvalidKeyException; import java.util.List; import java.util.Optional; -import org.apache.commons.codec.DecoderException; public class TokenCreateWrapper { private final boolean isFungible; @@ -153,7 +153,7 @@ public void setRoyaltyFees(final List royaltyFees) { this.royaltyFees = royaltyFees; } - public void setAllInheritedKeysTo(final JKey senderKey) throws DecoderException { + public void setAllInheritedKeysTo(final JKey senderKey) throws InvalidKeyException { for (final var tokenKey : tokenKeys) { if (tokenKey.key().isShouldInheritAccountKeySet()) { tokenKey.key().setInheritedKey(JKey.mapJKey(senderKey)); diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/contracts/precompile/impl/TokenCreatePrecompile.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/contracts/precompile/impl/TokenCreatePrecompile.java index a78bd1d69854..86da41ceae9f 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/contracts/precompile/impl/TokenCreatePrecompile.java +++ 
b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/contracts/precompile/impl/TokenCreatePrecompile.java @@ -90,6 +90,7 @@ import com.hederahashgraph.api.proto.java.TransactionID; import edu.umd.cs.findbugs.annotations.NonNull; import java.math.BigInteger; +import java.security.InvalidKeyException; import java.time.Instant; import java.util.ArrayList; import java.util.List; @@ -97,7 +98,6 @@ import java.util.function.Predicate; import java.util.function.UnaryOperator; import javax.inject.Provider; -import org.apache.commons.codec.DecoderException; import org.apache.tuweni.bytes.Bytes; import org.hyperledger.besu.datatypes.Address; import org.hyperledger.besu.datatypes.Wei; @@ -346,7 +346,7 @@ public TransactionBody.Builder body(final Bytes input, final UnaryOperator asUsableFcKey(final Key key) { return Optional.empty(); } return Optional.of(fcKey); - } catch (final DecoderException ignore) { + } catch (final InvalidKeyException ignore) { return Optional.empty(); } } @@ -600,7 +600,7 @@ public static String describe(final JKey k) { } try { return mapJKey(k).toString(); - } catch (final DecoderException ignore) { + } catch (final InvalidKeyException ignore) { return ""; } } diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/primitives/StateViewTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/primitives/StateViewTest.java index a12d9d50c17a..a3beee9d5ce8 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/primitives/StateViewTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/primitives/StateViewTest.java @@ -132,6 +132,7 @@ import com.swirlds.common.crypto.CryptographyHolder; import com.swirlds.common.utility.CommonUtils; import com.swirlds.merkle.map.MerkleMap; +import java.security.InvalidKeyException; import java.time.Instant; import java.util.Arrays; import java.util.Collections; @@ -139,7 +140,6 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import org.apache.commons.codec.DecoderException; import org.bouncycastle.util.encoders.Hex; import org.hyperledger.besu.datatypes.Address; import org.junit.jupiter.api.BeforeEach; @@ -364,7 +364,7 @@ public void setup() throws Throwable { subject.contractBytecode = bytecode; } - private void setUpToken(final MerkleToken token) throws DecoderException { + private void setUpToken(final MerkleToken token) throws InvalidKeyException { token.setMemo(tokenMemo); token.setAdminKey(TxnHandlingScenario.TOKEN_ADMIN_KT.asJKey()); token.setFreezeKey(TxnHandlingScenario.TOKEN_FREEZE_KT.asJKey()); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/fees/calculation/FeeCalcUtilsTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/fees/calculation/FeeCalcUtilsTest.java index 5d13b1c05e7a..33976beb23c7 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/fees/calculation/FeeCalcUtilsTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/fees/calculation/FeeCalcUtilsTest.java @@ -41,6 +41,7 @@ import com.hederahashgraph.api.proto.java.Key; import com.hederahashgraph.api.proto.java.Timestamp; import com.swirlds.merkle.map.MerkleMap; +import java.security.InvalidKeyException; import java.text.MessageFormat; import java.util.MissingResourceException; import 
java.util.Optional; @@ -102,7 +103,7 @@ void returnsZeroAccountExpiryIfUnavail() { } @Test - void returnsFileExpiryIfAvail() throws Exception { + void returnsFileExpiryIfAvail() throws InvalidKeyException { final var view = mock(StateView.class); final var fid = IdUtils.asFile("1.2.3"); final var wacl = JKey.mapKey(Key.newBuilder() diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/fees/calculation/consensus/txns/UpdateMerkleTopicResourceUsageTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/fees/calculation/consensus/txns/UpdateMerkleTopicResourceUsageTest.java index c3472f9889a7..0b23188477a3 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/fees/calculation/consensus/txns/UpdateMerkleTopicResourceUsageTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/fees/calculation/consensus/txns/UpdateMerkleTopicResourceUsageTest.java @@ -55,8 +55,8 @@ import com.hederahashgraph.api.proto.java.TransactionID; import com.swirlds.common.utility.CommonUtils; import edu.umd.cs.findbugs.annotations.Nullable; +import java.security.InvalidKeyException; import java.util.Optional; -import org.apache.commons.codec.DecoderException; import org.hamcrest.Matchers; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -115,7 +115,7 @@ void getFeeThrowsExceptionForBadTxBody() { } @Test - void getFeeThrowsExceptionForBadKeys() throws DecoderException, IllegalArgumentException { + void getFeeThrowsExceptionForBadKeys() throws InvalidKeyException, IllegalArgumentException { final var txnBody = makeTransactionBody( topicId, defaultMemo, @@ -128,7 +128,7 @@ void getFeeThrowsExceptionForBadKeys() throws DecoderException, IllegalArgumentE new MerkleTopic(defaultMemo, adminKey, submitKey, 0, new EntityId(0, 1, 2), new RichInstant(36_000, 0)); given(topics.get(EntityNum.fromTopicId(topicId))).willReturn(merkleTopic); final var mockedJkey = mockStatic(JKey.class); - mockedJkey.when(() -> JKey.mapJKey(any())).thenThrow(new DecoderException()); + mockedJkey.when(() -> JKey.mapJKey(any())).thenThrow(new InvalidKeyException()); assertThrows(InvalidTxBodyException.class, () -> subject.usageGiven(txnBody, sigValueObj, view)); assertThat( @@ -138,7 +138,7 @@ void getFeeThrowsExceptionForBadKeys() throws DecoderException, IllegalArgumentE } @Test - void updateToMissingTopic() throws DecoderException, InvalidTxBodyException { + void updateToMissingTopic() throws InvalidKeyException, InvalidTxBodyException { final var txBody = makeTransactionBody( topicId, defaultMemo, diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/ledger/accounts/AliasManagerTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/ledger/accounts/AliasManagerTest.java index cc4c853f5be6..8edcf5806696 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/ledger/accounts/AliasManagerTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/ledger/accounts/AliasManagerTest.java @@ -44,10 +44,10 @@ import com.swirlds.common.constructable.ConstructableRegistryException; import com.swirlds.fchashmap.FCHashMap; import com.swirlds.merkle.map.MerkleMap; +import java.security.InvalidKeyException; import java.util.Collections; import java.util.Map; import java.util.function.BiConsumer; -import org.apache.commons.codec.DecoderException; 
import org.apache.tuweni.bytes.Bytes; import org.bouncycastle.util.encoders.Hex; import org.hyperledger.besu.datatypes.Address; @@ -113,7 +113,7 @@ void canLinkAndUnlinkAddresses() { } @Test - void canLinkAndUnlinkEthereumAddresses() throws InvalidProtocolBufferException, DecoderException { + void canLinkAndUnlinkEthereumAddresses() throws InvalidProtocolBufferException, InvalidKeyException { final Key key = Key.parseFrom(ECDSA_PUBLIC_KEY); final JKey jKey = JKey.mapKey(key); final boolean added = subject.maybeLinkEvmAddress(jKey, num); @@ -125,7 +125,7 @@ void canLinkAndUnlinkEthereumAddresses() throws InvalidProtocolBufferException, } @Test - void publicKeyCouldNotBeParsed() throws InvalidProtocolBufferException, DecoderException { + void publicKeyCouldNotBeParsed() throws InvalidProtocolBufferException, InvalidKeyException { Key key = Key.parseFrom(ECDSA_PUBLIC_KEY); JKey jKey = JKey.mapKey(key); subject.maybeLinkEvmAddress(jKey, num); @@ -145,7 +145,7 @@ void noopOnTryingToForgetMalformattedSecp256k1Key() { } @Test - void skipsUnrecoverableEthereumAddresses() throws InvalidProtocolBufferException, DecoderException { + void skipsUnrecoverableEthereumAddresses() throws InvalidProtocolBufferException, InvalidKeyException { final Key key = Key.parseFrom(ECDSA_PUBLIC_KEY); final JKey jKey = JKey.mapKey(key); final boolean added = subject.maybeLinkEvmAddress(jKey, num, any -> null); @@ -158,7 +158,7 @@ void ignoresNullKeys() { } @Test - void wontLinkOrUnlinked25519Key() throws DecoderException { + void wontLinkOrUnlinked25519Key() throws InvalidKeyException { final var keyData = ByteString.copyFrom("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".getBytes()); final Key key = Key.newBuilder().setEd25519(keyData).build(); final JKey jKey = JKey.mapKey(key); @@ -205,7 +205,7 @@ void isAliasChecksForMapMembershipOnly() { @Test void lookupIdByECDSAKeyAliasShouldReturnNumFromEVMAddressAliasMap() - throws InvalidProtocolBufferException, DecoderException { + throws InvalidProtocolBufferException, InvalidKeyException { subject.link(ByteString.copyFrom(ECDSA_PUBLIC_KEY_ADDRESS), num); assertEquals(num, subject.lookupIdBy(ByteString.copyFrom(ECDSA_PUBLIC_KEY))); } diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/ledger/properties/AccountPropertyTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/ledger/properties/AccountPropertyTest.java index c2661f03b166..8dd700fd1101 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/ledger/properties/AccountPropertyTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/ledger/properties/AccountPropertyTest.java @@ -69,6 +69,7 @@ import com.hederahashgraph.api.proto.java.TokenID; import com.hederahashgraph.api.proto.java.TransactionReceipt; import com.hederahashgraph.api.proto.java.TransactionRecord; +import java.security.InvalidKeyException; import java.util.ArrayList; import java.util.List; import java.util.TreeMap; @@ -108,7 +109,7 @@ void canGetAndSetNullAutoRenewAccountId() { @Test @SuppressWarnings("java:S5961") - void gettersAndSettersWork() throws Exception { + void gettersAndSettersWork() throws NegativeAccountBalanceException, InvalidKeyException { final boolean origIsDeleted = false; final boolean origIsReceiverSigReq = false; final boolean origIsContract = false; diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeyListTest.java 
b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeyListTest.java index 79f6e03066ad..8c011bc420ec 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeyListTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeyListTest.java @@ -28,6 +28,7 @@ import com.swirlds.common.io.streams.SerializableDataInputStream; import java.io.ByteArrayInputStream; import java.io.IOException; +import java.security.InvalidKeyException; import java.util.List; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -73,7 +74,7 @@ void isNotEmpty() { } @Test - void invalidJKeyListTest() throws Exception { + void invalidJKeyListTest() throws InvalidKeyException { Key validED25519Key = Key.newBuilder() .setEd25519(TxnUtils.randomUtf8ByteString(JEd25519Key.ED25519_BYTE_LENGTH)) .build(); @@ -111,7 +112,7 @@ void invalidJKeyListTest() throws Exception { } @Test - void validJKeyListTest() throws Exception { + void validJKeyListTest() throws InvalidKeyException { Key validED25519Key = Key.newBuilder() .setEd25519(TxnUtils.randomUtf8ByteString(JEd25519Key.ED25519_BYTE_LENGTH)) .build(); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeySerializerTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeySerializerTest.java index e95cce494b21..95ba03b4f2bb 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeySerializerTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeySerializerTest.java @@ -39,6 +39,7 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.security.InvalidKeyException; import java.security.PrivateKey; import java.util.ArrayList; import java.util.HashMap; @@ -47,7 +48,6 @@ import java.util.concurrent.atomic.AtomicInteger; import net.i2p.crypto.eddsa.EdDSAPublicKey; import net.i2p.crypto.eddsa.KeyPairGenerator; -import org.apache.commons.codec.DecoderException; import org.apache.commons.lang3.NotImplementedException; import org.junit.jupiter.api.Test; @@ -438,7 +438,7 @@ void jKeyListSerDes() throws IOException { } @Test - void jKeyProtoSerDes() throws IOException, DecoderException { + void jKeyProtoSerDes() throws IOException, InvalidKeyException { final Map pubKey2privKeyMap = new HashMap<>(); Key protoKey; JKey jkey = null; @@ -470,7 +470,7 @@ void jKeyProtoSerDes() throws IOException, DecoderException { } @Test - void jKeyECDSASecp256k1KeySerDes() throws Exception { + void jKeyECDSASecp256k1KeySerDes() throws InvalidKeyException { final Map pubKey2privKeyMap = new HashMap<>(); Key protoKey; protoKey = genSingleECDSASecp256k1Key(pubKey2privKeyMap); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeyTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeyTest.java index f000d9918f3a..729b49c85ac8 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeyTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeyTest.java @@ -34,9 +34,9 @@ import 
com.hedera.test.utils.IdUtils; import com.hedera.test.utils.TxnUtils; import com.hederahashgraph.api.proto.java.Key; +import java.security.InvalidKeyException; import java.util.Arrays; import java.util.List; -import org.apache.commons.codec.DecoderException; import org.junit.jupiter.api.Test; class JKeyTest { @@ -64,7 +64,7 @@ void negativeConvertKeyTest() { // expect: assertThrows( - DecoderException.class, + InvalidKeyException.class, () -> JKey.convertKey(keyTooDeep, 1), "Exceeding max expansion depth of " + JKey.MAX_KEY_DEPTH); } @@ -76,7 +76,7 @@ void negativeConvertJKeyTest() { // expect: assertThrows( - DecoderException.class, + InvalidKeyException.class, () -> JKey.convertJKey(jKeyTooDeep, 1), "Exceeding max expansion depth of " + JKey.MAX_KEY_DEPTH); } @@ -108,7 +108,7 @@ void canGetPrimitiveKeyForEd25519OrSecp256k1() { } @Test - void canMapDelegateToGrpc() throws DecoderException { + void canMapDelegateToGrpc() throws InvalidKeyException { final var id = IdUtils.asContract("1.2.3"); final var expected = Key.newBuilder().setDelegatableContractId(id).build(); @@ -119,7 +119,7 @@ void canMapDelegateToGrpc() throws DecoderException { } @Test - void canMapDelegateFromGrpc() throws DecoderException { + void canMapDelegateFromGrpc() throws InvalidKeyException { final var id = IdUtils.asContract("1.2.3"); final var input = Key.newBuilder().setDelegatableContractId(id).build(); @@ -133,7 +133,7 @@ void canMapDelegateFromGrpc() throws DecoderException { void rejectsEmptyKey() { // expect: assertThrows( - DecoderException.class, + InvalidKeyException.class, () -> JKey.convertJKeyBasic(new JKey() { @Override public boolean isEmpty() { diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JThresholdKeyTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JThresholdKeyTest.java index 7bd4e097388e..4bd88a66476f 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JThresholdKeyTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JThresholdKeyTest.java @@ -25,6 +25,7 @@ import com.hederahashgraph.api.proto.java.Key; import com.hederahashgraph.api.proto.java.KeyList; import com.hederahashgraph.api.proto.java.ThresholdKey; +import java.security.InvalidKeyException; import java.util.List; import org.junit.jupiter.api.Test; @@ -60,12 +61,12 @@ private Key thresholdKey(final KeyList keyList, final int threshold) { .build(); } - private JKey jThresholdKey(final KeyList keyList, final int threshold) throws Exception { + private JKey jThresholdKey(final KeyList keyList, final int threshold) throws InvalidKeyException { return JKey.convertKey(thresholdKey(keyList, threshold), 1); } @Test - void JThresholdKeyWithVariousThresholdTest() throws Exception { + void JThresholdKeyWithVariousThresholdTest() throws InvalidKeyException { final Key validContractIDKey = Key.newBuilder() .setContractID(ContractID.newBuilder().setContractNum(1L).build()) .build(); @@ -83,7 +84,7 @@ void JThresholdKeyWithVariousThresholdTest() throws Exception { } @Test - void invalidJThresholdKeyTest() throws Exception { + void invalidJThresholdKeyTest() throws InvalidKeyException { final Key validED25519Key = Key.newBuilder() .setEd25519(TxnUtils.randomUtf8ByteString(JEd25519Key.ED25519_BYTE_LENGTH)) .build(); diff --git 
a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/unit/serialization/JKeyAdditionalTypeSupportTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/unit/serialization/JKeyAdditionalTypeSupportTest.java index 201b12424272..2e7f9a0cedf0 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/unit/serialization/JKeyAdditionalTypeSupportTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/unit/serialization/JKeyAdditionalTypeSupportTest.java @@ -31,26 +31,27 @@ import com.hederahashgraph.api.proto.java.Key; import com.swirlds.common.io.streams.SerializableDataInputStream; import java.io.ByteArrayInputStream; -import org.apache.commons.codec.DecoderException; +import java.io.IOException; +import java.security.InvalidKeyException; import org.junit.jupiter.api.Test; class JKeyAdditionalTypeSupportTest { @Test - void serializingJContractIDKeyTest() throws Exception { + void serializingJContractIDKeyTest() throws IOException, InvalidKeyException { final var cid = ContractID.newBuilder().setShardNum(0).setRealmNum(0).setContractNum(1001); final var key = Key.newBuilder().setContractID(cid).build(); commonAssertions(key); } @Test - void serializingJRSA_3072KeyTest() throws Exception { + void serializingJRSA_3072KeyTest() throws IOException, InvalidKeyException { final var keyBytes = TxnUtils.randomUtf8ByteString(3072 / 8); final var key = Key.newBuilder().setRSA3072(keyBytes).build(); commonAssertions(key); } @Test - void canMapAndUnmapContractAliasKeys() throws DecoderException { + void canMapAndUnmapContractAliasKeys() throws InvalidKeyException { final byte[] mockAddr = unhex("aaaaaaaaaaaaaaaaaaaaaaaa9abcdefabcdefbbb"); final var input = ContractID.newBuilder() .setEvmAddress(ByteString.copyFrom(mockAddr)) @@ -65,7 +66,7 @@ void canMapAndUnmapContractAliasKeys() throws DecoderException { } @Test - void canMapAndUnmapContractDelegateAliasKeys() throws DecoderException { + void canMapAndUnmapContractDelegateAliasKeys() throws InvalidKeyException { final byte[] mockAddr = unhex("aaaaaaaaaaaaaaaaaaaaaaaa9abcdefabcdefbbb"); final var input = ContractID.newBuilder() .setEvmAddress(ByteString.copyFrom(mockAddr)) @@ -79,7 +80,7 @@ void canMapAndUnmapContractDelegateAliasKeys() throws DecoderException { assertTrue(JKey.equalUpToDecodability(subject, reconstructed)); } - private void commonAssertions(final Key key) throws Exception { + private void commonAssertions(final Key key) throws InvalidKeyException, IOException { final var jKey = JKey.mapKey(key); final var ser = JKeySerializer.serialize(jKey); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/initialization/BackedSystemAccountsCreatorTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/initialization/BackedSystemAccountsCreatorTest.java index 4e58eeb323fa..8a2be1304075 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/initialization/BackedSystemAccountsCreatorTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/initialization/BackedSystemAccountsCreatorTest.java @@ -53,10 +53,10 @@ import com.hederahashgraph.api.proto.java.KeyList; import com.swirlds.common.system.address.Address; import com.swirlds.common.system.address.AddressBook; +import java.security.InvalidKeyException; import 
java.time.Instant; import java.util.List; import java.util.Set; -import org.apache.commons.codec.DecoderException; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -87,7 +87,7 @@ class BackedSystemAccountsCreatorTest { @BeforeEach @SuppressWarnings("unchecked") - void setup() throws DecoderException, NegativeAccountBalanceException, IllegalArgumentException { + void setup() throws InvalidKeyException, NegativeAccountBalanceException, IllegalArgumentException { genesisKey = JKey.mapKey(Key.newBuilder() .setKeyList(KeyList.newBuilder().addKeys(MiscUtils.asKeyUnchecked(pretendKey))) .build()); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/initialization/BlocklistAccountCreatorTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/initialization/BlocklistAccountCreatorTest.java index 1c9231bf9da9..c9b80773e86e 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/initialization/BlocklistAccountCreatorTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/initialization/BlocklistAccountCreatorTest.java @@ -49,8 +49,8 @@ import com.hederahashgraph.api.proto.java.ScheduleID; import com.hederahashgraph.api.proto.java.TokenID; import com.hederahashgraph.api.proto.java.TopicID; +import java.security.InvalidKeyException; import java.util.function.Supplier; -import org.apache.commons.codec.DecoderException; import org.hamcrest.Matchers; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -91,7 +91,7 @@ class BlocklistAccountCreatorTest { private BlocklistAccountCreator subject; @BeforeEach - void setUp() throws DecoderException { + void setUp() throws InvalidKeyException { ids = new EntityIdSource() { long nextId = FIRST_UNUSED_ID; diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/initialization/HfsSystemFilesManagerTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/initialization/HfsSystemFilesManagerTest.java index 7a19909336cf..6d05989bce57 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/initialization/HfsSystemFilesManagerTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/initialization/HfsSystemFilesManagerTest.java @@ -83,13 +83,13 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Paths; +import java.security.InvalidKeyException; import java.security.PublicKey; import java.util.Arrays; import java.util.Map; import java.util.Properties; import java.util.function.Consumer; import java.util.function.Supplier; -import org.apache.commons.codec.DecoderException; import org.hamcrest.Matchers; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -160,7 +160,7 @@ class HfsSystemFilesManagerTest { @BeforeEach @SuppressWarnings("unchecked") - void setup() throws DecoderException { + void setup() throws InvalidKeyException { final var keyBytes = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".getBytes(); masterKey = new JEd25519Key(keyBytes); expectedInfo = new HFileMeta( diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/migration/MigrationRecordsManagerTest.java 
b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/migration/MigrationRecordsManagerTest.java index aad9b7a63757..bfc1671a2418 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/migration/MigrationRecordsManagerTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/migration/MigrationRecordsManagerTest.java @@ -77,11 +77,11 @@ import com.swirlds.merkle.map.MerkleMap; import com.swirlds.merkle.tree.MerkleBinaryTree; import com.swirlds.merkle.tree.MerkleTreeInternalNode; +import java.security.InvalidKeyException; import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.codec.DecoderException; import org.hamcrest.Matchers; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -130,7 +130,7 @@ class MigrationRecordsManagerTest { genesisKey = JKey.mapKey(Key.newBuilder() .setKeyList(KeyList.newBuilder().addKeys(MiscUtils.asKeyUnchecked(systemAccountKey))) .build()); - } catch (DecoderException e) { + } catch (InvalidKeyException e) { throw new RuntimeException(e); } } @@ -323,7 +323,7 @@ void streamsSystemAccountCreationRecords() { } @Test - void streamsBlocklistedAccountWhenFeatureFlagIsEnabled() throws DecoderException { + void streamsBlocklistedAccountWhenFeatureFlagIsEnabled() throws InvalidKeyException { final var bodyCaptor = forClass(TransactionBody.Builder.class); given(consensusTimeTracker.unlimitedPreceding()).willReturn(true); given(bootstrapProperties.getBooleanProperty(PropertyNames.ACCOUNTS_BLOCKLIST_ENABLED)) @@ -592,7 +592,7 @@ private void givenSystemAccountsCreated() { given(systemAccountsCreator.getSystemAccountsCreated()).willReturn(systemAccounts); } - private void givenBlocklistedAccountsCreated() throws DecoderException { + private void givenBlocklistedAccountsCreated() throws InvalidKeyException { blocklistedAccounts.addAll(List.of( blockedAccount(ByteString.copyFromUtf8(EVM_ADDRESS_1), MEMO_1), blockedAccount(ByteString.copyFromUtf8(EVM_ADDRESS_2), MEMO_2))); @@ -601,7 +601,7 @@ private void givenBlocklistedAccountsCreated() throws DecoderException { given(blocklistAccountCreator.getBlockedAccountsCreated()).willReturn(blocklistedAccounts); } - private HederaAccount blockedAccount(ByteString evmAddress, String memo) throws DecoderException { + private HederaAccount blockedAccount(ByteString evmAddress, String memo) throws InvalidKeyException { return new HederaAccountCustomizer() .isReceiverSigRequired(true) .isDeclinedReward(true) diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/StaticEntityAccessTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/StaticEntityAccessTest.java index 0f4c654509ad..53b2ad9cb902 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/StaticEntityAccessTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/StaticEntityAccessTest.java @@ -79,12 +79,12 @@ import com.swirlds.virtualmap.VirtualMap; import java.math.BigInteger; import java.nio.charset.StandardCharsets; +import java.security.InvalidKeyException; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; -import 
org.apache.commons.codec.DecoderException; import org.apache.tuweni.bytes.Bytes; import org.apache.tuweni.units.bigints.UInt256; import org.hyperledger.besu.datatypes.Address; @@ -360,7 +360,7 @@ void getsZeroBalanceIfNoAssociation() { } @Test - void getKeys() throws DecoderException { + void getKeys() throws InvalidKeyException { token.setAdminKey(TxnHandlingScenario.TOKEN_ADMIN_KT.asJKey()); token.setKycKey(TxnHandlingScenario.TOKEN_KYC_KT.asJKey()); token.setFreezeKey(TxnHandlingScenario.TOKEN_FREEZE_KT.asJKey()); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/WorldLedgersTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/WorldLedgersTest.java index 0d0bb1d23042..9032e025e1f5 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/WorldLedgersTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/WorldLedgersTest.java @@ -104,12 +104,12 @@ import com.hederahashgraph.api.proto.java.TokenInfo; import com.hederahashgraph.api.proto.java.TokenNftInfo; import com.swirlds.fchashmap.FCHashMap; +import java.security.InvalidKeyException; import java.time.Instant; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Set; -import org.apache.commons.codec.DecoderException; import org.apache.commons.lang3.tuple.Pair; import org.apache.tuweni.bytes.Bytes; import org.hyperledger.besu.datatypes.Address; @@ -656,7 +656,7 @@ void staticKeyInfoWorks() { } @Test - void nonStaticKeyInfoWorks() throws DecoderException { + void nonStaticKeyInfoWorks() throws InvalidKeyException { given(tokensLedger.get(fungibleToken, TokenProperty.ADMIN_KEY)) .willReturn(TxnHandlingScenario.TOKEN_ADMIN_KT.asJKey()); given(tokensLedger.get(fungibleToken, TokenProperty.FREEZE_KEY)) @@ -1017,7 +1017,7 @@ void getsFungibleTokenMetaAvailableFromLedgers() { assertEquals(FUNGIBLE_COMMON, tokenAccessor.typeOf(fungibleTokenAddress)); } - private void setUpToken(final MerkleToken token) throws DecoderException { + private void setUpToken(final MerkleToken token) throws InvalidKeyException { token.setMemo(tokenMemo); token.setPauseKey(TxnHandlingScenario.TOKEN_PAUSE_KT.asJKey()); token.setDeleted(true); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/SyntheticTxnFactoryTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/SyntheticTxnFactoryTest.java index b9b5b90932ed..3da8398f8098 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/SyntheticTxnFactoryTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/SyntheticTxnFactoryTest.java @@ -105,10 +105,10 @@ import com.hederahashgraph.api.proto.java.TokenType; import com.hederahashgraph.api.proto.java.TransferList; import java.math.BigInteger; +import java.security.InvalidKeyException; import java.time.Instant; import java.util.Collections; import java.util.List; -import org.apache.commons.codec.DecoderException; import org.apache.tuweni.bytes.Bytes; import org.bouncycastle.util.encoders.Hex; import org.hyperledger.besu.datatypes.Address; @@ -462,7 +462,7 @@ void createsExpectedCryptoCreateWithEDKeyAlias() { } @Test - void 
createsExpectedCryptoCreateWithECKeyAlias() throws DecoderException { + void createsExpectedCryptoCreateWithECKeyAlias() throws InvalidKeyException { final var balance = 10L; final var key = KeyFactory.getDefaultInstance().newEcdsaSecp256k1(); final var alias = key.toByteString(); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/codec/TokenCreateWrapperTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/codec/TokenCreateWrapperTest.java index 05f0a6380c25..fbcff88964e0 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/codec/TokenCreateWrapperTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/store/contracts/precompile/codec/TokenCreateWrapperTest.java @@ -36,9 +36,9 @@ import com.hedera.node.app.service.mono.utils.EntityIdUtils; import com.hederahashgraph.api.proto.java.ContractID; import com.hederahashgraph.api.proto.java.Key; +import java.security.InvalidKeyException; import java.util.Collections; import java.util.List; -import org.apache.commons.codec.DecoderException; import org.hyperledger.besu.datatypes.Address; import org.junit.jupiter.api.Test; @@ -50,7 +50,7 @@ class TokenCreateWrapperTest { private final ContractID contractID = EntityIdUtils.contractIdFromEvmAddress(contractAddress); @Test - void setInheritedKeysToSpecificKeyWorksAsExpected() throws DecoderException { + void setInheritedKeysToSpecificKeyWorksAsExpected() throws InvalidKeyException { // given final var key = new JContractIDKey(contractID); final var wrapper = createTokenCreateWrapperWithKeys(List.of( diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/consensus/MerkleTopicUpdateTransitionLogicTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/consensus/MerkleTopicUpdateTransitionLogicTest.java index 0bcd9f805fcb..a77a7431561a 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/consensus/MerkleTopicUpdateTransitionLogicTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/consensus/MerkleTopicUpdateTransitionLogicTest.java @@ -72,6 +72,8 @@ import com.hederahashgraph.api.proto.java.TransactionBody; import com.hederahashgraph.api.proto.java.TransactionID; import com.swirlds.merkle.map.MerkleMap; +import java.io.IOException; +import java.security.InvalidKeyException; import java.time.Instant; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -167,7 +169,7 @@ void syntaxCheckWithInvalidAdminKey() { } @Test - void followsHappyPath() throws Throwable { + void followsHappyPath() throws InvalidKeyException, IOException { // given: givenExistingTopicWithAdminKey(); givenValidTransactionWithAllOptions(); @@ -193,7 +195,7 @@ void followsHappyPath() throws Throwable { } @Test - void clearsKeysIfRequested() throws Throwable { + void clearsKeysIfRequested() { // given: givenExistingTopicWithBothKeys(); givenTransactionClearingKeys(); @@ -214,7 +216,7 @@ void clearsKeysIfRequested() throws Throwable { } @Test - void failsOnInvalidMemo() throws Throwable { + void failsOnInvalidMemo() { // given: givenExistingTopicWithAdminKey(); givenTransactionWithInvalidMemo(); @@ -231,7 +233,7 @@ void failsOnInvalidMemo() throws Throwable { } @Test - void failsOnInvalidAdminKey() throws 
Throwable { + void failsOnInvalidAdminKey() { // given: givenExistingTopicWithAdminKey(); givenTransactionWithInvalidAdminKey(); @@ -248,7 +250,7 @@ void failsOnInvalidAdminKey() throws Throwable { } @Test - void failsOnInvalidSubmitKey() throws Throwable { + void failsOnInvalidSubmitKey() { // given: givenExistingTopicWithAdminKey(); givenTransactionWithInvalidSubmitKey(); @@ -265,7 +267,7 @@ void failsOnInvalidSubmitKey() throws Throwable { } @Test - void failsOnInvalidAutoRenewPeriod() throws Throwable { + void failsOnInvalidAutoRenewPeriod() { // given: givenExistingTopicWithAdminKey(); givenTransactionWithInvalidAutoRenewPeriod(); @@ -282,7 +284,7 @@ void failsOnInvalidAutoRenewPeriod() throws Throwable { } @Test - void failsOnInvalidExpirationTime() throws Throwable { + void failsOnInvalidExpirationTime() { // given: givenExistingTopicWithAdminKey(); givenTransactionWithInvalidExpirationTime(); @@ -299,7 +301,7 @@ void failsOnInvalidExpirationTime() throws Throwable { } @Test - void failsOnExpirationTimeReduction() throws Throwable { + void failsOnExpirationTimeReduction() { // given: givenExistingTopicWithAdminKey(); givenTransactionWithReducedExpirationTime(); @@ -316,7 +318,7 @@ void failsOnExpirationTimeReduction() throws Throwable { } @Test - void failsUnauthorizedOnMemoChange() throws Throwable { + void failsUnauthorizedOnMemoChange() { // given: givenExistingTopicWithoutAdminKey(); givenTransactionWithMemo(); @@ -345,7 +347,7 @@ void failsOnInvalidTopic() { } @Test - void failsOnInvalidAutoRenewAccount() throws Throwable { + void failsOnInvalidAutoRenewAccount() { // given: givenExistingTopicWithAdminKey(); givenTransactionWithInvalidAutoRenewAccount(); @@ -358,7 +360,7 @@ void failsOnInvalidAutoRenewAccount() throws Throwable { } @Test - void failsOnDetachedExistingAutoRenewAccount() throws Throwable { + void failsOnDetachedExistingAutoRenewAccount() { // given: givenExistingTopicWithAutoRenewAccount(); givenValidTransactionWithAllOptions(); @@ -372,7 +374,7 @@ void failsOnDetachedExistingAutoRenewAccount() throws Throwable { } @Test - void failsOnDetachedNewAutoRenewAccount() throws Throwable { + void failsOnDetachedNewAutoRenewAccount() { // given: givenExistingTopicWithAdminKey(); givenTransactionWithAutoRenewAccountNotClearingAdminKey(); @@ -386,7 +388,7 @@ void failsOnDetachedNewAutoRenewAccount() throws Throwable { } @Test - void failsOnAutoRenewAccountNotAllowed() throws Throwable { + void failsOnAutoRenewAccountNotAllowed() { // given: givenExistingTopicWithAdminKey(); givenTransactionWithAutoRenewAccountClearingAdminKey(); @@ -399,7 +401,7 @@ void failsOnAutoRenewAccountNotAllowed() throws Throwable { } @Test - void clearsAutoRenewAccountIfCorrectSentinelUsed() throws Throwable { + void clearsAutoRenewAccountIfCorrectSentinelUsed() { // given: givenExistingTopicWithAutoRenewAccount(); givenTransactionClearingAutoRenewAccount(); @@ -414,7 +416,7 @@ void clearsAutoRenewAccountIfCorrectSentinelUsed() throws Throwable { } @Test - void doesntClearAutoRenewAccountIfSentinelWithAliasUsed() throws Throwable { + void doesntClearAutoRenewAccountIfSentinelWithAliasUsed() { // given: givenExistingTopicWithAutoRenewAccount(); givenTransactionChangingAutoRenewAccountWithAliasId(); @@ -434,47 +436,61 @@ private void assertTopicNotUpdated(MerkleTopic originalMerkleTopic, MerkleTopic assertEquals(originalMerkleTopicClone, updatedTopic); // No change in values } - private void givenExistingTopicWithAdminKey() throws Throwable { - var existingTopic = new MerkleTopic( - EXISTING_MEMO, - 
JKey.mapKey(existingKey), - null, - EXISTING_AUTORENEW_PERIOD_SECONDS, - null, - EXISTING_EXPIRATION_TIME); - topics.put(EntityNum.fromTopicId(TOPIC_ID), existingTopic); - given(validator.queryableTopicStatus(TOPIC_ID, topics)).willReturn(OK); - } - - private void givenExistingTopicWithBothKeys() throws Throwable { - var existingTopic = new MerkleTopic( - EXISTING_MEMO, - JKey.mapKey(existingKey), - JKey.mapKey(existingKey), - EXISTING_AUTORENEW_PERIOD_SECONDS, - null, - EXISTING_EXPIRATION_TIME); - topics.put(EntityNum.fromTopicId(TOPIC_ID), existingTopic); - given(validator.queryableTopicStatus(TOPIC_ID, topics)).willReturn(OK); - } - - private void givenExistingTopicWithoutAdminKey() throws Throwable { + private void givenExistingTopicWithAdminKey() { + MerkleTopic existingTopic = null; + try { + existingTopic = new MerkleTopic( + EXISTING_MEMO, + JKey.mapKey(existingKey), + null, + EXISTING_AUTORENEW_PERIOD_SECONDS, + null, + EXISTING_EXPIRATION_TIME); + topics.put(EntityNum.fromTopicId(TOPIC_ID), existingTopic); + given(validator.queryableTopicStatus(TOPIC_ID, topics)).willReturn(OK); + } catch (InvalidKeyException e) { + throw new IllegalArgumentException("Invalid key in test scenario", e); + } + } + + private void givenExistingTopicWithBothKeys() { + MerkleTopic existingTopic = null; + try { + existingTopic = new MerkleTopic( + EXISTING_MEMO, + JKey.mapKey(existingKey), + JKey.mapKey(existingKey), + EXISTING_AUTORENEW_PERIOD_SECONDS, + null, + EXISTING_EXPIRATION_TIME); + topics.put(EntityNum.fromTopicId(TOPIC_ID), existingTopic); + given(validator.queryableTopicStatus(TOPIC_ID, topics)).willReturn(OK); + } catch (InvalidKeyException e) { + throw new IllegalArgumentException("Invalid key in test scenario", e); + } + } + + private void givenExistingTopicWithoutAdminKey() { var existingTopic = new MerkleTopic( EXISTING_MEMO, null, null, EXISTING_AUTORENEW_PERIOD_SECONDS, null, EXISTING_EXPIRATION_TIME); topics.put(EntityNum.fromTopicId(TOPIC_ID), existingTopic); given(validator.queryableTopicStatus(TOPIC_ID, topics)).willReturn(OK); } - private void givenExistingTopicWithAutoRenewAccount() throws Throwable { - var existingTopic = new MerkleTopic( - EXISTING_MEMO, - JKey.mapKey(existingKey), - null, - EXISTING_AUTORENEW_PERIOD_SECONDS, - EntityId.fromGrpcAccountId(MISC_ACCOUNT), - EXISTING_EXPIRATION_TIME); - topics.put(EntityNum.fromTopicId(TOPIC_ID), existingTopic); - given(validator.queryableTopicStatus(TOPIC_ID, topics)).willReturn(OK); + private void givenExistingTopicWithAutoRenewAccount() { + try { + var existingTopic = new MerkleTopic( + EXISTING_MEMO, + JKey.mapKey(existingKey), + null, + EXISTING_AUTORENEW_PERIOD_SECONDS, + EntityId.fromGrpcAccountId(MISC_ACCOUNT), + EXISTING_EXPIRATION_TIME); + topics.put(EntityNum.fromTopicId(TOPIC_ID), existingTopic); + given(validator.queryableTopicStatus(TOPIC_ID, topics)).willReturn(OK); + } catch (InvalidKeyException e) { + throw new IllegalArgumentException("Invalid key in test scenario", e); + } } private void givenTransaction(ConsensusUpdateTopicTransactionBody.Builder body) { diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/crypto/AutoCreationLogicTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/crypto/AutoCreationLogicTest.java index 6c5825b6c7da..b1567c192dec 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/crypto/AutoCreationLogicTest.java +++ 
b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/crypto/AutoCreationLogicTest.java @@ -73,11 +73,11 @@ import com.hederahashgraph.api.proto.java.NftTransfer; import com.hederahashgraph.api.proto.java.TokenID; import com.hederahashgraph.api.proto.java.TransactionBody; +import java.security.InvalidKeyException; import java.time.Instant; import java.util.Collections; import java.util.HashMap; import java.util.List; -import org.apache.commons.codec.DecoderException; import org.apache.commons.lang3.tuple.Pair; import org.bouncycastle.util.encoders.Hex; import org.junit.jupiter.api.BeforeEach; @@ -216,7 +216,7 @@ final var record = pendingCreations.recordBuilder().build(); } @Test - void happyPathECKeyAliasWithHbarChangeWorks() throws InvalidProtocolBufferException, DecoderException { + void happyPathECKeyAliasWithHbarChangeWorks() throws InvalidProtocolBufferException, InvalidKeyException { givenCollaborators(mockBuilder, AUTO_MEMO); final var key = Key.parseFrom(ecdsaKeyBytes); final var pretendAddress = keyAliasToEVMAddress(ecKeyAlias); @@ -253,7 +253,7 @@ final var record = pendingCreations.recordBuilder().build(); } @Test - void hollowAccountWithHbarChangeWorks() throws InvalidProtocolBufferException, DecoderException { + void hollowAccountWithHbarChangeWorks() throws InvalidProtocolBufferException, InvalidKeyException { final var jKey = JKey.mapKey(Key.parseFrom(ecdsaKeyBytes)); final var evmAddressAlias = ByteString.copyFrom(EthSigsUtils.recoverAddressFromPubKey(jKey.getECDSASecp256k1Key())); @@ -291,7 +291,7 @@ void hollowAccountWithHbarChangeWorks() throws InvalidProtocolBufferException, D } @Test - void hollowAccountWithFtChangeWorks() throws InvalidProtocolBufferException, DecoderException { + void hollowAccountWithFtChangeWorks() throws InvalidProtocolBufferException, InvalidKeyException { final var jKey = JKey.mapKey(Key.parseFrom(ecdsaKeyBytes)); final var evmAddressAlias = ByteString.copyFrom(EthSigsUtils.recoverAddressFromPubKey(jKey.getECDSASecp256k1Key())); @@ -327,7 +327,7 @@ void hollowAccountWithFtChangeWorks() throws InvalidProtocolBufferException, Dec } @Test - void hollowAccountWithNFTChangeWorks() throws InvalidProtocolBufferException, DecoderException { + void hollowAccountWithNFTChangeWorks() throws InvalidProtocolBufferException, InvalidKeyException { final var jKey = JKey.mapKey(Key.parseFrom(ecdsaKeyBytes)); final var evmAddressAlias = ByteString.copyFrom(EthSigsUtils.recoverAddressFromPubKey(jKey.getECDSASecp256k1Key())); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/crypto/CryptoCreateTransitionLogicTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/crypto/CryptoCreateTransitionLogicTest.java index f2a5dd1d4e17..8e6da96abd8c 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/crypto/CryptoCreateTransitionLogicTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/crypto/CryptoCreateTransitionLogicTest.java @@ -95,8 +95,8 @@ import com.hederahashgraph.api.proto.java.TransactionID; import com.swirlds.common.utility.CommonUtils; import com.swirlds.merkle.map.MerkleMap; +import java.security.InvalidKeyException; import java.time.Instant; -import org.apache.commons.codec.DecoderException; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; @@ -565,7 +565,7 @@ void 
rejectsECKeyAsAliasWhenCreateWithAliasIsEnabled() { } @Test - void acceptsECKeyWhenECKeyAndExtractedEVMAddressAreNotUnique() throws DecoderException { + void acceptsECKeyWhenECKeyAndExtractedEVMAddressAreNotUnique() throws InvalidKeyException { final var captor = ArgumentCaptor.forClass(HederaAccountCustomizer.class); final var opBuilder = CryptoCreateTransactionBody.newBuilder().setKey(ECDSA_KEY); @@ -726,7 +726,7 @@ void followsHappyPathWithOverrides() throws Throwable { } @Test - void followsHappyPathECKeyAndEVMAddressAlias() throws DecoderException { + void followsHappyPathECKeyAndEVMAddressAlias() throws InvalidKeyException { final var captor = ArgumentCaptor.forClass(HederaAccountCustomizer.class); final var opBuilder = CryptoCreateTransactionBody.newBuilder() .setMemo(MEMO) @@ -769,7 +769,7 @@ void followsHappyPathECKeyAndEVMAddressAlias() throws DecoderException { } @Test - void followsHappyPathECKey() throws DecoderException { + void followsHappyPathECKey() throws InvalidKeyException { final var captor = ArgumentCaptor.forClass(HederaAccountCustomizer.class); final var opBuilder = CryptoCreateTransactionBody.newBuilder().setKey(ECDSA_KEY); @@ -809,7 +809,7 @@ void followsHappyPathECKey() throws DecoderException { } @Test - void followsHappyPathECKeyWithBothFlagsAreEnabled() throws DecoderException { + void followsHappyPathECKeyWithBothFlagsAreEnabled() throws InvalidKeyException { final var captor = ArgumentCaptor.forClass(HederaAccountCustomizer.class); final var opBuilder = CryptoCreateTransactionBody.newBuilder().setKey(ECDSA_KEY); @@ -849,7 +849,7 @@ void followsHappyPathECKeyWithBothFlagsAreEnabled() throws DecoderException { } @Test - void followsHappyPathECKeyAndCreateWithAliasAndLazyCreateDisabled() throws DecoderException { + void followsHappyPathECKeyAndCreateWithAliasAndLazyCreateDisabled() throws InvalidKeyException { final var captor = ArgumentCaptor.forClass(HederaAccountCustomizer.class); final var opBuilder = CryptoCreateTransactionBody.newBuilder().setKey(ECDSA_KEY); @@ -888,7 +888,7 @@ void followsHappyPathECKeyAndCreateWithAliasAndLazyCreateDisabled() throws Decod } @Test - void followsHappyPathEDKey() throws DecoderException { + void followsHappyPathEDKey() throws InvalidKeyException { final var captor = ArgumentCaptor.forClass(HederaAccountCustomizer.class); final var opBuilder = CryptoCreateTransactionBody.newBuilder().setKey(aPrimitiveEDKey); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/file/FileUpdateTransitionLogicTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/file/FileUpdateTransitionLogicTest.java index c5106c84e740..661c7fe6c773 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/file/FileUpdateTransitionLogicTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/txns/file/FileUpdateTransitionLogicTest.java @@ -67,10 +67,10 @@ import com.hederahashgraph.api.proto.java.Timestamp; import com.hederahashgraph.api.proto.java.TransactionBody; import com.hederahashgraph.api.proto.java.TransactionID; +import java.security.InvalidKeyException; import java.time.Instant; import java.util.Arrays; import java.util.EnumSet; -import org.apache.commons.codec.DecoderException; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; @@ -425,7 +425,7 @@ void transitionCatchesInvalidExpiryIfPresent() { void 
transitionCatchesBadlyEncodedKey() { givenTxnCtxUpdating(EnumSet.of(UpdateTarget.KEY)); // and: - willThrow(new IllegalArgumentException(new DecoderException())) + willThrow(new IllegalArgumentException(new InvalidKeyException())) .given(hfs) .setattr(argThat(nonSysFileTarget::equals), any()); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/utils/MiscUtilsTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/utils/MiscUtilsTest.java index 352366f1153b..2bb43789e541 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/utils/MiscUtilsTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/utils/MiscUtilsTest.java @@ -212,6 +212,7 @@ import java.lang.reflect.Method; import java.math.BigInteger; import java.nio.ByteBuffer; +import java.security.InvalidKeyException; import java.time.Instant; import java.util.HashMap; import java.util.HashSet; @@ -220,7 +221,6 @@ import java.util.Set; import java.util.function.BiConsumer; import java.util.stream.Stream; -import org.apache.commons.codec.DecoderException; import org.apache.logging.log4j.Logger; import org.apache.tuweni.bytes.Bytes; import org.junit.jupiter.api.Test; @@ -736,7 +736,7 @@ void perm64Test() { } @Test - void describesCorrectly() throws DecoderException { + void describesCorrectly() throws InvalidKeyException { assertEquals("", describe(null)); final var key = Key.newBuilder() diff --git a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/accounts/MerkleAccountFactory.java b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/accounts/MerkleAccountFactory.java index 626d3c64a897..618c5658842c 100644 --- a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/accounts/MerkleAccountFactory.java +++ b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/accounts/MerkleAccountFactory.java @@ -29,6 +29,7 @@ import com.hederahashgraph.api.proto.java.AccountID; import com.hederahashgraph.api.proto.java.Key; import com.hederahashgraph.api.proto.java.TokenID; +import java.security.InvalidKeyException; import java.util.HashSet; import java.util.List; import java.util.Optional; @@ -192,11 +193,11 @@ public MerkleAccountFactory keyFactory(final KeyFactory keyFactory) { return this; } - public MerkleAccountFactory accountKeys(final KeyTree kt) throws Exception { + public MerkleAccountFactory accountKeys(final KeyTree kt) throws InvalidKeyException { return accountKeys(kt.asKey(keyFactory)); } - public MerkleAccountFactory accountKeys(final Key k) throws Exception { + public MerkleAccountFactory accountKeys(final Key k) throws InvalidKeyException { return accountKeys(JKey.mapKey(k)); } diff --git a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/keys/KeyTree.java b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/keys/KeyTree.java index 4292f9ab971a..608f192dc544 100644 --- a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/keys/KeyTree.java +++ b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/keys/KeyTree.java @@ -20,9 +20,9 @@ import com.hedera.node.app.service.mono.pbj.PbjConverter; import com.hedera.node.app.service.mono.utils.MiscUtils; import com.hederahashgraph.api.proto.java.Key; +import java.security.InvalidKeyException; import 
java.util.function.Consumer; import java.util.function.Predicate; -import org.apache.commons.codec.DecoderException; public class KeyTree { private final KeyTreeNode root; @@ -52,7 +52,7 @@ public void traverse(final Predicate shouldVisit, final Consumer wellKnownAccounts() { } } - default HederaFs hfs() throws Exception { + default HederaFs hfs() throws InvalidKeyException { HederaFs hfs = mock(HederaFs.class); given(hfs.exists(MISC_FILE)).willReturn(true); given(hfs.exists(SYS_FILE)).willReturn(true); @@ -267,7 +270,7 @@ default MerkleMap topics() { return topics; } - private static HFileMeta convert(final FileGetInfoResponse.FileInfo fi) throws DecoderException { + private static HFileMeta convert(final FileGetInfoResponse.FileInfo fi) throws InvalidKeyException { return new HFileMeta( fi.getDeleted(), JKey.mapKey(Key.newBuilder().setKeyList(fi.getKeys()).build()), @@ -388,7 +391,8 @@ default TokenStore tokenStore() { return tokenStore; } - default byte[] extantSchedulingBodyBytes() throws Throwable { + default byte[] extantSchedulingBodyBytes() + throws InvalidProtocolBufferException, SignatureException, NoSuchAlgorithmException, InvalidKeyException { return scheduleCreateTxnWith( Key.getDefaultInstance(), "", diff --git a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/topics/TopicFactory.java b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/topics/TopicFactory.java index 6b6f1bd4024f..7ebaf7206a12 100644 --- a/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/topics/TopicFactory.java +++ b/hedera-node/hedera-mono-service/src/testFixtures/java/com/hedera/test/factories/topics/TopicFactory.java @@ -23,6 +23,7 @@ import com.hederahashgraph.api.proto.java.AccountID; import com.hederahashgraph.api.proto.java.Key; import com.hederahashgraph.api.proto.java.Timestamp; +import java.security.InvalidKeyException; import java.util.Optional; import java.util.OptionalLong; @@ -55,7 +56,7 @@ public MerkleTopic get() throws Exception { private JKey uncheckedMap(Key k) { try { return JKey.mapKey(k); - } catch (Exception ignore) { + } catch (InvalidKeyException ignore) { } throw new AssertionError("Valid key failed to map!"); } diff --git a/hedera-node/hedera-schedule-service-impl/src/test/java/com/hedera/node/app/service/schedule/impl/test/handlers/ScheduleDeleteHandlerParityTest.java b/hedera-node/hedera-schedule-service-impl/src/test/java/com/hedera/node/app/service/schedule/impl/test/handlers/ScheduleDeleteHandlerParityTest.java index 382c36e25964..c3d91a7a4346 100644 --- a/hedera-node/hedera-schedule-service-impl/src/test/java/com/hedera/node/app/service/schedule/impl/test/handlers/ScheduleDeleteHandlerParityTest.java +++ b/hedera-node/hedera-schedule-service-impl/src/test/java/com/hedera/node/app/service/schedule/impl/test/handlers/ScheduleDeleteHandlerParityTest.java @@ -96,11 +96,11 @@ import com.hedera.test.factories.scenarios.TxnHandlingScenario; import com.hedera.test.utils.IdUtils; import com.hedera.test.utils.TestFixturesKeyLookup; +import java.security.InvalidKeyException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; -import org.apache.commons.codec.DecoderException; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -192,7 +192,7 @@ public static ReadableStates mockStates(final Map keysT } public static ReadableScheduleStore mockSchedule(Long schedId, KeyTree key, TransactionBody txnBody) - throws DecoderException 
{ + throws InvalidKeyException { final ScheduleID scheduleID = ScheduleID.newBuilder().scheduleNum(schedId).build(); given(schedule.hasAdminKey()).willReturn(key == null ? false : true); diff --git a/hedera-node/hedera-schedule-service-impl/src/test/java/com/hedera/node/app/service/schedule/impl/test/handlers/ScheduleDeleteHandlerTest.java b/hedera-node/hedera-schedule-service-impl/src/test/java/com/hedera/node/app/service/schedule/impl/test/handlers/ScheduleDeleteHandlerTest.java index dd630c46b74a..4f35306fcd79 100644 --- a/hedera-node/hedera-schedule-service-impl/src/test/java/com/hedera/node/app/service/schedule/impl/test/handlers/ScheduleDeleteHandlerTest.java +++ b/hedera-node/hedera-schedule-service-impl/src/test/java/com/hedera/node/app/service/schedule/impl/test/handlers/ScheduleDeleteHandlerTest.java @@ -37,9 +37,9 @@ import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; import com.hedera.node.app.spi.state.ReadableKVStateBase; import com.hedera.node.app.spi.workflows.PreCheckException; +import java.security.InvalidKeyException; import java.util.Optional; import java.util.Set; -import org.apache.commons.codec.DecoderException; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.BDDMockito; @@ -69,7 +69,7 @@ void setUp() { } @Test - void scheduleDeleteHappyPath() throws DecoderException, PreCheckException { + void scheduleDeleteHappyPath() throws InvalidKeyException, PreCheckException, InvalidKeyException { final var txn = scheduleDeleteTransaction(); scheduledTxn = givenSetupForScheduleDelete(txn); BDDMockito.given(schedule.hasAdminKey()).willReturn(true); From eab950e8344d7f86b3e29bf7a2d90e73c8e1a197 Mon Sep 17 00:00:00 2001 From: Michael Tinker Date: Fri, 16 Jun 2023 15:46:53 -0500 Subject: [PATCH 52/70] Automatically assert `FastCopyable` copies have stable serialization (#7154) Signed-off-by: Michael Tinker --- .../com/hedera/test/serde/SerializedForms.java | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/SerializedForms.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/SerializedForms.java index 4a11a5614e82..daa47ed4e72b 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/SerializedForms.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/test/serde/SerializedForms.java @@ -70,6 +70,7 @@ import com.hedera.node.app.service.mono.stream.RecordsRunningHashLeaf; import com.hedera.test.utils.SeededPropertySource; import com.hedera.test.utils.SerdeUtils; +import com.swirlds.common.FastCopyable; import com.swirlds.common.io.SelfSerializable; import com.swirlds.common.utility.CommonUtils; import com.swirlds.virtualmap.VirtualValue; @@ -110,6 +111,7 @@ public static byte[] loadForm( } } + @SuppressWarnings("unchecked") public static void assertSameSerialization( final Class type, final Function factory, @@ -120,8 +122,13 @@ public static void assertSameSerialization( final var actual = SerdeUtils.serialize(example); final var expected = loadForm(type, version, testCaseNo); assertArrayEquals(expected, actual, "Regression in serializing test case #" + testCaseNo); + if (type.isAssignableFrom(FastCopyable.class)) { + assertSameCopySerialization( + testCaseNo, (FastCopyable) example, copy -> SerdeUtils.serialize((T) copy), expected); + } } + @SuppressWarnings("unchecked") public static void assertSameBufferSerialization( final Class type, final Function factory, @@ -132,6 
+139,16 @@ public static void assertSameBufferSerialization( final var actual = SerdeUtils.serializeToBuffer(example, MAX_SERIALIAZED_LEN); final var expected = loadForm(type, version, testCaseNo); assertArrayEquals(expected, actual, "Regression in serializing test case #" + testCaseNo); + assertSameCopySerialization( + testCaseNo, (FastCopyable) example, copy -> SerdeUtils.serialize((T) copy), expected); + } + + private static void assertSameCopySerialization( + final int testCaseNo, final T example, final Function serializer, final byte[] expected) { + final var copy = example.copy(); + @SuppressWarnings("unchecked") + final var copyActual = serializer.apply((T) copy); + assertArrayEquals(expected, copyActual, "Regression in serializing test case #" + testCaseNo + " (copy)"); } private static void generateSerializedData() { From 9cd85ea17047de5b5291af47699386e7a68f76cd Mon Sep 17 00:00:00 2001 From: Nathan Klick Date: Fri, 16 Jun 2023 23:00:28 -0500 Subject: [PATCH 53/70] feat: update ci workflows to remove job gating based on labels (#7161) Signed-off-by: Nathan Klick --- .../node-flow-pull-request-checks.yaml | 18 ---- .../platform-flow-pull-request-checks.yaml | 31 ------ ...platform-pull-request-extended-checks.yaml | 94 ++++++++++++------- 3 files changed, 62 insertions(+), 81 deletions(-) diff --git a/.github/workflows/node-flow-pull-request-checks.yaml b/.github/workflows/node-flow-pull-request-checks.yaml index 76b5e73e2471..e01f436a97d5 100644 --- a/.github/workflows/node-flow-pull-request-checks.yaml +++ b/.github/workflows/node-flow-pull-request-checks.yaml @@ -22,7 +22,6 @@ on: - opened - reopened - synchronize - - labeled defaults: run: @@ -55,20 +54,9 @@ jobs: secrets: access-token: ${{ secrets.GITHUB_TOKEN }} - unit-label-check: - name: "Label Check [CI:UnitTests]" - runs-on: [self-hosted, Linux, medium, ephemeral] - if: ${{ contains(github.event.pull_request.labels.*.name, 'CI:UnitTests') || contains(github.event.pull_request.labels.*.name, 'CI:FinalChecks') }} - steps: - - name: Check Labels - if: github.event_name == 'pull_request' - run: echo PR labels that trigger the tests are [CI:UnitTests] and [CI:FinalChecks] - unit-tests: name: Unit Tests uses: ./.github/workflows/node-zxc-compile-application-code.yaml - needs: - - unit-label-check with: custom-job-label: Standard enable-javadoc: false @@ -83,8 +71,6 @@ jobs: eet-tests: name: E2E Tests uses: ./.github/workflows/node-zxc-compile-application-code.yaml - needs: - - unit-label-check with: custom-job-label: Standard enable-javadoc: false @@ -100,8 +86,6 @@ jobs: integration-tests: name: Integration Tests uses: ./.github/workflows/node-zxc-compile-application-code.yaml - needs: - - unit-label-check with: custom-job-label: Standard enable-javadoc: false @@ -117,8 +101,6 @@ jobs: snyk-scan: name: Snyk Scan uses: ./.github/workflows/node-zxc-compile-application-code.yaml - needs: - - unit-label-check with: custom-job-label: Standard enable-javadoc: false diff --git a/.github/workflows/platform-flow-pull-request-checks.yaml b/.github/workflows/platform-flow-pull-request-checks.yaml index 55fd73909278..7dbe34db2439 100644 --- a/.github/workflows/platform-flow-pull-request-checks.yaml +++ b/.github/workflows/platform-flow-pull-request-checks.yaml @@ -22,7 +22,6 @@ on: - opened - reopened - synchronize - - labeled paths: - 'platform-sdk/**' @@ -58,24 +57,9 @@ jobs: secrets: access-token: ${{ secrets.PLATFORM_GH_ACCESS_TOKEN }} - unit-label-check: - name: "Label Check [CI:UnitTests]" - runs-on: [self-hosted, Linux, small, 
scheduler, ephemeral] - if: ${{ contains(github.event.pull_request.labels.*.name, 'CI:UnitTests') || contains(github.event.pull_request.labels.*.name, 'CI:FinalChecks') }} - steps: - - name: "Check Labels" - uses: jesusvasquez333/verify-pr-label-action@v1.4.0 - if: github.event_name == 'pull_request' - with: - github-token: ${{ secrets.PLATFORM_GH_ACCESS_TOKEN }} - valid-labels: "CI:UnitTests, CI:FinalChecks" - disable-reviews: true - unit-tests: name: Unit Tests uses: ./.github/workflows/platform-zxc-compile-platform-code.yaml - needs: - - unit-label-check with: custom-job-label: Standard enable-javadoc: false @@ -87,24 +71,9 @@ jobs: sonar-token: ${{ secrets.PLATFORM_SONAR_TOKEN }} slack-api-token: ${{ secrets.PLATFORM_SLACK_API_TOKEN }} - final-label-check: - name: "Label Check [CI:FinalChecks]" - runs-on: [self-hosted, Linux, small, scheduler, ephemeral] - if: ${{ contains(github.event.pull_request.labels.*.name, 'CI:FinalChecks') }} - steps: - - name: "Check Labels" - uses: jesusvasquez333/verify-pr-label-action@v1.4.0 - if: github.event_name == 'pull_request' - with: - github-token: ${{ secrets.PLATFORM_GH_ACCESS_TOKEN }} - valid-labels: "CI:FinalChecks" - disable-reviews: true - abbreviated-panel: name: JRS Panel uses: ./.github/workflows/zxc-jrs-regression.yaml - needs: - - final-label-check with: panel-config: "configs/suites/GCP-PRCheck-Abbrev-4N.json" branch-name: ${{ github.head_ref || github.ref_name }} diff --git a/.github/workflows/platform-pull-request-extended-checks.yaml b/.github/workflows/platform-pull-request-extended-checks.yaml index b096c87556a3..f31606e8937c 100644 --- a/.github/workflows/platform-pull-request-extended-checks.yaml +++ b/.github/workflows/platform-pull-request-extended-checks.yaml @@ -17,40 +17,55 @@ name: "Platform: PR Extended Checks" on: workflow_dispatch: - pull_request: - types: - - opened - - reopened - - synchronize - - labeled + slack-results-channel: + description: "Slack Test Result Channel:" + required: false + type: string + default: "regression-test" + slack-summary-channel: + description: "Slack Summary Channel:" + required: false + type: string + default: "regression-test" + java-version: + description: "Java JDK Version:" + type: string + required: false + default: "17" + java-distribution: + description: "Java JDK Distribution:" + type: string + required: false + default: "temurin" + gradle-version: + description: "Gradle Version:" + type: string + required: false + default: "wrapper" defaults: run: shell: bash concurrency: - group: pr-checks-${{ github.workflow }}-${{ github.head_ref || github.run_id }} + group: pr-ext-checks-${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true jobs: - label-check: - name: "Label Check [CI:PlatformExtendedChecks]" - runs-on: [self-hosted, Linux, medium, ephemeral] - if: ${{ contains(github.event.pull_request.labels.*.name, 'CI:PlatformExtendedChecks') }} - steps: - - name: Check Labels - if: github.event_name == 'pull_request' - run: echo PR labels that trigger the tests is [CI:PlatformExtendedChecks] - reconnect: name: Reconnect uses: ./.github/workflows/zxc-jrs-regression.yaml - needs: - - label-check with: - ref: ${{ github.event.inputs.ref }} - branch-name: ${{ github.event.inputs.branch-name }} panel-config: "configs/suites/daily/4N/GCP-Daily-Reconnect-4N.json" + ref: ${{ github.event.inputs.ref || github.ref }} + branch-name: ${{ github.ref_name }} + slack-results-channel: ${{ github.event.inputs.slack-results-channel }} + slack-summary-channel: ${{ 
github.event.inputs.slack-summary-channel }} + java-version: ${{ github.event.inputs.java-version || '17' }} + java-distribution: ${{ github.event.inputs.java-distribution || 'temurin' }} + gradle-version: ${{ github.event.inputs.gradle-version || 'wrapper' }} + use-branch-for-slack-channel: false + custom-job-name: "Custom" secrets: access-token: ${{ secrets.PLATFORM_GH_ACCESS_TOKEN }} jrs-ssh-user-name: ${{ secrets.PLATFORM_JRS_SSH_USER_NAME }} @@ -62,12 +77,17 @@ jobs: restart: name: Restart uses: ./.github/workflows/zxc-jrs-regression.yaml - needs: - - label-check with: - ref: ${{ github.event.inputs.ref }} - branch-name: ${{ github.event.inputs.branch-name }} panel-config: "configs/suites/daily/4N/GCP-Daily-Restart-4N.json" + ref: ${{ github.event.inputs.ref || github.ref }} + branch-name: ${{ github.ref_name }} + slack-results-channel: ${{ github.event.inputs.slack-results-channel }} + slack-summary-channel: ${{ github.event.inputs.slack-summary-channel }} + java-version: ${{ github.event.inputs.java-version || '17' }} + java-distribution: ${{ github.event.inputs.java-distribution || 'temurin' }} + gradle-version: ${{ github.event.inputs.gradle-version || 'wrapper' }} + use-branch-for-slack-channel: false + custom-job-name: "Custom" secrets: access-token: ${{ secrets.PLATFORM_GH_ACCESS_TOKEN }} jrs-ssh-user-name: ${{ secrets.PLATFORM_JRS_SSH_USER_NAME }} @@ -79,12 +99,17 @@ jobs: dynamic-freeze: name: DynamicFreeze uses: ./.github/workflows/zxc-jrs-regression.yaml - needs: - - label-check with: - ref: ${{ github.event.inputs.ref }} - branch-name: ${{ github.event.inputs.branch-name }} panel-config: "configs/suites/daily/4N/GCP-Daily-DynamicFreeze-4N.json" + ref: ${{ github.event.inputs.ref || github.ref }} + branch-name: ${{ github.ref_name }} + slack-results-channel: ${{ github.event.inputs.slack-results-channel }} + slack-summary-channel: ${{ github.event.inputs.slack-summary-channel }} + java-version: ${{ github.event.inputs.java-version || '17' }} + java-distribution: ${{ github.event.inputs.java-distribution || 'temurin' }} + gradle-version: ${{ github.event.inputs.gradle-version || 'wrapper' }} + use-branch-for-slack-channel: false + custom-job-name: "Custom" secrets: access-token: ${{ secrets.PLATFORM_GH_ACCESS_TOKEN }} jrs-ssh-user-name: ${{ secrets.PLATFORM_JRS_SSH_USER_NAME }} @@ -96,12 +121,17 @@ jobs: NewNodesSimilarStake: name: NewNodesSimilarStake uses: ./.github/workflows/zxc-jrs-regression.yaml - needs: - - label-check with: - ref: ${{ github.event.inputs.ref }} - branch-name: ${{ github.event.inputs.branch-name }} panel-config: "configs/suites/daily/5N/GCP-Daily-RestartWithNewNodes-SimilarStake-5N.json" + ref: ${{ github.event.inputs.ref || github.ref }} + branch-name: ${{ github.ref_name }} + slack-results-channel: ${{ github.event.inputs.slack-results-channel }} + slack-summary-channel: ${{ github.event.inputs.slack-summary-channel }} + java-version: ${{ github.event.inputs.java-version || '17' }} + java-distribution: ${{ github.event.inputs.java-distribution || 'temurin' }} + gradle-version: ${{ github.event.inputs.gradle-version || 'wrapper' }} + use-branch-for-slack-channel: false + custom-job-name: "Custom" secrets: access-token: ${{ secrets.PLATFORM_GH_ACCESS_TOKEN }} jrs-ssh-user-name: ${{ secrets.PLATFORM_JRS_SSH_USER_NAME }} From ab455181ceb69e714a761331a911e42b9133bda0 Mon Sep 17 00:00:00 2001 From: Hendrik Ebbers Date: Tue, 20 Jun 2023 18:00:46 +0200 Subject: [PATCH 54/70] Minor changes in the readme based on the new platform modules (#5976) 
Signed-off-by: Hendrik Ebbers Co-authored-by: Nathan Klick --- README.md | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 5f2940c41bfd..ebfe5a47571f 100644 --- a/README.md +++ b/README.md @@ -7,17 +7,15 @@ # Hedera Services -Implementation of the [services offered](https://github.com/hashgraph/hedera-protobufs) by -nodes in the Hedera public network, which is built on the Platform. +Implementation of the Platform and the [services offered](https://github.com/hashgraph/hedera-protobufs) by +nodes in the [Hedera public network](https://hedera.com). ## Overview of child modules +* platform-sdk/_ - the basic Platform. * _hedera-node/_ - implementation of Hedera services on the Platform. -* _test-clients/_ - clients and frameworks for end-to-end testing of Services. -* _hapi-fees/_ - libraries to estimate resource usage of Services operations. -* _hapi-utils/_ - deprecated libraries primarily involved in fee calculation. ## JVM -JDK 17 is required. Adoptium builds of OpenJDK 17 are strongly recommended. +JDK 17 is required. The Temurin builds of [Eclipse Adoptium](https://adoptium.net/) are strongly recommended. ## Solidity Hedera Contracts support `pragma solidity <=0.8.9`. From 44ea444189d518d245256f9cc0a25ac069d8858e Mon Sep 17 00:00:00 2001 From: artemananiev <33361937+artemananiev@users.noreply.github.com> Date: Tue, 20 Jun 2023 11:36:43 -0700 Subject: [PATCH 55/70] 6944: Re-enable data on disk + MerkleDb in 0.40 in dev environments (#6945) Signed-off-by: Artem Ananev --- hedera-node/configuration/previewnet/bootstrap.properties | 4 ++++ hedera-node/data/config/bootstrap.properties | 8 ++++---- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/hedera-node/configuration/previewnet/bootstrap.properties b/hedera-node/configuration/previewnet/bootstrap.properties index 9ca933bbb98b..5ea2fd92e7a6 100644 --- a/hedera-node/configuration/previewnet/bootstrap.properties +++ b/hedera-node/configuration/previewnet/bootstrap.properties @@ -5,3 +5,7 @@ accounts.blocklist.enabled=false accounts.blocklist.resource= contracts.evm.version.dynamic=true contracts.maxNumWithHapiSigsAccess=0 +accounts.storeOnDisk=false +tokens.storeRelsOnDisk=false +tokens.nfts.useVirtualMerkle=false +virtualdatasource.jasperdbToMerkledb=true diff --git a/hedera-node/data/config/bootstrap.properties b/hedera-node/data/config/bootstrap.properties index 874eb88c1168..3322535fc377 100644 --- a/hedera-node/data/config/bootstrap.properties +++ b/hedera-node/data/config/bootstrap.properties @@ -6,10 +6,10 @@ scheduling.whitelist=ConsensusSubmitMessage,CryptoTransfer,TokenMint,TokenBurn,C hedera.workflows.enabled= #hedera.workflows.enabled=ConsensusCreateTopic,ConsensusUpdateTopic,ConsensusDeleteTopic,ConsensusSubmitMessage,ConsensusGetTopicInfo # Initial Service workflow schemas require on-disk storage, so all these need to be true if enabling workflows -accounts.storeOnDisk=false -tokens.storeRelsOnDisk=false -tokens.nfts.useVirtualMerkle=false -virtualdatasource.jasperdbToMerkledb=false +accounts.storeOnDisk=true +tokens.storeRelsOnDisk=true +tokens.nfts.useVirtualMerkle=true +virtualdatasource.jasperdbToMerkledb=true records.useConsolidatedFcq=false cache.cryptoTransfer.warmThreads=30 contracts.maxNumWithHapiSigsAccess=0 From 9edfef7f6f3d043d84b7798b2678bb97aff8f0a5 Mon Sep 17 00:00:00 2001 From: Michael Tinker Date: Tue, 20 Jun 2023 14:54:19 -0500 Subject: [PATCH 56/70] Restores mapping of "stand-in" `JContractIdKey` in hollow account finalization (#7178) 
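The gist of the fix: JKey.convertJKey() now routes empty keys through a convertJKeyEmpty() hook, and JContractIDKey overrides that hook so the stand-in contract key keeps its ContractID instead of collapsing to an empty Key. A minimal sketch, mirroring the new JContractIDKeyTest case added below (the 0.0.0 contract number and the InvalidKeyException declaration are taken from that test), illustrative only rather than additional API:

    // Sketch only -- mirrors standinContractKeyConvertsPerUsual() in this patch.
    final var mapped = JKey.mapJKey(ContractCreateTransitionLogic.STANDIN_CONTRACT_ID_KEY);
    // With this change `mapped` equals
    //   Key.newBuilder()
    //      .setContractID(ContractID.newBuilder().setContractNum(0).build())
    //      .build()
    // i.e. the stand-in ContractID survives the JKey -> Key mapping instead of
    // being erased during hollow account finalization.
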
Signed-off-by: Michael Tinker Signed-off-by: Joseph Sinclair Co-authored-by: Joseph Sinclair --- .../mono/legacy/core/jproto/JContractIDKey.java | 5 +++++ .../service/mono/legacy/core/jproto/JKey.java | 13 ++++++++++++- .../mono/legacy/core/jproto/JKeyList.java | 6 ++++++ .../legacy/core/jproto/JContractIDKeyTest.java | 17 +++++++++++++++-- 4 files changed, 38 insertions(+), 3 deletions(-) diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JContractIDKey.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JContractIDKey.java index 174d6783a1a3..e8c854dd8b53 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JContractIDKey.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JContractIDKey.java @@ -17,6 +17,7 @@ package com.hedera.node.app.service.mono.legacy.core.jproto; import com.hederahashgraph.api.proto.java.ContractID; +import com.hederahashgraph.api.proto.java.Key; /** * Maps to proto Key of type contractID. @@ -93,4 +94,8 @@ public boolean isEmpty() { public boolean isValid() { return !isEmpty(); } + + protected Key convertJKeyEmpty() { + return Key.newBuilder().setContractID(getContractID()).build(); + } } diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKey.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKey.java index 286c9388419a..d1d2892fe8e0 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKey.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKey.java @@ -312,7 +312,9 @@ public static Key convertJKey(JKey jkey, int depth) throws InvalidKeyException { if (depth > MAX_KEY_DEPTH) { throw new InvalidKeyException("Exceeding max expansion depth of " + MAX_KEY_DEPTH); } - if (!(jkey.hasThresholdKey() || jkey.hasKeyList() || jkey.isEmpty())) { + if (jkey.isEmpty()) { + return jkey.convertJKeyEmpty(); + } else if (!(jkey.hasThresholdKey() || jkey.hasKeyList())) { return convertJKeyBasic(jkey); } else if (jkey.hasThresholdKey()) { List jKeys = jkey.getThresholdKey().getKeys().getKeysList(); @@ -341,6 +343,15 @@ public static Key convertJKey(JKey jkey, int depth) throws InvalidKeyException { } } + /** + * Convert an empty JKey to an appropriate empty Key. + * Typically this just creates a new Key, but subclasses may override with specific behavior. + * @return An empty Key. 
+ */ + protected Key convertJKeyEmpty() { + return Key.newBuilder().build(); + } + public static boolean equalUpToDecodability(JKey a, JKey b) { Key aKey = null; Key bKey = null; diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeyList.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeyList.java index 71da23a59d7b..831c30fb3ea0 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeyList.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/legacy/core/jproto/JKeyList.java @@ -16,6 +16,8 @@ package com.hedera.node.app.service.mono.legacy.core.jproto; +import com.hederahashgraph.api.proto.java.Key; +import com.hederahashgraph.api.proto.java.KeyList; import java.util.LinkedList; import java.util.List; @@ -100,4 +102,8 @@ public boolean isForScheduledTxn() { } return false; } + + protected Key convertJKeyEmpty() { + return Key.newBuilder().setKeyList(KeyList.newBuilder().build()).build(); + } } diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JContractIDKeyTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JContractIDKeyTest.java index b54e8f80a850..c630da49e0d6 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JContractIDKeyTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/legacy/core/jproto/JContractIDKeyTest.java @@ -16,10 +16,12 @@ package com.hedera.node.app.service.mono.legacy.core.jproto; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; +import com.hedera.node.app.service.mono.txns.contract.ContractCreateTransitionLogic; import com.hederahashgraph.api.proto.java.ContractID; +import com.hederahashgraph.api.proto.java.Key; +import java.security.InvalidKeyException; import org.junit.jupiter.api.Test; class JContractIDKeyTest { @@ -46,4 +48,15 @@ void scheduleOpsAsExpected() { subject.setForScheduledTxn(true); assertTrue(subject.isForScheduledTxn()); } + + @Test + void standinContractKeyConvertsPerUsual() throws InvalidKeyException { + final var expectedProtoKey = Key.newBuilder() + .setContractID(ContractID.newBuilder().setContractNum(0).build()) + .build(); + + final var actualProtoKey = JKey.mapJKey(ContractCreateTransitionLogic.STANDIN_CONTRACT_ID_KEY); + + assertEquals(expectedProtoKey, actualProtoKey); + } } From a944248c0a36e153aaef3bd4f49d37322cbbc6a8 Mon Sep 17 00:00:00 2001 From: David Bakin <117694041+david-bakin-sl@users.noreply.github.com> Date: Tue, 20 Jun 2023 13:44:55 -0700 Subject: [PATCH 57/70] Remove traceability migration code entirely (#7094) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Having been run (in 0.37 and again in 0.38) we no longer need it. And will no longer need it, ever. Also it is good to remove it now because migration for contract nonces is coming shortly, and if this code was still in the codebase it might very well be used as a "template". But this code was not implemented appropriately. The existing `SystemTask` mechanism which was used was _not_ built for migrations, and certainly not this kind of migration. 
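For orientation, the hook that mechanism exposes looks roughly like the sketch below; it is reconstructed from the @Override methods of the deleted TraceabilityExportTask further down in this patch, so read it as an abridged sketch rather than the authoritative SystemTask definition:

    // Abridged sketch (reconstructed from the deleted task's @Override methods).
    interface SystemTask {
        boolean isActive(long literalNum, MerkleNetworkContext curNetworkCtx);
        SystemTaskResult process(long literalNum, Instant now, MerkleNetworkContext curNetworkCtx);
    }
    // Each call handles a single entity number and answers DONE, NOTHING_TO_DO,
    // or NEEDS_DIFFERENT_CONTEXT, so a one-shot, whole-state migration has to be
    // smeared across a great many such calls -- an awkward fit for migration work.
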
There were several problems caused during migration attempts that led to operational difficulties and long delay in actually getting the migration to happen (and it took two attempts in production!). A proper migration facility will _not_ be built on `SystemTask`/`SystemTaskManager` (as it stands) and thus should not use this code as a template. Less code in product **⇒** fewer bugs in production. What's not to like? Fixes #6422. See also #6731. Signed-off-by: David Bakin <117694041+david-bakin-sl@users.noreply.github.com> --- .../src/test/resources/bootstrap.properties | 1 - .../properties/BootstrapProperties.java | 3 - .../properties/GlobalDynamicProperties.java | 7 - .../context/properties/PropertyNames.java | 2 - .../mono/state/expiry/EntityAutoExpiry.java | 6 - .../service/mono/state/tasks/TaskModule.java | 17 +- .../state/tasks/TraceabilityExportTask.java | 266 ---------- .../tasks/TraceabilityRecordsHelper.java | 68 --- .../src/main/resources/bootstrap.properties | 1 - .../properties/BootstrapPropertiesTest.java | 2 - .../GlobalDynamicPropertiesTest.java | 6 - .../mono/state/tasks/TaskModuleTest.java | 2 +- .../tasks/TraceabilityExportTaskTest.java | 454 ------------------ .../tasks/TraceabilityRecordsHelperTest.java | 104 ---- .../src/test/resources/bootstrap.properties | 1 - .../resources/bootstrap/standard.properties | 1 - .../src/main/resource/bootstrap.properties | 1 - 17 files changed, 2 insertions(+), 940 deletions(-) delete mode 100644 hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityExportTask.java delete mode 100644 hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityRecordsHelper.java delete mode 100644 hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityExportTaskTest.java delete mode 100644 hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityRecordsHelperTest.java diff --git a/hedera-node/hedera-app/src/test/resources/bootstrap.properties b/hedera-node/hedera-app/src/test/resources/bootstrap.properties index 678f2fe5a253..fd4c4b24aa05 100644 --- a/hedera-node/hedera-app/src/test/resources/bootstrap.properties +++ b/hedera-node/hedera-app/src/test/resources/bootstrap.properties @@ -103,7 +103,6 @@ fees.percentUtilizationScaleFactors=DEFAULT(90,10:1,95,25:1,99,100:1) fees.tokenTransferUsageMultiplier=380 files.maxNumber=1_000_000 files.maxSizeKb=1024 -hedera.recordStream.enableTraceabilityMigration=false hedera.recordStream.sidecarMaxSizeMb=256 hedera.transaction.maxMemoUtf8Bytes=100 hedera.transaction.maxValidDuration=180 diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/context/properties/BootstrapProperties.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/context/properties/BootstrapProperties.java index dc5bafeb2ff1..8572802170ee 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/context/properties/BootstrapProperties.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/context/properties/BootstrapProperties.java @@ -136,7 +136,6 @@ import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_PROFILES_ACTIVE; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_REALM; import static 
com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_COMPRESS_FILES_ON_CREATION; -import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_ENABLE_TRACEABILITY_MIGRATION; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_IS_ENABLED; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_LOG_DIR; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_LOG_EVERY_TRANSACTION; @@ -494,7 +493,6 @@ public String getRawValue(final String name) { FEES_PERCENT_CONGESTION_MULTIPLIERS, FEES_PERCENT_UTILIZATION_SCALE_FACTORS, FEES_TOKEN_TRANSFER_USAGE_MULTIPLIER, - HEDERA_RECORD_STREAM_ENABLE_TRACEABILITY_MIGRATION, TRACEABILITY_MIN_FREE_TO_USED_GAS_THROTTLE_RATIO, TRACEABILITY_MAX_EXPORTS_PER_CONS_SEC, HEDERA_TXN_MAX_MEMO_UTF8_BYTES, @@ -673,7 +671,6 @@ public static Function transformFor(final String prop) { entry(HEDERA_RECORD_STREAM_SIG_FILE_VERSION, AS_INT), entry(HEDERA_RECORD_STREAM_QUEUE_CAPACITY, AS_INT), entry(HEDERA_RECORD_STREAM_SIDECAR_MAX_SIZE_MB, AS_INT), - entry(HEDERA_RECORD_STREAM_ENABLE_TRACEABILITY_MIGRATION, AS_BOOLEAN), entry(TRACEABILITY_MIN_FREE_TO_USED_GAS_THROTTLE_RATIO, AS_LONG), entry(TRACEABILITY_MAX_EXPORTS_PER_CONS_SEC, AS_LONG), entry(HEDERA_RECORD_STREAM_LOG_EVERY_TRANSACTION, AS_BOOLEAN), diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/context/properties/GlobalDynamicProperties.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/context/properties/GlobalDynamicProperties.java index 668964d00712..4dd0cdb24c41 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/context/properties/GlobalDynamicProperties.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/context/properties/GlobalDynamicProperties.java @@ -82,7 +82,6 @@ import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_ALLOWANCES_MAX_ACCOUNT_LIMIT; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_ALLOWANCES_MAX_TXN_LIMIT; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_COMPRESS_FILES_ON_CREATION; -import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_ENABLE_TRACEABILITY_MIGRATION; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_RECORD_FILE_VERSION; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_SIDECAR_MAX_SIZE_MB; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_SIG_FILE_VERSION; @@ -283,7 +282,6 @@ public class GlobalDynamicProperties implements EvmProperties { private ContractStoragePriceTiers storagePriceTiers; private boolean compressRecordFilesOnCreation; private boolean tokenAutoCreationsEnabled; - private boolean doTraceabilityExport; private boolean compressAccountBalanceFilesOnCreation; private long traceabilityMaxExportsPerConsSec; private long traceabilityMinFreeToUsedGasThrottleRatio; @@ -436,7 +434,6 @@ public void reload() { compressRecordFilesOnCreation = properties.getBooleanProperty(HEDERA_RECORD_STREAM_COMPRESS_FILES_ON_CREATION); tokenAutoCreationsEnabled = 
properties.getBooleanProperty(TOKENS_AUTO_CREATIONS_ENABLED); compressAccountBalanceFilesOnCreation = properties.getBooleanProperty(BALANCES_COMPRESS_ON_CREATION); - doTraceabilityExport = properties.getBooleanProperty(HEDERA_RECORD_STREAM_ENABLE_TRACEABILITY_MIGRATION); traceabilityMaxExportsPerConsSec = properties.getLongProperty(TRACEABILITY_MAX_EXPORTS_PER_CONS_SEC); traceabilityMinFreeToUsedGasThrottleRatio = properties.getLongProperty(TRACEABILITY_MIN_FREE_TO_USED_GAS_THROTTLE_RATIO); @@ -901,10 +898,6 @@ public boolean shouldCompressAccountBalanceFilesOnCreation() { return compressAccountBalanceFilesOnCreation; } - public boolean shouldDoTraceabilityExport() { - return doTraceabilityExport; - } - public long traceabilityMinFreeToUsedGasThrottleRatio() { return traceabilityMinFreeToUsedGasThrottleRatio; } diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/context/properties/PropertyNames.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/context/properties/PropertyNames.java index 2a76d01b4bec..e499d96803f3 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/context/properties/PropertyNames.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/context/properties/PropertyNames.java @@ -205,8 +205,6 @@ private PropertyNames() { public static final String HEDERA_ALLOWANCES_IS_ENABLED = "hedera.allowances.isEnabled"; public static final String ENTITIES_LIMIT_TOKEN_ASSOCIATIONS = "entities.limitTokenAssociations"; public static final String UTIL_PRNG_IS_ENABLED = "utilPrng.isEnabled"; - public static final String HEDERA_RECORD_STREAM_ENABLE_TRACEABILITY_MIGRATION = - "hedera.recordStream.enableTraceabilityMigration"; public static final String TRACEABILITY_MAX_EXPORTS_PER_CONS_SEC = "traceability.maxExportsPerConsSec"; public static final String TRACEABILITY_MIN_FREE_TO_USED_GAS_THROTTLE_RATIO = "traceability.minFreeToUsedGasThrottleRatio"; diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/expiry/EntityAutoExpiry.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/expiry/EntityAutoExpiry.java index d76fd7060fb1..ca645963e757 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/expiry/EntityAutoExpiry.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/expiry/EntityAutoExpiry.java @@ -56,12 +56,6 @@ public class EntityAutoExpiry { /** This class manages the system tasks that do a little bit of background work as each * consensus transaction is incorporated into the hashgraph. - * - * Currently, the name of this class is not accurate: It is no longer dealing only with - * auto-expiration entities. It controls the progress of all "system tasks" (via the - * {@link SystemTaskManager}) and one of those system tasks is to perform traceability migration - * of contracts that were created prior to the availability of contract sidecars - * ({@link com.hedera.node.app.service.mono.state.tasks.TraceabilityExportTask}. 
*/ @Inject public EntityAutoExpiry( diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/tasks/TaskModule.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/tasks/TaskModule.java index c40532461655..46555bb35aec 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/tasks/TaskModule.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/tasks/TaskModule.java @@ -25,16 +25,7 @@ /** * Binds the {@link SystemTask} implementations, which should always include {@link ExpiryProcess}, - * but may include others such as the 0.31.x/0.36/0.37 {@link TraceabilityExportTask}. - * - *

- * <p>Note we are keeping {@link TraceabilityExportTask} in the codebase at this time for two
- * reasons:
- *
- * <ol>
- *   <li>It is somewhat likely that we'll want to run it again; and,
- *   <li>It provides a much simpler model task than expiration, which uses dozens of classes and
- *       thus obscures some of the details of the task framework.
- * </ol>
+ * but may include others. */ @Module public interface TaskModule { @@ -43,10 +34,4 @@ public interface TaskModule { @Singleton @StringKey("1_ENTITY_EXPIRATION") SystemTask bindEntityExpirationTask(ExpiryProcess expiryProcess); - - @Binds - @IntoMap - @Singleton - @StringKey("2_TRACEABILITY_EXPORT") - SystemTask bindTraceabilityExportTask(TraceabilityExportTask traceabilityExportTask); } diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityExportTask.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityExportTask.java deleted file mode 100644 index 99dec2e3d909..000000000000 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityExportTask.java +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Copyright (C) 2022-2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.node.app.service.mono.state.tasks; - -import static com.hedera.node.app.service.mono.state.tasks.SystemTaskResult.DONE; -import static com.hedera.node.app.service.mono.state.tasks.SystemTaskResult.NEEDS_DIFFERENT_CONTEXT; -import static com.hedera.node.app.service.mono.state.tasks.SystemTaskResult.NOTHING_TO_DO; -import static com.hedera.node.app.service.mono.throttling.MapAccessType.ACCOUNTS_GET; -import static com.hedera.node.app.service.mono.throttling.MapAccessType.BLOBS_GET; -import static com.hedera.node.app.service.mono.throttling.MapAccessType.STORAGE_GET; - -import com.google.common.annotations.VisibleForTesting; -import com.hedera.node.app.hapi.utils.ByteStringUtils; -import com.hedera.node.app.service.mono.context.properties.GlobalDynamicProperties; -import com.hedera.node.app.service.mono.state.adapters.VirtualMapLike; -import com.hedera.node.app.service.mono.state.merkle.MerkleNetworkContext; -import com.hedera.node.app.service.mono.state.migration.AccountStorageAdapter; -import com.hedera.node.app.service.mono.state.migration.HederaAccount; -import com.hedera.node.app.service.mono.state.virtual.ContractKey; -import com.hedera.node.app.service.mono.state.virtual.IterableContractValue; -import com.hedera.node.app.service.mono.store.contracts.EntityAccess; -import com.hedera.node.app.service.mono.throttling.ExpiryThrottle; -import com.hedera.node.app.service.mono.throttling.FunctionalityThrottling; -import com.hedera.node.app.service.mono.throttling.annotations.HandleThrottle; -import com.hedera.node.app.service.mono.utils.EntityIdUtils; -import com.hedera.node.app.service.mono.utils.EntityNum; -import com.hedera.node.app.service.mono.utils.SidecarUtils; -import com.hedera.services.stream.proto.ContractStateChange; -import com.hedera.services.stream.proto.ContractStateChanges; -import com.hedera.services.stream.proto.StorageChange; -import com.hedera.services.stream.proto.TransactionSidecarRecord; -import com.hederahashgraph.api.proto.java.ContractID; -import java.time.Instant; -import 
java.util.ArrayList; -import java.util.List; -import java.util.function.Supplier; -import javax.inject.Inject; -import javax.inject.Singleton; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -/** - * A {@link SystemTask} added in release 0.31 that exports the bytecode and storage slots of all - * contracts from the post-upgrade saved state. - * - *

- * <p>After all pre-existing entities have been scanned, always returns false from {@code
- * isActive()}.
- *
- * <p>Enforces two kinds of "back-pressure" by returning {@link
- * SystemTaskResult#NEEDS_DIFFERENT_CONTEXT} from {@code process()} if,
- *
- * <ol>
- *   <li>More than {@code traceability.maxExportsPerConsSec} entities have been processed by the
- *       {@link SystemTaskManager} in the last consensus second; or,
- *   <li>The free-to-used ratio of the consensus gas throttle has fallen below {@code
- *       traceability.minFreeToUsedGasThrottleRatio}.
- * </ol>
- * - * With default settings, this stops traceability exports whenever gas usage is above 10 percent of - * capacity; or when there have already been 10 traceability exports in the current consensus - * second. - */ -@Singleton -public class TraceabilityExportTask implements SystemTask { - private static final Logger log = LogManager.getLogger(TraceabilityExportTask.class); - private static final int EXPORTS_PER_LOG = 1000; - - private final EntityAccess entityAccess; - private final ExpiryThrottle expiryThrottle; - private final FunctionalityThrottling handleThrottling; - private final GlobalDynamicProperties dynamicProperties; - private final TraceabilityRecordsHelper recordsHelper; - private final Supplier accounts; - private final Supplier> contractStorage; - - private boolean firstTime = true; - - // Used to occasionally log the progress of the traceability export; because this is - // not in state, will become inaccurate on a node that falls behind or restarts, but - // that doesn't matter---exports will finish within a few hours and we can just check - // the logs of a node that didn't fall behind - private int exportsCompleted = 0; - - @Inject - public TraceabilityExportTask( - final EntityAccess entityAccess, - final ExpiryThrottle expiryThrottle, - final GlobalDynamicProperties dynamicProperties, - final TraceabilityRecordsHelper recordsHelper, - final @HandleThrottle FunctionalityThrottling handleThrottling, - final Supplier accounts, - final Supplier> contractStorage) { - this.entityAccess = entityAccess; - this.expiryThrottle = expiryThrottle; - this.dynamicProperties = dynamicProperties; - this.accounts = accounts; - this.recordsHelper = recordsHelper; - this.contractStorage = contractStorage; - this.handleThrottling = handleThrottling; - } - - @Override - public boolean isActive(final long literalNum, final MerkleNetworkContext curNetworkCtx) { - return dynamicProperties.shouldDoTraceabilityExport() - && !curNetworkCtx.areAllPreUpgradeEntitiesScanned() - // No need to do traceability export for a contract created post-upgrade - && literalNum < curNetworkCtx.seqNoPostUpgrade(); - } - - @Override - public SystemTaskResult process( - final long literalNum, final Instant now, final MerkleNetworkContext curNetworkCtx) { - - if (firstTime) { - log.info("Traceability migration is active and beginning work"); - firstTime = false; - } - - if (!recordsHelper.canExportNow() || needsBackPressure(curNetworkCtx)) { - return NEEDS_DIFFERENT_CONTEXT; - } - // It would be a lot of work to split even a single sidecar's construction across - // multiple process() calls, so we just unconditionally register work in the - // throttle bucket; will only happen once per pre-existing contract - expiryThrottle.allowOne(ACCOUNTS_GET); - - final var key = EntityNum.fromLong(literalNum); - final var account = accounts.get().get(key); - if (account == null || !account.isSmartContract()) { - return NOTHING_TO_DO; - } - final var contractId = key.toGrpcContractID(); - final List sidecars = new ArrayList<>(); - addBytecodeSidecar(contractId, sidecars); - // We ignore contracts that don't have bytecode - if (!sidecars.isEmpty()) { - addStateChangesSideCar(contractId, account, sidecars); - recordsHelper.exportSidecarsViaSynthUpdate(literalNum, sidecars); - } - exportsCompleted++; - if (exportsCompleted % EXPORTS_PER_LOG == 0) { - log.info("Have exported traceability info for {} contracts now", exportsCompleted); - } - return DONE; - } - - @Override - public SystemTaskResult process(final long literalNum, 
final Instant now) { - throw new UnsupportedOperationException(); - } - - private boolean needsBackPressure(final MerkleNetworkContext curNetworkCtx) { - return inHighGasRegime() - || curNetworkCtx.getEntitiesTouchedThisSecond() >= dynamicProperties.traceabilityMaxExportsPerConsSec(); - } - - private boolean inHighGasRegime() { - return handleThrottling.gasLimitThrottle().instantaneousFreeToUsedRatio() - < dynamicProperties.traceabilityMinFreeToUsedGasThrottleRatio(); - } - - private void addStateChangesSideCar( - final ContractID contractId, - final HederaAccount contract, - final List sidecars) { - final var contractStorageKey = contract.getFirstContractStorageKey(); - if (contractStorageKey == null) { - return; - } - final var stateChangesSidecar = - generateMigrationStateChangesSidecar(contractId, contractStorageKey, contract.getNumContractKvPairs()); - sidecars.add(stateChangesSidecar); - } - - private void addBytecodeSidecar( - final ContractID contractId, final List sidecars) { - final var bytecodeSidecar = generateMigrationBytecodeSidecarFor(contractId); - if (bytecodeSidecar == null) { - log.warn( - "Contract 0.0.{} has no bytecode in state - no migration" + " sidecar records will be published.", - contractId.getContractNum()); - } else { - sidecars.add(bytecodeSidecar); - } - } - - private TransactionSidecarRecord.Builder generateMigrationBytecodeSidecarFor(final ContractID contractId) { - expiryThrottle.allowOne(BLOBS_GET); - final var runtimeCode = entityAccess.fetchCodeIfPresent(EntityIdUtils.asTypedEvmAddress(contractId)); - if (runtimeCode == null) { - return null; - } - final var bytecodeSidecar = - SidecarUtils.createContractBytecodeSidecarFrom(contractId, runtimeCode.toArrayUnsafe()); - bytecodeSidecar.setMigration(true); - return bytecodeSidecar; - } - - private TransactionSidecarRecord.Builder generateMigrationStateChangesSidecar( - final ContractID contractId, ContractKey contractStorageKey, int maxNumberOfKvPairsToIterate) { - final var contractStateChangeBuilder = ContractStateChange.newBuilder().setContractId(contractId); - - IterableContractValue iterableValue; - final var curStorage = contractStorage.get(); - while (maxNumberOfKvPairsToIterate > 0 && contractStorageKey != null) { - expiryThrottle.allowOne(STORAGE_GET); - iterableValue = curStorage.get(contractStorageKey); - contractStateChangeBuilder.addStorageChanges(StorageChange.newBuilder() - .setSlot(ByteStringUtils.wrapUnsafely(slotAsBytes(contractStorageKey))) - .setValueRead(ByteStringUtils.wrapUnsafely( - iterableValue.asUInt256().trimLeadingZeros().toArrayUnsafe())) - .build()); - contractStorageKey = iterableValue.getNextKeyScopedTo(contractStorageKey.getContractId()); - maxNumberOfKvPairsToIterate--; - } - - if (maxNumberOfKvPairsToIterate != 0) { - log.warn( - "After walking through all iterable storage of contract 0.0.{}," - + " numContractKvPairs field indicates that there should have been {} more" - + " k/v pair(s) left", - contractId.getContractNum(), - maxNumberOfKvPairsToIterate); - } - - return TransactionSidecarRecord.newBuilder() - .setStateChanges(ContractStateChanges.newBuilder() - .addContractStateChanges(contractStateChangeBuilder) - .build()) - .setMigration(true); - } - - private byte[] slotAsBytes(final ContractKey contractStorageKey) { - final var numOfNonZeroBytes = contractStorageKey.getUint256KeyNonZeroBytes(); - // getUint256KeyNonZeroBytes() returns 1 even if slot is 0, so - // check the least significant int in the int[] representation - // of the key to make sure we are in the 
edge case - if (numOfNonZeroBytes == 1 && contractStorageKey.getKey()[7] == 0) { - return new byte[0]; - } - final var contractKeyBytes = new byte[numOfNonZeroBytes]; - for (int i = numOfNonZeroBytes - 1, j = numOfNonZeroBytes - i - 1; i >= 0; i--, j++) { - contractKeyBytes[j] = contractStorageKey.getUint256Byte(i); - } - return contractKeyBytes; - } - - @VisibleForTesting - void setExportsCompleted(final int exportsCompleted) { - this.exportsCompleted = exportsCompleted; - } -} diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityRecordsHelper.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityRecordsHelper.java deleted file mode 100644 index c7ec4df86cb5..000000000000 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityRecordsHelper.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (C) 2022-2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.node.app.service.mono.state.tasks; - -import static com.hedera.node.app.service.mono.records.TxnAwareRecordsHistorian.timestampSidecars; -import static com.hedera.node.app.service.mono.state.expiry.ExpiryRecordsHelper.baseRecordWith; -import static com.hedera.node.app.service.mono.state.expiry.ExpiryRecordsHelper.finalizeAndStream; -import static com.hedera.node.app.service.mono.utils.EntityNum.fromLong; - -import com.hedera.node.app.service.mono.records.ConsensusTimeTracker; -import com.hedera.node.app.service.mono.records.RecordsHistorian; -import com.hedera.node.app.service.mono.state.logic.RecordStreaming; -import com.hedera.node.app.service.mono.store.contracts.precompile.SyntheticTxnFactory; -import com.hedera.services.stream.proto.TransactionSidecarRecord; -import java.util.List; -import javax.inject.Inject; -import javax.inject.Singleton; - -@Singleton -public class TraceabilityRecordsHelper { - private final RecordStreaming recordStreaming; - private final RecordsHistorian recordsHistorian; - private final SyntheticTxnFactory syntheticTxnFactory; - private final ConsensusTimeTracker consensusTimeTracker; - - @Inject - public TraceabilityRecordsHelper( - final RecordStreaming recordStreaming, - final RecordsHistorian recordsHistorian, - final SyntheticTxnFactory syntheticTxnFactory, - final ConsensusTimeTracker consensusTimeTracker) { - this.recordStreaming = recordStreaming; - this.recordsHistorian = recordsHistorian; - this.syntheticTxnFactory = syntheticTxnFactory; - this.consensusTimeTracker = consensusTimeTracker; - } - - public boolean canExportNow() { - return consensusTimeTracker.hasMoreStandaloneRecordTime(); - } - - public void exportSidecarsViaSynthUpdate( - final long contractNum, final List sidecars) { - final var eventTime = consensusTimeTracker.nextStandaloneRecordTime(); - final var txnId = recordsHistorian.computeNextSystemTransactionId(); - final var memo = "Traceability export for 
contract 0.0." + contractNum; - - final var expirableTxnRecord = baseRecordWith(eventTime, txnId).setMemo(memo); - final var synthBody = syntheticTxnFactory.synthNoopContractUpdate(fromLong(contractNum)); - - timestampSidecars(sidecars, eventTime); - finalizeAndStream(expirableTxnRecord, synthBody, eventTime, recordStreaming, sidecars); - } -} diff --git a/hedera-node/hedera-mono-service/src/main/resources/bootstrap.properties b/hedera-node/hedera-mono-service/src/main/resources/bootstrap.properties index c6eaaf471ed8..9d37b86183e4 100644 --- a/hedera-node/hedera-mono-service/src/main/resources/bootstrap.properties +++ b/hedera-node/hedera-mono-service/src/main/resources/bootstrap.properties @@ -105,7 +105,6 @@ fees.percentUtilizationScaleFactors=DEFAULT(0,1:1) fees.tokenTransferUsageMultiplier=380 files.maxNumber=1_000_000 files.maxSizeKb=1024 -hedera.recordStream.enableTraceabilityMigration=false hedera.recordStream.sidecarMaxSizeMb=256 hedera.transaction.maxMemoUtf8Bytes=100 hedera.transaction.maxValidDuration=180 diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/properties/BootstrapPropertiesTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/properties/BootstrapPropertiesTest.java index 0b0780b4b314..e80cf0f0474d 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/properties/BootstrapPropertiesTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/properties/BootstrapPropertiesTest.java @@ -136,7 +136,6 @@ import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_PROFILES_ACTIVE; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_REALM; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_COMPRESS_FILES_ON_CREATION; -import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_ENABLE_TRACEABILITY_MIGRATION; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_IS_ENABLED; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_LOG_DIR; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_LOG_EVERY_TRANSACTION; @@ -547,7 +546,6 @@ class BootstrapPropertiesTest { entry(CONTRACTS_SIDECARS, EnumSet.of(SidecarType.CONTRACT_STATE_CHANGE, SidecarType.CONTRACT_BYTECODE)), entry(CONTRACTS_SIDECAR_VALIDATION_ENABLED, false), entry(HEDERA_RECORD_STREAM_SIDECAR_MAX_SIZE_MB, 256), - entry(HEDERA_RECORD_STREAM_ENABLE_TRACEABILITY_MIGRATION, false), entry(TRACEABILITY_MIN_FREE_TO_USED_GAS_THROTTLE_RATIO, 9L), entry(TRACEABILITY_MAX_EXPORTS_PER_CONS_SEC, 10L), entry(HEDERA_RECORD_STREAM_LOG_EVERY_TRANSACTION, false), diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/properties/GlobalDynamicPropertiesTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/properties/GlobalDynamicPropertiesTest.java index b50ebc0b488d..afb262cfc173 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/properties/GlobalDynamicPropertiesTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/context/properties/GlobalDynamicPropertiesTest.java @@ -78,7 +78,6 @@ import 
static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_ALLOWANCES_MAX_ACCOUNT_LIMIT; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_ALLOWANCES_MAX_TXN_LIMIT; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_COMPRESS_FILES_ON_CREATION; -import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_ENABLE_TRACEABILITY_MIGRATION; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_RECORD_FILE_VERSION; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_SIDECAR_MAX_SIZE_MB; import static com.hedera.node.app.service.mono.context.properties.PropertyNames.HEDERA_RECORD_STREAM_SIG_FILE_VERSION; @@ -232,7 +231,6 @@ void constructsFlagsAsExpected() { assertTrue(subject.areTokenAutoCreationsEnabled()); assertFalse(subject.dynamicEvmVersion()); assertFalse(subject.shouldCompressAccountBalanceFilesOnCreation()); - assertTrue(subject.shouldDoTraceabilityExport()); assertTrue(subject.isLazyCreationEnabled()); assertFalse(subject.isCryptoCreateWithAliasEnabled()); assertFalse(subject.isAtomicCryptoTransferEnabled()); @@ -663,8 +661,6 @@ private void givenPropsWithSeed(final int i) { .willReturn(Map.of(0L, 4L, 1L, 8L)); given(properties.getIntProperty(HEDERA_RECORD_STREAM_SIDECAR_MAX_SIZE_MB)) .willReturn((i + 88)); - given(properties.getBooleanProperty(HEDERA_RECORD_STREAM_ENABLE_TRACEABILITY_MIGRATION)) - .willReturn((i + 81) % 2 == 0); given(properties.getBooleanProperty(CONTRACTS_ITEMIZE_STORAGE_FEES)).willReturn((i + 79) % 2 == 1); given(properties.getLongProperty(CONTRACTS_REFERENCE_SLOT_LIFETIME)).willReturn(i + 86L); given(properties.getIntProperty(CONTRACTS_FREE_STORAGE_TIER_LIMIT)).willReturn(i + 87); @@ -675,8 +671,6 @@ private void givenPropsWithSeed(final int i) { given(properties.getBooleanProperty(CONTRACTS_DYNAMIC_EVM_VERSION)).willReturn(i % 2 == 0); given(properties.getStringProperty(CONTRACTS_EVM_VERSION)).willReturn(evmVersions[i % 2]); given(properties.getBooleanProperty(BALANCES_COMPRESS_ON_CREATION)).willReturn((i + 84) % 2 == 0); - given(properties.getBooleanProperty(HEDERA_RECORD_STREAM_ENABLE_TRACEABILITY_MIGRATION)) - .willReturn((i + 85) % 2 == 0); given(properties.getLongProperty(TRACEABILITY_MIN_FREE_TO_USED_GAS_THROTTLE_RATIO)) .willReturn(i + 87L); given(properties.getLongProperty(TRACEABILITY_MAX_EXPORTS_PER_CONS_SEC)).willReturn(i + 88L); diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/tasks/TaskModuleTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/tasks/TaskModuleTest.java index c8c349442092..b352b677adb7 100644 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/tasks/TaskModuleTest.java +++ b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/tasks/TaskModuleTest.java @@ -32,7 +32,7 @@ class TaskModuleTest { /** This array must contain the names of _all_ `SystemTask`s */ - static final String[] EXPECTED_SYSTEM_TASK_SIMPLE_NAMES = {"ExpiryProcess", "TraceabilityExportTask"}; + static final String[] EXPECTED_SYSTEM_TASK_SIMPLE_NAMES = {"ExpiryProcess"}; /** Scope for all modules containing `SystemTasks` (to limit the search for them) */ static final String MODULE_PREFIX_FOR_ALL_SYSTEM_TASKS = "com.hedera.node.app.service.*"; diff --git 
a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityExportTaskTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityExportTaskTest.java deleted file mode 100644 index 5f4b9f94c7e8..000000000000 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityExportTaskTest.java +++ /dev/null @@ -1,454 +0,0 @@ -/* - * Copyright (C) 2022-2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.hedera.node.app.service.mono.state.tasks; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.ArgumentCaptor.forClass; -import static org.mockito.BDDMockito.given; -import static org.mockito.Mockito.anyList; -import static org.mockito.Mockito.anyLong; -import static org.mockito.Mockito.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; - -import com.hedera.node.app.hapi.utils.ByteStringUtils; -import com.hedera.node.app.hapi.utils.throttles.GasLimitDeterministicThrottle; -import com.hedera.node.app.service.mono.context.properties.GlobalDynamicProperties; -import com.hedera.node.app.service.mono.state.adapters.VirtualMapLike; -import com.hedera.node.app.service.mono.state.merkle.MerkleAccount; -import com.hedera.node.app.service.mono.state.merkle.MerkleNetworkContext; -import com.hedera.node.app.service.mono.state.migration.AccountStorageAdapter; -import com.hedera.node.app.service.mono.state.virtual.ContractKey; -import com.hedera.node.app.service.mono.state.virtual.IterableContractValue; -import com.hedera.node.app.service.mono.store.contracts.EntityAccess; -import com.hedera.node.app.service.mono.throttling.ExpiryThrottle; -import com.hedera.node.app.service.mono.throttling.FunctionalityThrottling; -import com.hedera.node.app.service.mono.throttling.MapAccessType; -import com.hedera.node.app.service.mono.utils.EntityNum; -import com.hedera.node.app.service.mono.utils.SidecarUtils; -import com.hedera.services.stream.proto.ContractStateChange; -import com.hedera.services.stream.proto.ContractStateChanges; -import com.hedera.services.stream.proto.StorageChange; -import com.hedera.services.stream.proto.TransactionSidecarRecord; -import com.hedera.test.factories.accounts.MerkleAccountFactory; -import com.swirlds.virtualmap.VirtualMap; -import java.time.Instant; -import java.util.List; -import org.apache.tuweni.bytes.Bytes; -import org.apache.tuweni.units.bigints.UInt256; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.ArgumentCaptor; -import 
org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -@SuppressWarnings("unchecked") -class TraceabilityExportTaskTest { - private static final long ENTITY_NUM = 1234L; - private static final Instant NOW = Instant.ofEpochSecond(1_234_567, 890); - private static final MerkleAccount AN_ACCOUNT = - MerkleAccountFactory.newAccount().get(); - - @Mock - private TraceabilityRecordsHelper recordsHelper; - - @Mock - private AccountStorageAdapter accounts; - - @Mock - private EntityAccess entityAccess; - - @Mock - private ExpiryThrottle expiryThrottle; - - @Mock - private GlobalDynamicProperties dynamicProperties; - - @Mock - private MerkleNetworkContext networkCtx; - - @Mock - private VirtualMap contractStorage; - - @Mock - private FunctionalityThrottling throttling; - - @Mock - private GasLimitDeterministicThrottle gasThrottle; - - private TraceabilityExportTask subject; - - @BeforeEach - void setUp() { - subject = new TraceabilityExportTask( - entityAccess, - expiryThrottle, - dynamicProperties, - recordsHelper, - throttling, - () -> accounts, - () -> VirtualMapLike.from(contractStorage)); - } - - @Test - void notActiveIfDisabled() { - assertFalse(subject.isActive(ENTITY_NUM, networkCtx)); - } - - @Test - void notActiveIfAllPreExistingEntitiesScanned() { - given(dynamicProperties.shouldDoTraceabilityExport()).willReturn(true); - given(networkCtx.areAllPreUpgradeEntitiesScanned()).willReturn(true); - assertFalse(subject.isActive(ENTITY_NUM, networkCtx)); - } - - @Test - void notActiveIfEntityWasNotPreExisting() { - given(dynamicProperties.shouldDoTraceabilityExport()).willReturn(true); - given(networkCtx.seqNoPostUpgrade()).willReturn(ENTITY_NUM); - assertFalse(subject.isActive(ENTITY_NUM, networkCtx)); - } - - @Test - void activeWhenExpected() { - given(dynamicProperties.shouldDoTraceabilityExport()).willReturn(true); - given(networkCtx.seqNoPostUpgrade()).willReturn(ENTITY_NUM + 1); - assertTrue(subject.isActive(ENTITY_NUM, networkCtx)); - } - - @Test - void requiresContextForProcess() { - assertThrows(UnsupportedOperationException.class, () -> subject.process(ENTITY_NUM, NOW)); - } - - @Test - void needsDifferentContextIfCannotExportRecords() { - assertEquals(SystemTaskResult.NEEDS_DIFFERENT_CONTEXT, subject.process(ENTITY_NUM, NOW, networkCtx)); - - verifyNoInteractions(expiryThrottle); - } - - @Test - void needsDifferentContextIfFreeToUsedRatioNotEnough() { - given(recordsHelper.canExportNow()).willReturn(true); - given(dynamicProperties.traceabilityMinFreeToUsedGasThrottleRatio()).willReturn(5L); - given(throttling.gasLimitThrottle()).willReturn(gasThrottle); - given(gasThrottle.instantaneousFreeToUsedRatio()).willReturn(4L); - - assertEquals(SystemTaskResult.NEEDS_DIFFERENT_CONTEXT, subject.process(ENTITY_NUM, NOW, networkCtx)); - } - - @Test - void needsDifferentContextIfTooManyEntitiesProcessedThisSecond() { - given(recordsHelper.canExportNow()).willReturn(true); - given(dynamicProperties.traceabilityMinFreeToUsedGasThrottleRatio()).willReturn(5L); - given(throttling.gasLimitThrottle()).willReturn(gasThrottle); - given(gasThrottle.instantaneousFreeToUsedRatio()).willReturn(6L); - given(networkCtx.getEntitiesTouchedThisSecond()).willReturn(21L); - given(dynamicProperties.traceabilityMaxExportsPerConsSec()).willReturn(20L); - - assertEquals(SystemTaskResult.NEEDS_DIFFERENT_CONTEXT, subject.process(ENTITY_NUM, NOW, networkCtx)); - } - - @Test - void nothingToDoIfNotAnAccount() { - 
given(dynamicProperties.traceabilityMaxExportsPerConsSec()).willReturn(1L); - given(recordsHelper.canExportNow()).willReturn(true); - given(throttling.gasLimitThrottle()).willReturn(gasThrottle); - - assertEquals(SystemTaskResult.NOTHING_TO_DO, subject.process(ENTITY_NUM, NOW, networkCtx)); - - verify(expiryThrottle).allowOne(MapAccessType.ACCOUNTS_GET); - } - - @Test - void nothingToDoIfNotAContract() { - given(dynamicProperties.traceabilityMaxExportsPerConsSec()).willReturn(1L); - given(recordsHelper.canExportNow()).willReturn(true); - - given(throttling.gasLimitThrottle()).willReturn(gasThrottle); - given(accounts.get(EntityNum.fromLong(ENTITY_NUM))).willReturn(AN_ACCOUNT); - assertEquals(SystemTaskResult.NOTHING_TO_DO, subject.process(ENTITY_NUM, NOW, networkCtx)); - - verify(expiryThrottle).allowOne(MapAccessType.ACCOUNTS_GET); - } - - @Test - void createsWellBehavedContractSideCars() { - given(dynamicProperties.traceabilityMaxExportsPerConsSec()).willReturn(1L); - given(recordsHelper.canExportNow()).willReturn(true); - final ArgumentCaptor> captor = forClass(List.class); - - // Setup mock contract with 4 slots - final var contract1 = mock(MerkleAccount.class); - given(contract1.isSmartContract()).willReturn(true); - given(contract1.getNumContractKvPairs()).willReturn(4); - final var contract1Num = 3L; - final var slot1 = UInt256.valueOf(1L); - final var contract1Key1 = ContractKey.from(contract1Num, slot1); - final var slot0 = UInt256.valueOf(0L); - final var contract1Key2 = ContractKey.from(contract1Num, slot0); - final var slot1555542 = UInt256.valueOf(155542L); - final var contract1Key3 = ContractKey.from(contract1Num, slot1555542); - final var slot999 = UInt256.valueOf(999L); - final var contract1Key4 = ContractKey.from(contract1Num, slot999); - final var contract1Value1 = mock(IterableContractValue.class); - final var contract1Value2 = mock(IterableContractValue.class); - final var contract1Value3 = mock(IterableContractValue.class); - final var contract1Value4 = mock(IterableContractValue.class); - given(contract1.getFirstContractStorageKey()).willReturn(contract1Key1); - given(contract1Value1.getNextKeyScopedTo(contract1Num)).willReturn(contract1Key2); - final var value1 = "value1".getBytes(); - given(contract1Value1.asUInt256()).willReturn(UInt256.fromBytes(Bytes.of(value1))); - given(contract1Value2.getNextKeyScopedTo(contract1Num)).willReturn(contract1Key3); - final var value2 = UInt256.valueOf(0); - given(contract1Value2.asUInt256()).willReturn(value2); - final var value3 = "value3".getBytes(); - given(contract1Value3.asUInt256()).willReturn(UInt256.fromBytes(Bytes.of(value3))); - given(contract1Value3.getNextKeyScopedTo(contract1Num)).willReturn(contract1Key4); - given(contract1Value4.asUInt256()).willReturn(UInt256.ZERO); - given(contract1Value4.getNextKeyScopedTo(contract1Num)).willReturn(null); - given(contractStorage.get(contract1Key1)).willReturn(contract1Value1); - given(contractStorage.get(contract1Key2)).willReturn(contract1Value2); - given(contractStorage.get(contract1Key3)).willReturn(contract1Value3); - given(contractStorage.get(contract1Key4)).willReturn(contract1Value4); - final var entityNum1 = EntityNum.fromLong(contract1Num); - final var runtimeBytes = "runtime".getBytes(); - given(entityAccess.fetchCodeIfPresent(entityNum1.toEvmAddress())).willReturn(Bytes.of(runtimeBytes)); - given(accounts.get(entityNum1)).willReturn(contract1); - given(throttling.gasLimitThrottle()).willReturn(gasThrottle); - subject.setExportsCompleted(999); - - // when: - final var 
result = subject.process(entityNum1.longValue(), NOW, networkCtx); - assertEquals(SystemTaskResult.DONE, result); - - // then: - verify(recordsHelper).exportSidecarsViaSynthUpdate(eq(entityNum1.longValue()), captor.capture()); - final var sidecarRecords = captor.getValue(); - assertEquals( - SidecarUtils.createContractBytecodeSidecarFrom(entityNum1.toGrpcContractID(), runtimeBytes) - .setMigration(true) - .build(), - sidecarRecords.get(0).build()); - final var contract1StateChanges = ContractStateChange.newBuilder() - .setContractId(entityNum1.toGrpcContractID()) - .addStorageChanges(StorageChange.newBuilder() - .setSlot(ByteStringUtils.wrapUnsafely( - slot1.trimLeadingZeros().toArrayUnsafe())) - .setValueRead(ByteStringUtils.wrapUnsafely(value1)) - .build()) - .addStorageChanges( - // as per HIP-260 - a contract that only reads a zero value from - // slot zero will have an empty message. - StorageChange.newBuilder().build()) - .addStorageChanges(StorageChange.newBuilder() - .setSlot(ByteStringUtils.wrapUnsafely( - slot1555542.trimLeadingZeros().toArrayUnsafe())) - .setValueRead(ByteStringUtils.wrapUnsafely(value3)) - .build()) - .addStorageChanges( - // as per HIP-260 - zero value read will not set the valueRead field - // of a - // storage change - StorageChange.newBuilder() - .setSlot(ByteStringUtils.wrapUnsafely( - slot999.trimLeadingZeros().toArrayUnsafe())) - .build()) - .build(); - final var expectedStateChangesContract1 = ContractStateChanges.newBuilder() - .addContractStateChanges(contract1StateChanges) - .build(); - assertEquals( - TransactionSidecarRecord.newBuilder() - .setStateChanges(expectedStateChangesContract1) - .setMigration(true) - .build(), - sidecarRecords.get(1).build()); - } - - @Test - void createsPoisonPillContractSideCars() { - given(dynamicProperties.traceabilityMaxExportsPerConsSec()).willReturn(1L); - given(recordsHelper.canExportNow()).willReturn(true); - final ArgumentCaptor> captor = forClass(List.class); - - // mock contract with 1 slot with loop - final var contract2 = mock(MerkleAccount.class); - given(contract2.isSmartContract()).willReturn(true); - given(contract2.getNumContractKvPairs()).willReturn(1); - final var contract1Num = 3L; - final var contract2Num = 4L; - final var contract2Slot257 = UInt256.valueOf(257L); - final var contract2Key1 = ContractKey.from(contract2Num, contract2Slot257); - final var contract2Key2 = ContractKey.from(contract1Num, UInt256.valueOf(2L)); - final var contract2Value1 = mock(IterableContractValue.class); - given(contract2.getFirstContractStorageKey()).willReturn(contract2Key1); - given(contract2Value1.getNextKeyScopedTo(contract2Num)).willReturn(contract2Key2); - final var value4 = UInt256.valueOf(1L); - given(contract2Value1.asUInt256()).willReturn(value4); - given(contractStorage.get(contract2Key1)).willReturn(contract2Value1); - final var entityNum2 = EntityNum.fromLong(contract2Num); - final var runtimeBytes2 = "runtime2".getBytes(); - given(entityAccess.fetchCodeIfPresent(entityNum2.toEvmAddress())).willReturn(Bytes.of(runtimeBytes2)); - given(accounts.get(entityNum2)).willReturn(contract2); - given(throttling.gasLimitThrottle()).willReturn(gasThrottle); - - // when: - final var result = subject.process(entityNum2.longValue(), NOW, networkCtx); - assertEquals(SystemTaskResult.DONE, result); - - // then: - verify(recordsHelper).exportSidecarsViaSynthUpdate(eq(entityNum2.longValue()), captor.capture()); - final var sidecarRecords = captor.getValue(); - assertEquals( - 
SidecarUtils.createContractBytecodeSidecarFrom(entityNum2.toGrpcContractID(), runtimeBytes2) - .setMigration(true) - .build(), - sidecarRecords.get(0).build()); - final var contract2StateChange = ContractStateChange.newBuilder() - .setContractId(entityNum2.toGrpcContractID()) - .addStorageChanges(StorageChange.newBuilder() - .setSlot(ByteStringUtils.wrapUnsafely( - contract2Slot257.trimLeadingZeros().toArrayUnsafe())) - .setValueRead(ByteStringUtils.wrapUnsafely( - value4.trimLeadingZeros().toArrayUnsafe())) - .build()) - .build(); - final var expectedStateChangesContract2 = ContractStateChanges.newBuilder() - .addContractStateChanges(contract2StateChange) - .build(); - assertEquals( - TransactionSidecarRecord.newBuilder() - .setStateChanges(expectedStateChangesContract2) - .setMigration(true) - .build(), - sidecarRecords.get(1).build()); - } - - @Test - void createsMisSizedContractSideCars() { - given(dynamicProperties.traceabilityMaxExportsPerConsSec()).willReturn(1L); - given(recordsHelper.canExportNow()).willReturn(true); - final ArgumentCaptor> captor = forClass(List.class); - - // mock contract with 1 slot but numKvPairs = 2 - final var contract1 = mock(MerkleAccount.class); - given(contract1.isSmartContract()).willReturn(true); - given(contract1.getNumContractKvPairs()).willReturn(2); - final var contract1Num = 3L; - final var contract1Key1 = ContractKey.from(contract1Num, UInt256.valueOf(1L)); - final var contract1Value1 = mock(IterableContractValue.class); - given(contract1.getFirstContractStorageKey()).willReturn(contract1Key1); - given(contract1Value1.getNextKeyScopedTo(contract1Num)).willReturn(null); - final var value = "value".getBytes(); - given(contract1Value1.asUInt256()).willReturn(UInt256.fromBytes(Bytes.of(value))); - given(contractStorage.get(contract1Key1)).willReturn(contract1Value1); - final var entityNum1 = EntityNum.fromLong(contract1Num); - final var runtimeBytes = "runtime".getBytes(); - given(entityAccess.fetchCodeIfPresent(entityNum1.toEvmAddress())).willReturn(Bytes.of(runtimeBytes)); - given(accounts.get(entityNum1)).willReturn(contract1); - given(throttling.gasLimitThrottle()).willReturn(gasThrottle); - - // when: - final var result = subject.process(entityNum1.longValue(), NOW, networkCtx); - assertEquals(SystemTaskResult.DONE, result); - - // then: - verify(recordsHelper).exportSidecarsViaSynthUpdate(eq(entityNum1.longValue()), captor.capture()); - final var sidecarRecords = captor.getValue(); - assertEquals( - SidecarUtils.createContractBytecodeSidecarFrom(entityNum1.toGrpcContractID(), runtimeBytes) - .setMigration(true) - .build(), - sidecarRecords.get(0).build()); - final var contract2StateChange = ContractStateChange.newBuilder() - .setContractId(entityNum1.toGrpcContractID()) - .addStorageChanges(StorageChange.newBuilder() - .setSlot(ByteStringUtils.wrapUnsafely( - UInt256.valueOf(1L).trimLeadingZeros().toArrayUnsafe())) - .setValueRead(ByteStringUtils.wrapUnsafely(value)) - .build()) - .build(); - final var expectedStateChangesContract2 = ContractStateChanges.newBuilder() - .addContractStateChanges(contract2StateChange) - .build(); - assertEquals( - TransactionSidecarRecord.newBuilder() - .setStateChanges(expectedStateChangesContract2) - .setMigration(true) - .build(), - sidecarRecords.get(1).build()); - } - - @Test - void createsStorageLessContractSideCar() { - given(dynamicProperties.traceabilityMaxExportsPerConsSec()).willReturn(1L); - given(recordsHelper.canExportNow()).willReturn(true); - final ArgumentCaptor> captor = forClass(List.class); - 
- // Mock contract no storage - final var contract = mock(MerkleAccount.class); - given(contract.isSmartContract()).willReturn(true); - given(contract.getFirstContractStorageKey()).willReturn(null); - given(throttling.gasLimitThrottle()).willReturn(gasThrottle); - final var entityNum = EntityNum.fromLong(1L); - final var runtimeBytes = "runtime".getBytes(); - given(entityAccess.fetchCodeIfPresent(entityNum.toEvmAddress())).willReturn(Bytes.of(runtimeBytes)); - given(accounts.get(entityNum)).willReturn(contract); - - // when: - final var result = subject.process(entityNum.longValue(), NOW, networkCtx); - assertEquals(SystemTaskResult.DONE, result); - - // then: - verify(recordsHelper).exportSidecarsViaSynthUpdate(eq(entityNum.longValue()), captor.capture()); - final var sidecarRecords = captor.getValue(); - assertEquals( - SidecarUtils.createContractBytecodeSidecarFrom(entityNum.toGrpcContractID(), runtimeBytes) - .setMigration(true) - .build(), - sidecarRecords.get(0).build()); - } - - @Test - void skipsBytecodeLessContractSideCars() { - given(dynamicProperties.traceabilityMaxExportsPerConsSec()).willReturn(1L); - given(recordsHelper.canExportNow()).willReturn(true); - // Mock contract no storage - final var contract = mock(MerkleAccount.class); - given(contract.isSmartContract()).willReturn(true); - given(throttling.gasLimitThrottle()).willReturn(gasThrottle); - final var contractNum = 1L; - final var contractEntityNum = EntityNum.fromLong(contractNum); - given(accounts.get(contractEntityNum)).willReturn(contract); - - // when: - final var result = subject.process(contractEntityNum.longValue(), NOW, networkCtx); - assertEquals(SystemTaskResult.DONE, result); - - // then: - verify(recordsHelper, never()).exportSidecarsViaSynthUpdate(anyLong(), anyList()); - } -} diff --git a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityRecordsHelperTest.java b/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityRecordsHelperTest.java deleted file mode 100644 index 85f917785c1c..000000000000 --- a/hedera-node/hedera-mono-service/src/test/java/com/hedera/node/app/service/mono/state/tasks/TraceabilityRecordsHelperTest.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright (C) 2022-2023 Hedera Hashgraph, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.hedera.node.app.service.mono.state.tasks; - -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.BDDMockito.given; -import static org.mockito.Mockito.verify; - -import com.hedera.node.app.service.mono.records.ConsensusTimeTracker; -import com.hedera.node.app.service.mono.records.RecordsHistorian; -import com.hedera.node.app.service.mono.state.logic.RecordStreaming; -import com.hedera.node.app.service.mono.state.submerkle.TxnId; -import com.hedera.node.app.service.mono.store.contracts.precompile.SyntheticTxnFactory; -import com.hedera.node.app.service.mono.stream.RecordStreamObject; -import com.hedera.node.app.service.mono.utils.EntityNum; -import com.hedera.node.app.service.mono.utils.accessors.SignedTxnAccessor; -import com.hedera.services.stream.proto.TransactionSidecarRecord; -import com.hedera.test.utils.IdUtils; -import com.hederahashgraph.api.proto.java.ResponseCodeEnum; -import com.hederahashgraph.api.proto.java.TransactionBody; -import com.hederahashgraph.api.proto.java.TransactionID; -import java.time.Instant; -import java.util.List; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.ArgumentCaptor; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class TraceabilityRecordsHelperTest { - private final EntityNum someContract = EntityNum.fromLong(1234); - private final Instant now = Instant.ofEpochSecond(1_234_567L, 890); - - private final TransactionID mockSystemTxnId = TransactionID.newBuilder() - .setAccountID(IdUtils.asAccount("0.0.789")) - .build(); - - @Mock - private RecordStreaming recordStreaming; - - @Mock - private RecordsHistorian recordsHistorian; - - @Mock - private SyntheticTxnFactory syntheticTxnFactory; - - @Mock - private ConsensusTimeTracker consensusTimeTracker; - - private TransactionBody.Builder updateBuilder = TransactionBody.newBuilder(); - private TransactionSidecarRecord.Builder aBuilder = TransactionSidecarRecord.newBuilder(); - private TransactionSidecarRecord.Builder bBuilder = TransactionSidecarRecord.newBuilder(); - - private TraceabilityRecordsHelper subject; - - @BeforeEach - void setUp() { - subject = new TraceabilityRecordsHelper( - recordStreaming, recordsHistorian, syntheticTxnFactory, consensusTimeTracker); - } - - @Test - void cannotExportIfNoTimeLeft() { - assertFalse(subject.canExportNow()); - } - - @Test - void happyPathWorks() { - ArgumentCaptor captor = ArgumentCaptor.forClass(RecordStreamObject.class); - - given(consensusTimeTracker.nextStandaloneRecordTime()).willReturn(now); - given(recordsHistorian.computeNextSystemTransactionId()).willReturn(TxnId.fromGrpc(mockSystemTxnId)); - given(syntheticTxnFactory.synthNoopContractUpdate(someContract)).willReturn(updateBuilder); - - subject.exportSidecarsViaSynthUpdate(someContract.longValue(), List.of(aBuilder, bBuilder)); - - verify(recordStreaming).streamSystemRecord(captor.capture()); - final var rso = captor.getValue(); - - assertEquals(rso.getTimestamp(), now); - final var streamedRecord = rso.getExpirableTransactionRecord(); - final var streamedReceipt = streamedRecord.getReceipt(); - assertEquals(ResponseCodeEnum.SUCCESS, streamedReceipt.getEnumStatus()); - assertEquals(now, streamedRecord.getConsensusTime().toJava()); - final var accessor = SignedTxnAccessor.uncheckedFrom(rso.getTransaction()); - assertEquals(mockSystemTxnId, accessor.getTxnId()); - } -} diff --git 
a/hedera-node/hedera-mono-service/src/test/resources/bootstrap.properties b/hedera-node/hedera-mono-service/src/test/resources/bootstrap.properties index 2e9a5427934d..5969943e5493 100644 --- a/hedera-node/hedera-mono-service/src/test/resources/bootstrap.properties +++ b/hedera-node/hedera-mono-service/src/test/resources/bootstrap.properties @@ -104,7 +104,6 @@ fees.percentUtilizationScaleFactors=DEFAULT(90,10:1,95,25:1,99,100:1) fees.tokenTransferUsageMultiplier=380 files.maxNumber=1_000_000 files.maxSizeKb=1024 -hedera.recordStream.enableTraceabilityMigration=false hedera.recordStream.sidecarMaxSizeMb=256 hedera.transaction.maxMemoUtf8Bytes=100 hedera.transaction.maxValidDuration=180 diff --git a/hedera-node/hedera-mono-service/src/test/resources/bootstrap/standard.properties b/hedera-node/hedera-mono-service/src/test/resources/bootstrap/standard.properties index 0b673a91ac8a..d4379d0eb6a5 100644 --- a/hedera-node/hedera-mono-service/src/test/resources/bootstrap/standard.properties +++ b/hedera-node/hedera-mono-service/src/test/resources/bootstrap/standard.properties @@ -103,7 +103,6 @@ fees.percentUtilizationScaleFactors=DEFAULT(90,10:1,95,25:1,99,100:1) fees.tokenTransferUsageMultiplier=380 files.maxNumber=1_000_000 files.maxSizeKb=1024 -hedera.recordStream.enableTraceabilityMigration=false hedera.recordStream.sidecarMaxSizeMb=256 hedera.transaction.maxMemoUtf8Bytes=100 hedera.transaction.maxValidDuration=180 diff --git a/hedera-node/test-clients/src/main/resource/bootstrap.properties b/hedera-node/test-clients/src/main/resource/bootstrap.properties index 7d6f12888897..87765c8a2000 100644 --- a/hedera-node/test-clients/src/main/resource/bootstrap.properties +++ b/hedera-node/test-clients/src/main/resource/bootstrap.properties @@ -107,7 +107,6 @@ fees.percentUtilizationScaleFactors=DEFAULT(0,1:1) fees.tokenTransferUsageMultiplier=380 files.maxNumber=1_000_000 files.maxSizeKb=1024 -hedera.recordStream.enableTraceabilityMigration=false hedera.recordStream.sidecarMaxSizeMb=256 hedera.transaction.maxMemoUtf8Bytes=100 hedera.transaction.maxValidDuration=180 From 29eb3e87aaf0d7815f99d96a99a42fe7c790af78 Mon Sep 17 00:00:00 2001 From: Ivan Malygin Date: Tue, 20 Jun 2023 16:58:32 -0400 Subject: [PATCH 58/70] 07046 Fixed excessive logging caused by MemoryIndexDiskKeyValueStoreMergeHammerTest (#7179) Signed-off-by: Ivan Malygin Co-authored-by: Cody Littley <56973212+cody-littley@users.noreply.github.com> --- .../swirlds-jasperdb/build.gradle.kts | 35 ++++++++++--------- ...IndexDiskKeyValueStoreMergeHammerTest.java | 25 +++++++++++++ 2 files changed, 43 insertions(+), 17 deletions(-) diff --git a/platform-sdk/swirlds-jasperdb/build.gradle.kts b/platform-sdk/swirlds-jasperdb/build.gradle.kts index a6b5d9d57893..93e6c22516a9 100644 --- a/platform-sdk/swirlds-jasperdb/build.gradle.kts +++ b/platform-sdk/swirlds-jasperdb/build.gradle.kts @@ -15,26 +15,27 @@ */ plugins { - id("com.swirlds.platform.conventions") - id("com.swirlds.platform.library") - id("com.swirlds.platform.maven-publish") - id("com.swirlds.platform.benchmark-conventions") + id("com.swirlds.platform.conventions") + id("com.swirlds.platform.library") + id("com.swirlds.platform.maven-publish") + id("com.swirlds.platform.benchmark-conventions") } dependencies { - // Individual Dependencies - implementation(project(":swirlds-base")) - api(project(":swirlds-virtualmap")) - api(project(":swirlds-config-api")) - compileOnly(libs.spotbugs.annotations) + // Individual Dependencies + implementation(project(":swirlds-base")) + 
api(project(":swirlds-virtualmap")) + api(project(":swirlds-config-api")) + compileOnly(libs.spotbugs.annotations) - // Bundle Dependencies - implementation(libs.bundles.eclipse) + // Bundle Dependencies + implementation(libs.bundles.eclipse) - // Test Dependencies - testImplementation(project(":swirlds-config-impl")) - testImplementation(project(":swirlds-unit-tests:common:swirlds-test-framework")) - testImplementation(project(":swirlds-unit-tests:common:swirlds-common-test")) - testImplementation(testLibs.bundles.junit) - testImplementation(testLibs.bundles.mocking) + // Test Dependencies + testImplementation(project(":swirlds-config-impl")) + testImplementation(project(":swirlds-unit-tests:common:swirlds-test-framework")) + testImplementation(project(":swirlds-unit-tests:common:swirlds-common-test")) + testImplementation(testLibs.bundles.junit) + testImplementation(testLibs.bundles.mocking) + testImplementation(libs.log4j.core) } diff --git a/platform-sdk/swirlds-jasperdb/src/test/java/com/swirlds/merkledb/files/MemoryIndexDiskKeyValueStoreMergeHammerTest.java b/platform-sdk/swirlds-jasperdb/src/test/java/com/swirlds/merkledb/files/MemoryIndexDiskKeyValueStoreMergeHammerTest.java index dc2cfd449a93..8c828dc7bb58 100644 --- a/platform-sdk/swirlds-jasperdb/src/test/java/com/swirlds/merkledb/files/MemoryIndexDiskKeyValueStoreMergeHammerTest.java +++ b/platform-sdk/swirlds-jasperdb/src/test/java/com/swirlds/merkledb/files/MemoryIndexDiskKeyValueStoreMergeHammerTest.java @@ -35,6 +35,14 @@ import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.config.Configuration; +import org.apache.logging.log4j.core.config.Configurator; +import org.apache.logging.log4j.core.config.LoggerConfig; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.function.ThrowingSupplier; import org.junit.jupiter.api.io.TempDir; @@ -47,11 +55,28 @@ */ class MemoryIndexDiskKeyValueStoreMergeHammerTest { + private static Level currentLogLevel; /** Temporary directory provided by JUnit */ @SuppressWarnings("unused") @TempDir Path testDirectory; + @BeforeAll + public static void setup() { + final LoggerContext ctx = (LoggerContext) LogManager.getContext(false); + final Configuration config = ctx.getConfiguration(); + final LoggerConfig loggerConfig = config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME); + currentLogLevel = loggerConfig.getLevel(); + // To prevent excessive logging we reduce the log level to WARN for this test. + // See https://github.com/hashgraph/hedera-services/issues/7083 for the context + loggerConfig.setLevel(Level.WARN); + } + + @AfterAll + public static void cleanUp() { + Configurator.setLevel(LogManager.ROOT_LOGGER_NAME, currentLogLevel); + } + /** * Hammers the {@link MemoryIndexDiskKeyValueStore} looking for any race conditions or weakness * in the implementation that can be observed due to load. 
The configuration options here are From 911234bcab679754323d0e29c4746a715538ae72 Mon Sep 17 00:00:00 2001 From: Richard Bair Date: Tue, 20 Jun 2023 13:59:16 -0700 Subject: [PATCH 59/70] Start TLS and non-TLS servers (#7073) Signed-off-by: Richard Bair --- .../java/com/hedera/node/app/spi/Service.java | 15 + .../node/app/spi/fixtures/util/LogCaptor.java | 12 +- .../itest/java/grpc/HelidonManagerTest.java | 508 ++++++++++++++++++ .../main/java/com/hedera/node/app/Hedera.java | 159 ++---- .../node/app/HederaInjectionComponent.java | 9 + .../node/app/grpc/GrpcInjectionModule.java | 27 + .../node/app/grpc/GrpcServerManager.java | 63 +++ .../app/grpc/HelidonGrpcServerManager.java | 241 +++++++++ .../node/app/services/ServicesRegistry.java | 36 ++ .../app/services/ServicesRegistryImpl.java | 41 ++ .../app/components/IngestComponentTest.java | 2 + .../grpc/HelidonGrpcServerManagerTest.java | 89 +++ .../workflows/query/QueryComponentTest.java | 2 + .../hedera/node/config/data/GrpcConfig.java | 31 +- .../hedera/node/config/data/NettyConfig.java | 32 +- .../service/consensus/ConsensusService.java | 8 + .../consensus/ConsensusServiceDefinition.java | 99 ++++ .../node/app/service/file/FileService.java | 8 + .../service/file/FileServiceDefinition.java | 60 +++ .../service/networkadmin/FreezeService.java | 8 + .../networkadmin/FreezeServiceDefinition.java | 51 ++ .../service/networkadmin/NetworkService.java | 8 + .../NetworkServiceDefinition.java | 56 ++ .../app/service/schedule/ScheduleService.java | 8 + .../schedule/ScheduleServiceDefinition.java | 56 ++ .../app/service/contract/ContractService.java | 8 + .../SmartContractServiceDefinition.java | 64 +++ .../token/CryptoServiceDefinition.java | 68 +++ .../node/app/service/token/TokenService.java | 8 + .../service/token/TokenServiceDefinition.java | 71 +++ .../node/app/service/util/UtilService.java | 8 + .../service/util/UtilServiceDefinition.java | 51 ++ settings.gradle.kts | 2 +- 33 files changed, 1787 insertions(+), 122 deletions(-) create mode 100644 hedera-node/hedera-app/src/itest/java/grpc/HelidonManagerTest.java create mode 100644 hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/GrpcInjectionModule.java create mode 100644 hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/GrpcServerManager.java create mode 100644 hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/HelidonGrpcServerManager.java create mode 100644 hedera-node/hedera-app/src/main/java/com/hedera/node/app/services/ServicesRegistry.java create mode 100644 hedera-node/hedera-app/src/main/java/com/hedera/node/app/services/ServicesRegistryImpl.java create mode 100644 hedera-node/hedera-app/src/test/java/com/hedera/node/app/grpc/HelidonGrpcServerManagerTest.java create mode 100644 hedera-node/hedera-consensus-service/src/main/java/com/hedera/node/app/service/consensus/ConsensusServiceDefinition.java create mode 100644 hedera-node/hedera-file-service/src/main/java/com/hedera/node/app/service/file/FileServiceDefinition.java create mode 100644 hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/FreezeServiceDefinition.java create mode 100644 hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/NetworkServiceDefinition.java create mode 100644 hedera-node/hedera-schedule-service/src/main/java/com/hedera/node/app/service/schedule/ScheduleServiceDefinition.java create mode 100644 
hedera-node/hedera-smart-contract-service/src/main/java/com/hedera/node/app/service/contract/SmartContractServiceDefinition.java create mode 100644 hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/CryptoServiceDefinition.java create mode 100644 hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/TokenServiceDefinition.java create mode 100644 hedera-node/hedera-util-service/src/main/java/com/hedera/node/app/service/util/UtilServiceDefinition.java diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/Service.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/Service.java index e7014ff38001..f01a1974989f 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/Service.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/Service.java @@ -16,8 +16,12 @@ package com.hedera.node.app.spi; +import static java.util.Collections.emptySet; + import com.hedera.node.app.spi.state.SchemaRegistry; +import com.hedera.pbj.runtime.RpcServiceDefinition; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Set; /** * A definition of an interface that will be implemented by each conceptual "service" like @@ -33,6 +37,17 @@ public interface Service { @NonNull String getServiceName(); + /** + * If this service exposes RPC endpoints, then this method returns the RPC service definitions. + * Otherwise, it returns an empty set. + * + * @return The RPC service definitions if this service is exposed via RPC. + */ + @NonNull + default Set rpcDefinitions() { + return emptySet(); + } + /** * Registers the schemas this service really uses with the given {@link SchemaRegistry}. * diff --git a/hedera-node/hedera-app-spi/src/testFixtures/java/com/hedera/node/app/spi/fixtures/util/LogCaptor.java b/hedera-node/hedera-app-spi/src/testFixtures/java/com/hedera/node/app/spi/fixtures/util/LogCaptor.java index 64a4f8497c4b..c87a2d14063d 100644 --- a/hedera-node/hedera-app-spi/src/testFixtures/java/com/hedera/node/app/spi/fixtures/util/LogCaptor.java +++ b/hedera-node/hedera-app-spi/src/testFixtures/java/com/hedera/node/app/spi/fixtures/util/LogCaptor.java @@ -66,12 +66,16 @@ public void stopCapture() { this.logger.removeAppender(appender); } + public List debugLogs() { + return eventsAt(Level.DEBUG); + } + public List infoLogs() { - return eventsAt("INFO"); + return eventsAt(Level.INFO); } public List errorLogs() { - return eventsAt("ERROR"); + return eventsAt(Level.ERROR); } /** @@ -85,7 +89,7 @@ public List errorLogs() { * @param level Usually one of [DEBUG|INFO|WARN|ERROR] * @return List of log events at the given log level */ - private List eventsAt(@NonNull final String level) { + private List eventsAt(@NonNull final Level level) { requireNonNull(level); final String logText = capture.toString(); final Matcher m = EVENT_PATTERN.matcher(logText); @@ -102,7 +106,7 @@ private List eventsAt(@NonNull final String level) { } // now check if the current match is the level we're looking for String matchLevel = m.group(0); - if (level.equals(matchLevel)) { + if (level.equals(Level.getLevel(matchLevel))) { prevLevelMatch = true; } // move the start index for the next search to the end of the current match diff --git a/hedera-node/hedera-app/src/itest/java/grpc/HelidonManagerTest.java b/hedera-node/hedera-app/src/itest/java/grpc/HelidonManagerTest.java new file mode 100644 index 000000000000..c6db25c07a14 --- /dev/null +++ 
b/hedera-node/hedera-app/src/itest/java/grpc/HelidonManagerTest.java @@ -0,0 +1,508 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package grpc; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.hedera.hapi.node.base.Transaction; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.hapi.node.transaction.TransactionResponse; +import com.hedera.node.app.config.VersionedConfigImpl; +import com.hedera.node.app.grpc.GrpcServiceBuilder; +import com.hedera.node.app.grpc.HelidonGrpcServerManager; +import com.hedera.node.app.spi.Service; +import com.hedera.node.app.spi.fixtures.util.LogCaptor; +import com.hedera.node.config.data.GrpcConfig; +import com.hedera.node.config.data.NettyConfig; +import com.hedera.pbj.runtime.RpcMethodDefinition; +import com.hedera.pbj.runtime.RpcServiceDefinition; +import com.hedera.pbj.runtime.io.buffer.BufferedData; +import com.swirlds.common.metrics.Metrics; +import com.swirlds.common.metrics.config.MetricsConfig; +import com.swirlds.common.metrics.platform.DefaultMetrics; +import com.swirlds.common.metrics.platform.DefaultMetricsFactory; +import com.swirlds.common.metrics.platform.MetricKeyRegistry; +import com.swirlds.common.system.NodeId; +import com.swirlds.config.api.Configuration; +import com.swirlds.config.api.ConfigurationBuilder; +import com.swirlds.config.api.source.ConfigSource; +import edu.umd.cs.findbugs.annotations.NonNull; +import io.grpc.ManagedChannelBuilder; +import io.helidon.grpc.client.ClientServiceDescriptor; +import io.helidon.grpc.client.GrpcServiceClient; +import java.net.ConnectException; +import java.net.InetSocketAddress; +import java.net.ServerSocket; +import java.net.Socket; +import java.nio.charset.StandardCharsets; +import java.util.NoSuchElementException; +import java.util.Set; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import org.apache.logging.log4j.LogManager; +import org.assertj.core.api.Assumptions; +import org.jetbrains.annotations.Nullable; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; + +final class HelidonManagerTest { + private static final ScheduledExecutorService METRIC_EXECUTOR = Executors.newSingleThreadScheduledExecutor(); + private static final NodeId THIS_NODE = new NodeId(3); + + private Metrics createMetrics(@NonNull final Configuration config) { + final MetricsConfig metricsConfig = config.getConfigData(MetricsConfig.class); + return new DefaultMetrics( + THIS_NODE, new MetricKeyRegistry(), METRIC_EXECUTOR, new DefaultMetricsFactory(), metricsConfig); + } + + private Configuration createConfig(@NonNull final TestSource testConfig) { + return ConfigurationBuilder.create() + 
.withConfigDataType(MetricsConfig.class) + .withConfigDataType(GrpcConfig.class) + .withConfigDataType(NettyConfig.class) + .withSource(testConfig) + .build(); + } + + private HelidonGrpcServerManager createServerManager(@NonNull final TestSource testConfig) { + final var config = createConfig(testConfig); + return new HelidonGrpcServerManager( + () -> new VersionedConfigImpl(config, 1), + Set::of, + (req, res) -> {}, + (req, res) -> {}, + createMetrics(config)); + } + + /** + * If the port number is set to 0, then we allow the computer to choose an "ephemeral" port for us automatically. + * We won't know ahead of time what the port number is. + */ + @Test + @DisplayName("Ephemeral ports are supported") + @Timeout(value = 5) + void ephemeralPorts() { + // Given a server with 0 as the port number for both port and tls port + final var subject = createServerManager(new TestSource().withPort(0).withTlsPort(0)); + + try { + // When we start the server + subject.start(); + + // Then we find that the server has started + assertThat(subject.isRunning()).isTrue(); + // And that the port numbers are no longer 0 + assertThat(subject.port()).isNotZero(); + assertThat(subject.tlsPort()).isNotZero(); + // And that the server is listening on the ports + // FUTURE: I'm only testing the plain port and not the tls port because these tests do not yet support tls + // properly. But when they do, we should check the tls port too. + assertThat(isListening(subject.port())).isTrue(); + } finally { + subject.stop(); + } + } + + /** + * Verifies that when actual port numbers are given, they are used. This test is inherently a little unstable, + * because nothing can be listening on the port that we select. We will attempt to make it more stable by + * selecting ports at random, and checking whether they are in use before proceeding. + */ + @Test + @DisplayName("Non-ephemeral ports are supported") + @Timeout(value = 5) + void nonEphemeralPorts() { + // Given a server configured with actual port numbers + final var subject = createServerManager(new TestSource().withFreePort().withFreeTlsPort()); + + try { + // When we start the server + subject.start(); + + // Then we find that the server has started + assertThat(subject.isRunning()).isTrue(); + // And that it is listening on the ports + // FUTURE: I'm only testing the plain port and not the tls port because these tests do not yet support tls + // properly. But when they do, we should check the tls port too. + assertThat(isListening(subject.port())).isTrue(); + } finally { + subject.stop(); + } + } + + @Test + @DisplayName("Starting a server twice throws") + @Timeout(value = 5) + void startingTwice() { + // Given a server with a configuration that will start + final var subject = createServerManager(new TestSource()); + + // When we start the server, we find that it starts. And when we start it again, it throws. + try { + subject.start(); + assertThat(subject.port()).isNotZero(); + assertThat(subject.isRunning()).isTrue(); + assertThat(isListening(subject.port())).isTrue(); + assertThatThrownBy(subject::start) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining("Server already started"); + } finally { + subject.stop(); + } + } + + @Test + @DisplayName("Stopping a server") + @Timeout(value = 5) + void stoppingAStartedServer() { + // Given a server with a configuration that will start + final var subject = createServerManager(new TestSource()); + + // When we start the server, we find that it starts, and when we stop it, we find that it stops again. 
+ try { + subject.start(); + assertThat(subject.port()).isNotZero(); + assertThat(subject.isRunning()).isTrue(); + assertThat(isListening(subject.port())).isTrue(); + } finally { + subject.stop(); + } + + assertThat(subject.isRunning()).isFalse(); + assertThat(subject.port()).isEqualTo(-1); + assertThat(subject.tlsPort()).isEqualTo(-1); + } + + @Test + @DisplayName("Stopping a server is idempotent") + @Timeout(value = 5) + void stoppingIsIdempotent() { + // Given a server with a configuration that will start + final var subject = createServerManager(new TestSource()); + + // When we start the server, it starts. + try { + subject.start(); + assertThat(subject.port()).isNotZero(); + } finally { + subject.stop(); + } + + // And if we stop it multiple times, this is OK + assertThat(subject.isRunning()).isFalse(); + subject.stop(); + assertThat(subject.isRunning()).isFalse(); + assertThat(subject.port()).isEqualTo(-1); + assertThat(subject.tlsPort()).isEqualTo(-1); + } + + @Test + @DisplayName("Restarting a server") + @Timeout(value = 50) + void restart() { + // Given a server with a configuration that will start + final var subject = createServerManager(new TestSource()); + + // We can cycle over start / stop / start / stop cycles, and it is all good + for (int i = 0; i < 10; i++) { + try { + subject.start(); + assertThat(subject.port()).isNotZero(); + assertThat(subject.isRunning()).isTrue(); + assertThat(isListening(subject.port())).isTrue(); + } finally { + subject.stop(); + assertThat(subject.isRunning()).isFalse(); + assertThat(subject.port()).isEqualTo(-1); + assertThat(subject.tlsPort()).isEqualTo(-1); + } + } + } + + @Test + @Timeout(value = 5) + @DisplayName("Starting a server with a port already in use but is then released") + void portBecomesFreeEventually() throws Exception { + // Given a server with a configuration that will start + final var testConfig = new TestSource() + .withFreePort() + .withFreeTlsPort() + .withStartRetries(10) + .withStartRetryIntervalMs(10); + final var subject = createServerManager(testConfig); + // And a server socket listening on the port that the server intends to use + try (final var serverSocket = new ServerSocket()) { + serverSocket.setReuseAddress(true); + serverSocket.bind(new InetSocketAddress(testConfig.port)); + assertThat(serverSocket.isBound()).isTrue(); + + // When we start the gRPC server on that same port + final LogCaptor logCaptor = new LogCaptor(LogManager.getLogger(HelidonGrpcServerManager.class)); + final var th = new Thread(subject::start); + th.start(); + + // Wait for the server to try again to startup. We wait until we've seen that the server actually tried + // to start, and then we will proceed with the rest of the test (we want to make sure the port was occupied + // when the server tried to start). + while (true) { + assertThat(subject.isRunning()).isFalse(); + final var logs = String.join("\n", logCaptor.infoLogs()); + System.out.println(logs); + if (logs.contains("Still trying to start server... 9 tries remaining")) { + break; + } + TimeUnit.MILLISECONDS.sleep(10); + } + + // And when we stop the socket that was using the port + serverSocket.close(); + + // Wait for the server to start + while (!subject.isRunning()) { + TimeUnit.MILLISECONDS.sleep(10); + } + + // Then we find that the server finally started up! + assertThat(subject.isRunning()).isTrue(); + // FUTURE: I'm only testing the plain port and not the tls port because these tests do not yet support tls + // properly. 
But when they do, we should check the tls port too. + assertThat(isListening(subject.port())).isTrue(); + } finally { + subject.stop(); + } + } + + /** + * Start with a port that is already in use, and observe that after N retries and X millis per retry, the server + * ultimately fails to start. + * + *

NOTE: The Helidon server appears to have its own 5-second timeout while starting. As such, I have to have a + * longer timeout value here so the timeout will not expire prematurely. + */ + @Test + @Timeout(value = 50) + @DisplayName("Starting a server with a port already in use") + void portInUse() throws Exception { + // Given a server with a configuration that will start + final var testConfig = new TestSource() + .withFreePort() + .withFreeTlsPort() + .withStartRetries(1) + .withStartRetryIntervalMs(10); + final var subject = createServerManager(testConfig); + // And a server socket listening on the port that the server intends to use + try (final var serverSocket = new ServerSocket()) { + serverSocket.setReuseAddress(true); + serverSocket.bind(new InetSocketAddress(testConfig.port)); + assertThat(serverSocket.isBound()).isTrue(); + + // Start the gRPC server, trying to use the same port, which will eventually give up and throw + assertThatThrownBy(subject::start) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Failed to start gRPC server"); + } finally { + subject.stop(); + } + } + + @Test + @Timeout(value = 5) + @DisplayName("Transactions and Queries are routed") + void requestsAreRouted() { + // Given a server with a configuration that will start, and tx and query handlers that register they were + // called, so we can make sure calls work. + final var config = createConfig(new TestSource()); + final var txCounter = new AtomicInteger(0); + final var qCounter = new AtomicInteger(0); + + final var testService = new Service() { + @NonNull + @Override + public String getServiceName() { + return "TestService"; + } + + @NonNull + @Override + public Set rpcDefinitions() { + return Set.of(new RpcServiceDefinition() { + @NonNull + @Override + public String basePath() { + return "proto.TestService"; + } + + @NonNull + @Override + public Set> methods() { + return Set.of( + new RpcMethodDefinition<>("tx", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("q", Query.class, Response.class)); + } + }); + } + }; + + final var metrics = createMetrics(config); + final var subject = new HelidonGrpcServerManager( + () -> new VersionedConfigImpl(config, 1), + () -> Set.of(testService), + (req, res) -> txCounter.incrementAndGet(), + (req, res) -> qCounter.incrementAndGet(), + metrics); + + // When we start the server, we can actually make requests to it! 
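+            // The client below is built from the same "proto.TestService" definition the server registered, so its
+            // "tx" and "q" calls are routed to the handlers above and bump the counters.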
+ try { + subject.start(); + + final var channel = ManagedChannelBuilder.forAddress("localhost", subject.port()) + .usePlaintext() + .build(); + + final var sd = new GrpcServiceBuilder("proto.TestService", (req, res) -> {}, (req, res) -> {}) + .transaction("tx") + .query("q") + .build(metrics); + + final var builder = io.grpc.ServiceDescriptor.newBuilder("proto.TestService"); + sd.methods().forEach(m -> builder.addMethod(m.descriptor())); + final var clientServiceDescriptor = builder.build(); + final var client = GrpcServiceClient.builder( + channel, + ClientServiceDescriptor.builder(clientServiceDescriptor) + .build()) + .build(); + + final var bb = BufferedData.wrap("anything".getBytes(StandardCharsets.UTF_8)); + client.blockingUnary("tx", bb); + client.blockingUnary("q", bb); + + assertThat(txCounter.get()).isEqualTo(1); + assertThat(qCounter.get()).isEqualTo(1); + + } finally { + subject.stop(); + } + } + + /** + * Checks whether a server process is listening on the given port + * + * @param portNumber The port to check + */ + private static boolean isListening(int portNumber) { + try (final var socket = new Socket("localhost", portNumber)) { + return socket.isConnected(); + } catch (ConnectException connect) { + return false; + } catch (Exception e) { + throw new RuntimeException( + "Unexpected error while checking whether the port '" + portNumber + "' was free", e); + } + } + + /** + * A config source used by this test to specify the config values + */ + private static final class TestSource implements ConfigSource { + private int port = 0; + private int tlsPort = 0; + private int startRetries = 3; + private int startRetryIntervalMs = 100; + + @Override + public int getOrdinal() { + return 1000; + } + + @NonNull + @Override + public Set getPropertyNames() { + return Set.of("grpc.port", "grpc.tlsPort", "netty.startRetryIntervalMs", "netty.startRetries"); + } + + @Nullable + @Override + public String getValue(@NonNull String s) throws NoSuchElementException { + return switch (s) { + case "grpc.port" -> String.valueOf(port); + case "grpc.tlsPort" -> String.valueOf(tlsPort); + case "netty.startRetryIntervalMs" -> String.valueOf(startRetryIntervalMs); + case "netty.startRetries" -> String.valueOf(startRetries); + default -> null; + }; + } + + public TestSource withPort(final int value) { + this.port = value; + return this; + } + + // Locates a free port on its own + public TestSource withFreePort() { + this.port = findFreePort(); + Assumptions.assumeThat(this.port).isGreaterThan(0); + return this; + } + + public TestSource withTlsPort(final int value) { + this.tlsPort = value; + return this; + } + + // Locates a free port on its own + public TestSource withFreeTlsPort() { + this.tlsPort = findFreePort(); + Assumptions.assumeThat(this.tlsPort).isGreaterThan(0); + return this; + } + + public TestSource withStartRetries(final int value) { + this.startRetries = value; + return this; + } + + public TestSource withStartRetryIntervalMs(final int value) { + this.startRetryIntervalMs = value; + return this; + } + + private int findFreePort() { + for (int i = 1024; i < 10_000; i++) { + if (i != port && i != tlsPort && isPortFree(i)) { + return i; + } + } + + return -1; + } + + /** + * Checks whether the given port is free + * + * @param portNumber The port to check + */ + private boolean isPortFree(int portNumber) { + return !isListening(portNumber); + } + } +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java 
b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java index 3b7e167512ab..90d7efe066a6 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/Hedera.java @@ -25,12 +25,8 @@ import com.hedera.hapi.node.base.AccountID; import com.hedera.node.app.config.ConfigProviderImpl; -import com.hedera.node.app.grpc.GrpcServiceBuilder; -import com.hedera.node.app.service.consensus.ConsensusService; import com.hedera.node.app.service.consensus.impl.ConsensusServiceImpl; -import com.hedera.node.app.service.contract.ContractService; import com.hedera.node.app.service.contract.impl.ContractServiceImpl; -import com.hedera.node.app.service.file.FileService; import com.hedera.node.app.service.file.impl.FileServiceImpl; import com.hedera.node.app.service.mono.context.StateChildrenProvider; import com.hedera.node.app.service.mono.context.properties.BootstrapProperties; @@ -52,10 +48,10 @@ import com.hedera.node.app.service.networkadmin.impl.NetworkServiceImpl; import com.hedera.node.app.service.schedule.ScheduleService; import com.hedera.node.app.service.schedule.impl.ScheduleServiceImpl; -import com.hedera.node.app.service.token.TokenService; import com.hedera.node.app.service.token.impl.TokenServiceImpl; -import com.hedera.node.app.service.util.UtilService; import com.hedera.node.app.service.util.impl.UtilServiceImpl; +import com.hedera.node.app.services.ServicesRegistry; +import com.hedera.node.app.services.ServicesRegistryImpl; import com.hedera.node.app.spi.Service; import com.hedera.node.app.spi.state.WritableFreezeStore; import com.hedera.node.app.spi.state.WritableKVState; @@ -88,16 +84,10 @@ import com.swirlds.platform.gui.SwirldsGui; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; -import io.helidon.grpc.server.GrpcRouting; -import io.helidon.grpc.server.GrpcServer; -import io.helidon.grpc.server.GrpcServerConfiguration; import java.nio.charset.Charset; -import java.nio.file.Path; import java.security.NoSuchAlgorithmException; -import java.util.HashMap; import java.util.Locale; -import java.util.Map; -import java.util.concurrent.CountDownLatch; +import java.util.Set; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -151,14 +141,14 @@ public final class Hedera implements SwirldMain { private record ServiceRegistration( @NonNull String name, @NonNull Service service, @NonNull MerkleSchemaRegistry registry) {} + /** Required for state management. Used by platform for deserialization of state. */ + private final ConstructableRegistry constructableRegistry; /** The registry of all known services */ - private final Map serviceRegistry; + private final ServicesRegistry servicesRegistry; /** The current version of THIS software */ private final SerializableSemVers version; /** The BootstrapProperties for this node */ private final BootstrapProperties bootstrapProps; - /** A latch used to signal shutdown of the gRPC server */ - private final CountDownLatch shutdownLatch = new CountDownLatch(1); /** The Hashgraph Platform. This is set during state initialization. 
*/ private Platform platform; /** The configuration for this node */ @@ -189,6 +179,8 @@ private record ServiceRegistration( @NonNull final ConstructableRegistry constructableRegistry, @NonNull final BootstrapProperties bootstrapProps) { + this.constructableRegistry = requireNonNull(constructableRegistry); + // Load properties, configuration, and other things that can be done before a state is created. this.bootstrapProps = requireNonNull(bootstrapProps); @@ -196,14 +188,19 @@ private record ServiceRegistration( version = SEMANTIC_VERSIONS.deployedSoftwareVersion(); logger.info("Creating Hedera Consensus Node v{} with HAPI v{}", version.getServices(), version.getProto()); - // Create all the service implementations, and register their schemas. + // Create all the service implementations logger.info("Registering schemas for services"); - final var path = System.getProperty("merkle.db.path", null); // Might want to move to Bootstrap props - serviceRegistry = createServicesRegistry(constructableRegistry, path == null ? null : Path.of(path)); - serviceRegistry - .values() - .forEach(reg -> - logger.info("Registered service {} with implementation {}", reg.name, reg.service.getClass())); + // FUTURE: Use the service loader framework to load these services! + this.servicesRegistry = new ServicesRegistryImpl(Set.of( + new ConsensusServiceImpl(), + new ContractServiceImpl(), + new FileServiceImpl(), + new FreezeServiceImpl(), + new NetworkServiceImpl(), + new ScheduleServiceImpl(), + new TokenServiceImpl(), + new UtilServiceImpl(), + new RecordCacheService())); // Register MerkleHederaState with the ConstructableRegistry, so we can use a constructor // OTHER THAN the default constructor to make sure it has the config and other info @@ -218,37 +215,6 @@ private record ServiceRegistration( } } - /** - * Create all service implementations and register their schemas. Return these as a map of service name to - * {@link ServiceRegistration}. Later, when we migrate, we will use this map to migrate each service to its latest - * schema. - */ - private Map createServicesRegistry( - @NonNull final ConstructableRegistry constructableRegistry, @Nullable final Path storageDir) { - - final var services = Map.of( - ConsensusService.NAME, new ConsensusServiceImpl(), - ContractService.NAME, new ContractServiceImpl(), - FileService.NAME, new FileServiceImpl(), - FreezeService.NAME, new FreezeServiceImpl(), - NetworkService.NAME, new NetworkServiceImpl(), - ScheduleService.NAME, new ScheduleServiceImpl(), - TokenService.NAME, new TokenServiceImpl(), - UtilService.NAME, new UtilServiceImpl(), - RecordCacheService.NAME, new RecordCacheService()); - - final var map = new HashMap(); - for (final var entry : services.entrySet()) { - final var serviceName = entry.getKey(); - final var service = entry.getValue(); - final var registry = new MerkleSchemaRegistry(constructableRegistry, serviceName); - service.registerSchemas(registry); - map.put(serviceName, new ServiceRegistration(serviceName, service, registry)); - } - - return map; - } - /** * {@inheritDoc} *

@@ -271,8 +237,8 @@ public SoftwareVersion getSoftwareVersion() { /** * {@inheritDoc} - *

- * Called by the platform ONLY during genesis (that is, if there is no saved state). However, it is also + * + *

Called by the platform ONLY during genesis (that is, if there is no saved state). However, it is also * called indirectly by {@link ConstructableRegistry} due to registration in this class' constructor. * * @return A new {@link SwirldState} instance. @@ -341,10 +307,13 @@ private void onMigrate( deserializedVersion == null ? null : PbjConverter.toPbj(deserializedVersion.getServices()); final var currentVersion = PbjConverter.toPbj(version.getServices()); logger.info("Migrating from version {} to {}", previousVersion, currentVersion); - for (final var registration : serviceRegistry.values()) { + for (final var service : servicesRegistry.services()) { // FUTURE We should have metrics here to keep track of how long it takes to migrate each service - registration.registry.migrate(state, previousVersion, currentVersion, configProvider.getConfiguration()); - logger.info("Migrated Service {}", registration.name); + final var serviceName = service.getServiceName(); + final var registry = new MerkleSchemaRegistry(constructableRegistry, serviceName); + service.registerSchemas(registry); + registry.migrate(state, previousVersion, currentVersion, configProvider.getConfiguration()); + logger.info("Migrated Service {}", serviceName); } } @@ -356,14 +325,14 @@ private void onMigrate( /** * {@inheritDoc} - *

- * Called AFTER init and migrate have been called on the state (either the new state created from + * + *

Called AFTER init and migrate have been called on the state (either the new state created from * {@link #newState()} or an instance of {@link MerkleHederaState} created by the platform and loaded from the saved * state). */ @Override public void init(@NonNull final Platform platform, @NonNull final NodeId nodeId) { - assert this.platform == platform : "Platform should be the same instance"; + assert this.platform == platform : "Platform must be the same instance"; logger.info("Initializing Hedera app with HederaNode#{}", nodeId); // Ensure the prefetch queue is created and thread pool is active instead of waiting @@ -450,60 +419,20 @@ private void validateLedgerState() { /*================================================================================================================== * - * Run the app. + * Other app lifecycle methods * =================================================================================================================*/ /** * {@inheritDoc} - *

- * Called by the platform after ALL initialization to start the gRPC servers and begin operation. + * + *

Called by the platform after ALL initialization to start the gRPC servers and begin operation, or by + * the notification listener when it is time to restart the gRPC server after it had been stopped (such as during + * reconnect). */ @Override public void run() { - // Start the gRPC server. - logger.info("Starting modular gRPC server"); - final var port = daggerApp.nodeLocalProperties().workflowsPort(); - - // Create the Ingest and Query workflows. While we are in transition, some required facilities come - // from `hedera-app`, and some from `mono-service`. Eventually we'll transition all facilities to be - // from the app module. But this code can be ignorant of that complexity, since the Dagger dependency - // graph takes care of it. - final var ingestWorkflow = - daggerApp.ingestComponentFactory().get().create().ingestWorkflow(); - final var queryWorkflow = - daggerApp.queryComponentFactory().get().create().queryWorkflow(); - - // Setup and start the grpc server. - // At some point I'd like to somehow move the metadata for which transactions are supported - // by a service to the service, instead of having them all hardcoded here. It isn't clear - // yet what that API would look like, so for now we do it this way. Maybe we should have - // a set of annotations that generate the metadata, or maybe we have some code. Whatever - // we do should work also with workflows. - final var grpcServer = GrpcServer.create( - GrpcServerConfiguration.builder().port(port).build(), - GrpcRouting.builder() - .register(new GrpcServiceBuilder("proto.ConsensusService", ingestWorkflow, queryWorkflow) - .transaction("createTopic") - .transaction("updateTopic") - .transaction("deleteTopic") - .query("getTopicInfo") - .transaction("submitMessage") - .build(daggerApp.platform().getContext().getMetrics())) - .build()); - grpcServer.whenShutdown().thenAccept(server -> shutdownLatch.countDown()); - grpcServer.start(); - - // Block this main thread until the server terminates. - // TODO: Uncomment this code once we enable all operations to work with workflows. - // Currently, we are enabling each operation step-by-step to work with new Grpc binding. - // try { - // shutdownLatch.await(); - // } catch (InterruptedException ignored) { - // // An interrupt on this thread means we want to shut down the server. - // shutdown(); - // Thread.currentThread().interrupt(); - // } + startGrpcServer(); } /** @@ -530,15 +459,20 @@ private void onHandleConsensusRound( /*================================================================================================================== * - * Shutdown of a Hedera node + * gRPC Server Lifecycle * =================================================================================================================*/ + /** Start the gRPC Server if it is not already running. */ + void startGrpcServer() { + daggerApp.grpcServerManager().start(); + } + /** * Called to perform orderly shutdown of the gRPC servers. */ - public void shutdown() { - shutdownLatch.countDown(); + public void shutdownGrpcServer() { + daggerApp.grpcServerManager().stop(); } /*================================================================================================================== @@ -549,7 +483,7 @@ public void shutdown() { /** Implements the code flow for initializing the state of a new Hedera node with NO SAVED STATE. 
*/ private void genesis(@NonNull final MerkleHederaState state, @NonNull final SwirldDualState dualState) { - logger.debug("Genesis Initialization"); + logger.info("Genesis Initialization"); logger.info("Initializing Configuration"); this.configProvider = new ConfigProviderImpl(true); @@ -756,6 +690,7 @@ private void initializeDagger(@NonNull final MerkleHederaState state, @NonNull f .crypto(CryptographyHolder.get()) .selfId(nodeSelfAccount) .genesisUsage(trigger == InitTrigger.GENESIS) + .servicesRegistry(servicesRegistry) .build(); } } diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/HederaInjectionComponent.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/HederaInjectionComponent.java index 82ea155744b6..f901a5f4a2df 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/HederaInjectionComponent.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/HederaInjectionComponent.java @@ -25,6 +25,8 @@ import com.hedera.node.app.config.ConfigModule; import com.hedera.node.app.config.GenesisUsage; import com.hedera.node.app.fees.FeesInjectionModule; +import com.hedera.node.app.grpc.GrpcInjectionModule; +import com.hedera.node.app.grpc.GrpcServerManager; import com.hedera.node.app.info.InfoInjectionModule; import com.hedera.node.app.metrics.MetricsInjectionModule; import com.hedera.node.app.service.mono.LegacyMonoInjectionModule; @@ -35,6 +37,7 @@ import com.hedera.node.app.service.mono.state.StateModule; import com.hedera.node.app.service.mono.utils.NonAtomicReference; import com.hedera.node.app.services.ServicesInjectionModule; +import com.hedera.node.app.services.ServicesRegistry; import com.hedera.node.app.solvency.SolvencyInjectionModule; import com.hedera.node.app.spi.records.RecordCache; import com.hedera.node.app.state.HederaState; @@ -66,6 +69,7 @@ WorkflowsInjectionModule.class, HederaStateInjectionModule.class, FeesInjectionModule.class, + GrpcInjectionModule.class, MetricsInjectionModule.class, AuthorizerInjectionModule.class, InfoInjectionModule.class, @@ -87,8 +91,13 @@ public interface HederaInjectionComponent extends ServicesApp { RecordCache recordCache(); + GrpcServerManager grpcServerManager(); + @Component.Builder interface Builder { + @BindsInstance + Builder servicesRegistry(ServicesRegistry registry); + @BindsInstance Builder initTrigger(InitTrigger initTrigger); diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/GrpcInjectionModule.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/GrpcInjectionModule.java new file mode 100644 index 000000000000..bd01ea46abc3 --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/GrpcInjectionModule.java @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.grpc; + +import dagger.Binds; +import dagger.Module; + +/** A Dagger module for facilities in the {@link com.hedera.node.app.grpc} package.
*/ +@Module +public interface GrpcInjectionModule { + @Binds + GrpcServerManager provideGrpcServerManager(HelidonGrpcServerManager serverManager); +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/GrpcServerManager.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/GrpcServerManager.java new file mode 100644 index 000000000000..5c9f41822779 --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/GrpcServerManager.java @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.grpc; + +import com.hedera.node.config.data.GrpcConfig; + +/** + * Manages the lifecycle of all gRPC servers. + * + *

Our node will run at least one gRPC server on the port specified in the {@link GrpcConfig}. It will also attempt + to run a gRPC server on the tls-port specified in the same config. If the TLS server fails to start, it logs + a warning and continues. + */ +public interface GrpcServerManager { + /** + * Starts the gRPC servers. + * @throws IllegalStateException if the servers are already running + */ + void start(); + + /** + * Stops the gRPC servers. This call is idempotent. + */ + void stop(); + + /** + * True if this server is started and running. + * + * @return {@code true} if the server is running, false otherwise. + */ + boolean isRunning(); + + /** + * Gets the port that the non-tls gRPC server is listening on. + * + * @return the port of the listening server, or -1 if no server is listening on that port. Note that this value may + * be different from the port designation in configuration. If the special port 0 is used in config, it will + * denote using an ephemeral port from the OS ephemeral port range. The actual port used will be returned by + * this method. + */ + int port(); + + /** + * Gets the port that the tls gRPC server is listening on. + * + * @return the port of the listening tls server, or -1 if no server is listening on that port + */ + int tlsPort(); +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/HelidonGrpcServerManager.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/HelidonGrpcServerManager.java new file mode 100644 index 000000000000..fb6de3fe86f2 --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/grpc/HelidonGrpcServerManager.java @@ -0,0 +1,241 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package com.hedera.node.app.grpc; + +import static java.util.Objects.requireNonNull; + +import com.hedera.hapi.node.base.Transaction; +import com.hedera.node.app.services.ServicesRegistry; +import com.hedera.node.app.workflows.ingest.IngestWorkflow; +import com.hedera.node.app.workflows.query.QueryWorkflow; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.GrpcConfig; +import com.hedera.node.config.data.NettyConfig; +import com.swirlds.common.metrics.Metrics; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import io.helidon.common.configurable.Resource; +import io.helidon.common.configurable.ResourceException; +import io.helidon.grpc.core.GrpcTlsDescriptor; +import io.helidon.grpc.server.GrpcRouting; +import io.helidon.grpc.server.GrpcServer; +import io.helidon.grpc.server.GrpcServerConfiguration; +import io.helidon.grpc.server.ServiceDescriptor; +import java.nio.file.Path; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.stream.Collectors; +import javax.inject.Inject; +import javax.inject.Singleton; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * An implementation of {@link GrpcServerManager} based on Helidon gRPC. + * + *

This implementation uses two different ports for gRPC and gRPC+TLS. If the TLS server cannot be started, then + a warning is logged, but we continue to function without TLS. This is useful during testing and local development + where TLS may not be available. + */ +@Singleton +public final class HelidonGrpcServerManager implements GrpcServerManager { + /** The logger instance for this class. */ + private static final Logger logger = LogManager.getLogger(HelidonGrpcServerManager.class); + /** The set of {@link ServiceDescriptor}s for services that the gRPC server will expose */ + private final Set<ServiceDescriptor> services; + /** The configuration provider, so we can figure out ports and other information. */ + private final ConfigProvider configProvider; + /** The gRPC server listening on the plain (non-tls) port */ + private GrpcServer server; + /** The gRPC server listening on the TLS port */ + private GrpcServer tlsServer; + + /** + * Create a new instance. + * + * @param configProvider The config provider, so we can figure out ports and other information. + * @param servicesRegistry The set of all services registered with the system + * @param ingestWorkflow The implementation of the {@link IngestWorkflow} to use for transaction rpc methods + * @param queryWorkflow The implementation of the {@link QueryWorkflow} to use for query rpc methods + * @param metrics Used to get/create metrics for each transaction and query method. + */ + @Inject + public HelidonGrpcServerManager( + @NonNull final ConfigProvider configProvider, + @NonNull final ServicesRegistry servicesRegistry, + @NonNull final IngestWorkflow ingestWorkflow, + @NonNull final QueryWorkflow queryWorkflow, + @NonNull final Metrics metrics) { + this.configProvider = requireNonNull(configProvider); + requireNonNull(ingestWorkflow); + requireNonNull(queryWorkflow); + requireNonNull(metrics); + + // Convert the various RPC service definitions into transaction or query endpoints using the GrpcServiceBuilder. + services = servicesRegistry.services().stream() + .flatMap(s -> s.rpcDefinitions().stream()) + .map(d -> { + final var builder = new GrpcServiceBuilder(d.basePath(), ingestWorkflow, queryWorkflow); + d.methods().forEach(m -> { + if (Transaction.class.equals(m.requestType())) { + builder.transaction(m.path()); + } else { + builder.query(m.path()); + } + }); + return builder.build(metrics); + }) + .collect(Collectors.toUnmodifiableSet()); + } + + @Override + public int port() { + return server == null ? -1 : server.port(); + } + + @Override + public int tlsPort() { + return tlsServer == null ?
-1 : tlsServer.port(); + } + + @Override + public boolean isRunning() { + return server != null && server.isRunning(); + } + + @Override + public synchronized void start() { + if (isRunning()) { + logger.error("Cannot start gRPC servers, they have already been started!"); + throw new IllegalStateException("Server already started"); + } + + // Setup the GRPC Routing, such that all grpc services are registered + final var grpcRoutingBuilder = GrpcRouting.builder(); + services.forEach(grpcRoutingBuilder::register); + + logger.info("Starting gRPC servers"); + final var nettyConfig = configProvider.getConfiguration().getConfigData(NettyConfig.class); + final var startRetries = nettyConfig.startRetries(); + final var startRetryIntervalMs = nettyConfig.startRetryIntervalMs(); + + final var grpcConfig = configProvider.getConfiguration().getConfigData(GrpcConfig.class); + final var port = grpcConfig.port(); + + // Start the plain-port server + logger.debug("Starting Helidon gRPC server on port {}", port); + server = GrpcServer.create(GrpcServerConfiguration.builder().port(port), grpcRoutingBuilder); + startServerWithRetry(server, startRetries, startRetryIntervalMs); + logger.debug("Helidon gRPC server listening on port {}", server.port()); + + // Try to start the server listening on the tls port. If this doesn't start, then we just keep going. We should + // rethink whether we want to have two ports per consensus node like this. We do expose both via the proxies, + // but we could have either TLS or non-TLS only on the node itself and have the proxy manage making a TLS + // connection or terminating it, as appropriate. But for now, we support both, with the TLS port being optional. + try { + final var tlsPort = grpcConfig.tlsPort(); + logger.debug("Starting Helidon TLS gRPC server on port {}", tlsPort); + tlsServer = GrpcServer.create( + GrpcServerConfiguration.builder() + .port(tlsPort) + .tlsConfig(GrpcTlsDescriptor.builder() + .enabled(true) + .tlsCert(Resource.create(Path.of(nettyConfig.tlsCrtPath()))) + .tlsKey(Resource.create(Path.of(nettyConfig.tlsKeyPath()))) + .build()) + .build(), + grpcRoutingBuilder); + startServerWithRetry(tlsServer, startRetries, startRetryIntervalMs); + logger.debug("Helidon TLS gRPC server listening on port {}", tlsServer.port()); + } catch (ResourceException e) { + tlsServer = null; + logger.warn("Could not start TLS server, will continue without it: {}", e.getMessage()); + } + } + + @Override + public synchronized void stop() { + logger.info("Shutting down gRPC servers"); + if (server != null) { + logger.info("Shutting down Helidon gRPC server on port {}", server.port()); + terminateServer(server); + server = null; + } + + if (tlsServer != null) { + logger.info("Shutting down Helidon TLS gRPC server on port {}", tlsServer.port()); + terminateServer(tlsServer); + tlsServer = null; + } + } + + /** + * Attempts to start the server. It will retry {@code startRetries} times until it finally gives up with + * {@code startRetryIntervalMs} between attempts. + * + * @param server The server to start + * @param startRetries The number of times to retry, if needed. Non-negative (enforced by config). + * @param startRetryIntervalMs The time interval between retries. Positive (enforced by config). 
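+     * @throws RuntimeException if the server still has not started after the final retry attempt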
+ */ + void startServerWithRetry( + @NonNull final GrpcServer server, final int startRetries, final long startRetryIntervalMs) { + requireNonNull(server); + + var remaining = startRetries; + while (remaining > 0) { + try { + server.start().toCompletableFuture().get(startRetryIntervalMs, TimeUnit.MILLISECONDS); + if (server.isRunning()) return; + } catch (InterruptedException ie) { + Thread.currentThread().interrupt(); + throw new RuntimeException("Interrupted while waiting for server to start", ie); + } catch (ExecutionException | TimeoutException e) { + remaining--; + if (remaining == 0) { + throw new RuntimeException("Failed to start gRPC server"); + } + logger.info("Still trying to start server... {} tries remaining", remaining); + } + } + } + + /** + * Terminates the given server + * + * @param server the server to terminate + */ + private void terminateServer(@Nullable final GrpcServer server) { + if (server == null) { + return; + } + + final var nettyConfig = configProvider.getConfiguration().getConfigData(NettyConfig.class); + final var terminationTimeout = nettyConfig.terminationTimeout(); + + try { + server.shutdown().toCompletableFuture().get(terminationTimeout, TimeUnit.SECONDS); + logger.info("Helidon gRPC server stopped"); + } catch (InterruptedException ie) { + Thread.currentThread().interrupt(); + logger.warn("Interrupted while waiting for Helidon gRPC to terminate!", ie); + } catch (Exception e) { + logger.warn("Exception while waiting for Helidon gRPC to terminate!", e); + } + } +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/services/ServicesRegistry.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/services/ServicesRegistry.java new file mode 100644 index 000000000000..34a7ac8ac93d --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/services/ServicesRegistry.java @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.services; + +import com.hedera.node.app.spi.Service; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Set; +import javax.inject.Singleton; + +/** + * A registry providing access to all services registered with the application. + */ +@Singleton +public interface ServicesRegistry { + /** + * Gets the full set of services registered. + * + * @return The set of services. May be empty.
+ */ + @NonNull + Set<Service> services(); +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/services/ServicesRegistryImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/services/ServicesRegistryImpl.java new file mode 100644 index 000000000000..e9cca8be0de1 --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/services/ServicesRegistryImpl.java @@ -0,0 +1,41 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.services; + +import com.hedera.node.app.spi.Service; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Collections; +import java.util.Set; +import javax.inject.Singleton; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * A simple implementation of {@link ServicesRegistry}. + * + * @param services The services that are registered + */ +@Singleton +public record ServicesRegistryImpl(@NonNull Set<Service> services) implements ServicesRegistry { + private static final Logger logger = LogManager.getLogger(ServicesRegistryImpl.class); + + public ServicesRegistryImpl(@NonNull final Set<Service> services) { + this.services = Collections.unmodifiableSet(services); + this.services.forEach(service -> logger.info( + "Registered service {} with implementation {}", service.getServiceName(), service.getClass())); + } +} diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/components/IngestComponentTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/components/IngestComponentTest.java index 83f977235485..a111272e23a4 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/components/IngestComponentTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/components/IngestComponentTest.java @@ -35,6 +35,7 @@ import com.swirlds.common.system.Platform; import com.swirlds.config.api.Configuration; import com.swirlds.platform.gui.SwirldsGui; +import java.util.Set; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -75,6 +76,7 @@ void setUp() { .initialHash(new Hash()) .maxSignedTxnSize(1024) .genesisUsage(false) + .servicesRegistry(Set::of) .build(); } diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/grpc/HelidonGrpcServerManagerTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/grpc/HelidonGrpcServerManagerTest.java new file mode 100644 index 000000000000..e01e6cc90544 --- /dev/null +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/grpc/HelidonGrpcServerManagerTest.java @@ -0,0 +1,89 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.grpc; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.hedera.node.app.config.VersionedConfigImpl; +import com.hedera.node.app.services.ServicesRegistry; +import com.hedera.node.app.workflows.ingest.IngestWorkflow; +import com.hedera.node.app.workflows.query.QueryWorkflow; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.data.GrpcConfig; +import com.hedera.node.config.data.NettyConfig; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import com.swirlds.common.metrics.Metrics; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Set; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +final class HelidonGrpcServerManagerTest { + + private ConfigProvider configProvider; + private ServicesRegistry services; + private IngestWorkflow ingestWorkflow; + private QueryWorkflow queryWorkflow; + private Metrics metrics; + + @BeforeEach + void setUp(@Mock @NonNull final Metrics metrics) { + final var config = new HederaTestConfigBuilder(false) + .withConfigDataType(GrpcConfig.class) + .withConfigDataType(NettyConfig.class) + .getOrCreateConfig(); + + this.configProvider = () -> new VersionedConfigImpl(config, 1); + this.metrics = metrics; + this.services = Set::of; // An empty set of services + this.ingestWorkflow = (req, res) -> {}; + this.queryWorkflow = (req, res) -> {}; + } + + @Test + @DisplayName("Null arguments are not allowed") + @SuppressWarnings("DataFlowIssue") + void nullArgsThrow() { + assertThatThrownBy(() -> new HelidonGrpcServerManager(null, services, ingestWorkflow, queryWorkflow, metrics)) + .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> + new HelidonGrpcServerManager(configProvider, null, ingestWorkflow, queryWorkflow, metrics)) + .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> new HelidonGrpcServerManager(configProvider, services, null, queryWorkflow, metrics)) + .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> new HelidonGrpcServerManager(configProvider, services, ingestWorkflow, null, metrics)) + .isInstanceOf(NullPointerException.class); + assertThatThrownBy(() -> + new HelidonGrpcServerManager(configProvider, services, ingestWorkflow, queryWorkflow, null)) + .isInstanceOf(NullPointerException.class); + } + + @Test + @DisplayName("Ports are -1 when not started") + void portsAreMinusOneWhenNotStarted() { + final var subject = + new HelidonGrpcServerManager(configProvider, services, ingestWorkflow, queryWorkflow, metrics); + assertThat(subject.port()).isEqualTo(-1); + assertThat(subject.tlsPort()).isEqualTo(-1); + } +} diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryComponentTest.java 
b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryComponentTest.java index da07172d6360..08f447294b3b 100644 --- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryComponentTest.java +++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/query/QueryComponentTest.java @@ -36,6 +36,7 @@ import com.swirlds.common.system.Platform; import com.swirlds.config.api.Configuration; import com.swirlds.platform.gui.SwirldsGui; +import java.util.Set; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -74,6 +75,7 @@ void setUp() { .initialHash(new Hash()) .maxSignedTxnSize(1024) .genesisUsage(false) + .servicesRegistry(Set::of) .build(); } diff --git a/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/GrpcConfig.java b/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/GrpcConfig.java index 9739f6c535bd..e1aa59e6a684 100644 --- a/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/GrpcConfig.java +++ b/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/GrpcConfig.java @@ -18,10 +18,33 @@ import com.swirlds.config.api.ConfigData; import com.swirlds.config.api.ConfigProperty; +import com.swirlds.config.api.validation.annotation.Max; +import com.swirlds.config.api.validation.annotation.Min; +/** + * @param port The port for plain grpc traffic. Must be non-negative. A value of 0 indicates an ephemeral port should be + * automatically selected by the computer. Must not be the same value as {@link #tlsPort()} unless both are + * 0. Must be a value between 0 and 65535, inclusive. + * @param tlsPort The port for tls-encrypted grpc traffic. Must be non-negative. A value of 0 indicates an ephemeral + * port should be automatically selected by the computer. Must not be the same value as {@link #port()} + * unless both are 0. Must be a value between 0 and 65535, inclusive. 
+ * @param workflowsPort Deprecated + * @param workflowsTlsPort Deprecated + */ @ConfigData("grpc") public record GrpcConfig( - @ConfigProperty(defaultValue = "50211") int port, - @ConfigProperty(defaultValue = "50212") int tlsPort, - @ConfigProperty(defaultValue = "60211") int workflowsPort, - @ConfigProperty(defaultValue = "60212") int workflowsTlsPort) {} + @ConfigProperty(defaultValue = "50211") @Min(0) @Max(65535) int port, + @ConfigProperty(defaultValue = "50212") @Min(0) @Max(65535) int tlsPort, + @ConfigProperty(defaultValue = "60211") @Min(0) @Max(65535) int workflowsPort, + @ConfigProperty(defaultValue = "60212") @Min(0) @Max(65535) int workflowsTlsPort) { + + public GrpcConfig { + if (port == tlsPort && port != 0) { + throw new IllegalArgumentException("grpc.port and grpc.tlsPort must be different"); + } + + if (workflowsPort == workflowsTlsPort && workflowsPort != 0) { + throw new IllegalArgumentException("grpc.workflowsPort and grpc.workflowsTlsPort must be different"); + } + } +} diff --git a/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/NettyConfig.java b/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/NettyConfig.java index c76b004eb010..3d19f8747e85 100644 --- a/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/NettyConfig.java +++ b/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/NettyConfig.java @@ -19,6 +19,21 @@ import com.swirlds.config.api.ConfigData; import com.swirlds.config.api.ConfigProperty; +/** + * + * @param prodFlowControlWindow + * @param prodMaxConcurrentCalls + * @param prodMaxConnectionAge + * @param prodMaxConnectionAgeGrace + * @param prodMaxConnectionIdle + * @param prodKeepAliveTime + * @param prodKeepAliveTimeout + * @param startRetries The number of times to retry starting the gRPC servers, if they fail to start. Defaults to 90. + * @param startRetryIntervalMs The number of milliseconds between retries. Defaults to 1000ms. Minimum value is 1. + * @param terminationTimeout The timeout, *in seconds*, to wait for the servers to terminate. 
+ * @param tlsCrtPath + * @param tlsKeyPath + */ @ConfigData("netty") public record NettyConfig( // @ConfigProperty(defaultValue = "PROD") Profile mode, @@ -31,5 +46,20 @@ public record NettyConfig( @ConfigProperty(value = "prod.keepAliveTimeout", defaultValue = "3") long prodKeepAliveTimeout, @ConfigProperty(defaultValue = "90") int startRetries, @ConfigProperty(defaultValue = "1000") long startRetryIntervalMs, + @ConfigProperty(defaultValue = "5") long terminationTimeout, @ConfigProperty(value = "tlsCrt.path", defaultValue = "hedera.crt") String tlsCrtPath, - @ConfigProperty(value = "tlsKey.path", defaultValue = "hedera.key") String tlsKeyPath) {} + @ConfigProperty(value = "tlsKey.path", defaultValue = "hedera.key") String tlsKeyPath) { + public NettyConfig { + if (startRetries < 0) { + throw new IllegalArgumentException("startRetries must be non-negative."); + } + + if (startRetryIntervalMs < 1) { + throw new IllegalArgumentException("startRetryIntervalMs cannot be less than 1ms"); + } + + if (terminationTimeout < 0) { + throw new IllegalArgumentException("terminationTimeout must be non-negative"); + } + } +} diff --git a/hedera-node/hedera-consensus-service/src/main/java/com/hedera/node/app/service/consensus/ConsensusService.java b/hedera-node/hedera-consensus-service/src/main/java/com/hedera/node/app/service/consensus/ConsensusService.java index 01b6d4f610d9..2154c0ed20db 100644 --- a/hedera-node/hedera-consensus-service/src/main/java/com/hedera/node/app/service/consensus/ConsensusService.java +++ b/hedera-node/hedera-consensus-service/src/main/java/com/hedera/node/app/service/consensus/ConsensusService.java @@ -18,8 +18,10 @@ import com.hedera.node.app.spi.Service; import com.hedera.node.app.spi.ServiceFactory; +import com.hedera.pbj.runtime.RpcServiceDefinition; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ServiceLoader; +import java.util.Set; /** * Implements the HAPI rpcDefinitions() { + return Set.of(ConsensusServiceDefinition.INSTANCE); + } + /** * Returns the concrete implementation instance of the service * diff --git a/hedera-node/hedera-consensus-service/src/main/java/com/hedera/node/app/service/consensus/ConsensusServiceDefinition.java b/hedera-node/hedera-consensus-service/src/main/java/com/hedera/node/app/service/consensus/ConsensusServiceDefinition.java new file mode 100644 index 000000000000..002afe113bd2 --- /dev/null +++ b/hedera-node/hedera-consensus-service/src/main/java/com/hedera/node/app/service/consensus/ConsensusServiceDefinition.java @@ -0,0 +1,99 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.consensus; + +import com.hedera.hapi.node.base.Transaction; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.hapi.node.transaction.TransactionResponse; +import com.hedera.pbj.runtime.RpcMethodDefinition; +import com.hedera.pbj.runtime.RpcServiceDefinition; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Set; + +/** + * The Consensus Service provides the ability for Hedera Hashgraph to provide aBFT consensus as to + * the order and validity of messages submitted to a *topic*, as well as a *consensus timestamp* for + * those messages. + */ +@SuppressWarnings("java:S6548") +public final class ConsensusServiceDefinition implements RpcServiceDefinition { + public static final ConsensusServiceDefinition INSTANCE = new ConsensusServiceDefinition(); + + private static final Set> methods = Set.of( + // + // Create a topic to be used for consensus. + // If an autoRenewAccount is specified, that account must also sign this transaction. + // If an adminKey is specified, the adminKey must sign the transaction. + // On success, the resulting TransactionReceipt contains the newly created TopicId. + // Request is [ConsensusCreateTopicTransactionBody](#proto.ConsensusCreateTopicTransactionBody) + // + new RpcMethodDefinition<>("createTopic", Transaction.class, TransactionResponse.class), + // + // Update a topic. + // If there is no adminKey, the only authorized update (available to anyone) is to extend the + // expirationTime. + // Otherwise, transaction must be signed by the adminKey. + // If an adminKey is updated, the transaction must be signed by the pre-update adminKey and post-update + // adminKey. + // If a new autoRenewAccount is specified (not just being removed), that account must also sign the + // transaction. + // Request is [ConsensusUpdateTopicTransactionBody](#proto.ConsensusUpdateTopicTransactionBody) + // + new RpcMethodDefinition<>("updateTopic", Transaction.class, TransactionResponse.class), + // + // Delete a topic. No more transactions or queries on the topic (via HAPI) will succeed. + // If an adminKey is set, this transaction must be signed by that key. + // If there is no adminKey, this transaction will fail UNAUTHORIZED. + // Request is [ConsensusDeleteTopicTransactionBody](#proto.ConsensusDeleteTopicTransactionBody) + // + new RpcMethodDefinition<>("deleteTopic", Transaction.class, TransactionResponse.class), + // + // Retrieve the latest state of a topic. This method is unrestricted and allowed on any topic by any payer + // account. + // Deleted accounts will not be returned. + // Request is [ConsensusGetTopicInfoQuery](#proto.ConsensusGetTopicInfoQuery) + // Response is [ConsensusGetTopicInfoResponse](#proto.ConsensusGetTopicInfoResponse) + // + new RpcMethodDefinition<>("getTopicInfo", Query.class, Response.class), + // + // Submit a message for consensus. + // Valid and authorized messages on valid topics will be ordered by the consensus service, gossipped to the + // mirror net, and published (in order) to all subscribers (from the mirror net) on this topic. + // The submitKey (if any) must sign this transaction. + // On success, the resulting TransactionReceipt contains the topic's updated topicSequenceNumber and + // topicRunningHash. 
+ // Request is [ConsensusSubmitMessageTransactionBody](#proto.ConsensusSubmitMessageTransactionBody) + // + new RpcMethodDefinition<>("submitMessage", Transaction.class, TransactionResponse.class)); + + private ConsensusServiceDefinition() { + // Forbid instantiation + } + + @Override + @NonNull + public String basePath() { + return "proto.ConsensusService"; + } + + @Override + @NonNull + public Set> methods() { + return methods; + } +} diff --git a/hedera-node/hedera-file-service/src/main/java/com/hedera/node/app/service/file/FileService.java b/hedera-node/hedera-file-service/src/main/java/com/hedera/node/app/service/file/FileService.java index 736e5d025bb9..d9ace6599798 100644 --- a/hedera-node/hedera-file-service/src/main/java/com/hedera/node/app/service/file/FileService.java +++ b/hedera-node/hedera-file-service/src/main/java/com/hedera/node/app/service/file/FileService.java @@ -18,8 +18,10 @@ import com.hedera.node.app.spi.Service; import com.hedera.node.app.spi.ServiceFactory; +import com.hedera.pbj.runtime.RpcServiceDefinition; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ServiceLoader; +import java.util.Set; /** * Implements the HAPI rpcDefinitions() { + return Set.of(FileServiceDefinition.INSTANCE); + } + /** * Returns the concrete implementation instance of the service * diff --git a/hedera-node/hedera-file-service/src/main/java/com/hedera/node/app/service/file/FileServiceDefinition.java b/hedera-node/hedera-file-service/src/main/java/com/hedera/node/app/service/file/FileServiceDefinition.java new file mode 100644 index 000000000000..c65f5d5aee75 --- /dev/null +++ b/hedera-node/hedera-file-service/src/main/java/com/hedera/node/app/service/file/FileServiceDefinition.java @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.file; + +import com.hedera.hapi.node.base.Transaction; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.hapi.node.transaction.TransactionResponse; +import com.hedera.pbj.runtime.RpcMethodDefinition; +import com.hedera.pbj.runtime.RpcServiceDefinition; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Set; + +/** + * Transactions and queries for the file service. 
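// --- Illustrative sketch, not part of this patch ---
// A minimal example (hypothetical RpcDefinitionLister class, assumed wiring) of how the
// RpcServiceDefinition singletons introduced above could be enumerated; it relies only on
// the basePath() and methods() accessors declared in ConsensusServiceDefinition.
import com.hedera.node.app.service.consensus.ConsensusServiceDefinition;
import com.hedera.pbj.runtime.RpcServiceDefinition;
import java.util.Set;

public final class RpcDefinitionLister {
    public static void main(final String[] args) {
        // Mirrors what ConsensusService.rpcDefinitions() returns
        final Set<RpcServiceDefinition> definitions = Set.of(ConsensusServiceDefinition.INSTANCE);
        for (final RpcServiceDefinition def : definitions) {
            System.out.println(def.basePath() + " exposes " + def.methods().size() + " RPC methods");
        }
    }
}
// --- End of illustrative sketch ---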
+ */ +@SuppressWarnings("java:S6548") +public final class FileServiceDefinition implements RpcServiceDefinition { + public static final FileServiceDefinition INSTANCE = new FileServiceDefinition(); + + private static final Set> methods = Set.of( + new RpcMethodDefinition<>("createFile", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("updateFile", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("deleteFile", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("appendContent", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("getFileContent", Query.class, Response.class), + new RpcMethodDefinition<>("getFileInfo", Query.class, Response.class), + new RpcMethodDefinition<>("systemDelete", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("systemUndelete", Transaction.class, TransactionResponse.class)); + + private FileServiceDefinition() { + // Forbid instantiation + } + + @Override + @NonNull + public String basePath() { + return "proto.FileService"; + } + + @Override + @NonNull + public Set> methods() { + return methods; + } +} diff --git a/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/FreezeService.java b/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/FreezeService.java index 46965a7e568b..3b8047959834 100644 --- a/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/FreezeService.java +++ b/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/FreezeService.java @@ -18,8 +18,10 @@ import com.hedera.node.app.spi.Service; import com.hedera.node.app.spi.ServiceFactory; +import com.hedera.pbj.runtime.RpcServiceDefinition; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ServiceLoader; +import java.util.Set; /** * Implements the HAPI rpcDefinitions() { + return Set.of(FreezeServiceDefinition.INSTANCE); + } + /** * Returns the concrete implementation instance of the service * diff --git a/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/FreezeServiceDefinition.java b/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/FreezeServiceDefinition.java new file mode 100644 index 000000000000..b8419520ffac --- /dev/null +++ b/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/FreezeServiceDefinition.java @@ -0,0 +1,51 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.networkadmin; + +import com.hedera.hapi.node.base.Transaction; +import com.hedera.hapi.node.transaction.TransactionResponse; +import com.hedera.pbj.runtime.RpcMethodDefinition; +import com.hedera.pbj.runtime.RpcServiceDefinition; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Set; + +/** + * The request and responses for freeze service. + */ +@SuppressWarnings("java:S6548") +public final class FreezeServiceDefinition implements RpcServiceDefinition { + public static final FreezeServiceDefinition INSTANCE = new FreezeServiceDefinition(); + + private static final Set> methods = + Set.of(new RpcMethodDefinition<>("freeze", Transaction.class, TransactionResponse.class)); + + private FreezeServiceDefinition() { + // Forbid instantiation + } + + @Override + @NonNull + public String basePath() { + return "proto.FreezeService"; + } + + @Override + @NonNull + public Set> methods() { + return methods; + } +} diff --git a/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/NetworkService.java b/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/NetworkService.java index fffd8586f703..d907a225dc60 100644 --- a/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/NetworkService.java +++ b/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/NetworkService.java @@ -18,8 +18,10 @@ import com.hedera.node.app.spi.Service; import com.hedera.node.app.spi.ServiceFactory; +import com.hedera.pbj.runtime.RpcServiceDefinition; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ServiceLoader; +import java.util.Set; /** * Implements the HAPI rpcDefinitions() { + return Set.of(NetworkServiceDefinition.INSTANCE); + } + /** * Returns the concrete implementation instance of the service * diff --git a/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/NetworkServiceDefinition.java b/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/NetworkServiceDefinition.java new file mode 100644 index 000000000000..9ca6e521b62a --- /dev/null +++ b/hedera-node/hedera-network-admin-service/src/main/java/com/hedera/node/app/service/networkadmin/NetworkServiceDefinition.java @@ -0,0 +1,56 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.networkadmin; + +import com.hedera.hapi.node.base.Transaction; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.hapi.node.transaction.TransactionResponse; +import com.hedera.pbj.runtime.RpcMethodDefinition; +import com.hedera.pbj.runtime.RpcServiceDefinition; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Set; + +/** + * The requests and responses for different network services. 
+ */ +@SuppressWarnings("java:S6548") +public final class NetworkServiceDefinition implements RpcServiceDefinition { + public static final NetworkServiceDefinition INSTANCE = new NetworkServiceDefinition(); + + private static final Set> methods = Set.of( + new RpcMethodDefinition<>("getVersionInfo", Query.class, Response.class), + new RpcMethodDefinition<>("getExecutionTime", Query.class, Response.class), + new RpcMethodDefinition<>("uncheckedSubmit", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("getAccountDetails", Query.class, Response.class)); + + private NetworkServiceDefinition() { + // Forbid instantiation + } + + @Override + @NonNull + public String basePath() { + return "proto.NetworkService"; + } + + @Override + @NonNull + public Set> methods() { + return methods; + } +} diff --git a/hedera-node/hedera-schedule-service/src/main/java/com/hedera/node/app/service/schedule/ScheduleService.java b/hedera-node/hedera-schedule-service/src/main/java/com/hedera/node/app/service/schedule/ScheduleService.java index cab355e9aedc..1cfdc5cdd798 100644 --- a/hedera-node/hedera-schedule-service/src/main/java/com/hedera/node/app/service/schedule/ScheduleService.java +++ b/hedera-node/hedera-schedule-service/src/main/java/com/hedera/node/app/service/schedule/ScheduleService.java @@ -18,8 +18,10 @@ import com.hedera.node.app.spi.Service; import com.hedera.node.app.spi.ServiceFactory; +import com.hedera.pbj.runtime.RpcServiceDefinition; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ServiceLoader; +import java.util.Set; /** * Implements the HAPI rpcDefinitions() { + return Set.of(ScheduleServiceDefinition.INSTANCE); + } + /** * Returns the concrete implementation instance of the service * diff --git a/hedera-node/hedera-schedule-service/src/main/java/com/hedera/node/app/service/schedule/ScheduleServiceDefinition.java b/hedera-node/hedera-schedule-service/src/main/java/com/hedera/node/app/service/schedule/ScheduleServiceDefinition.java new file mode 100644 index 000000000000..1573d84a467b --- /dev/null +++ b/hedera-node/hedera-schedule-service/src/main/java/com/hedera/node/app/service/schedule/ScheduleServiceDefinition.java @@ -0,0 +1,56 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
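// --- Illustrative sketch, not part of this patch ---
// A hypothetical JUnit test pinning down the NetworkServiceDefinition declared above;
// the expected values simply restate the constants in that file.
import static org.junit.jupiter.api.Assertions.assertEquals;

import com.hedera.node.app.service.networkadmin.NetworkServiceDefinition;
import org.junit.jupiter.api.Test;

class NetworkServiceDefinitionSketchTest {
    @Test
    void exposesExpectedBasePathAndMethodCount() {
        final var subject = NetworkServiceDefinition.INSTANCE;
        assertEquals("proto.NetworkService", subject.basePath());
        // getVersionInfo, getExecutionTime, uncheckedSubmit, getAccountDetails
        assertEquals(4, subject.methods().size());
    }
}
// --- End of illustrative sketch ---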
+ */ + +package com.hedera.node.app.service.schedule; + +import com.hedera.hapi.node.base.Transaction; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.hapi.node.transaction.TransactionResponse; +import com.hedera.pbj.runtime.RpcMethodDefinition; +import com.hedera.pbj.runtime.RpcServiceDefinition; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Set; + +/** + * Transactions and queries for the Schedule Service + */ +@SuppressWarnings("java:S6548") +public final class ScheduleServiceDefinition implements RpcServiceDefinition { + public static final ScheduleServiceDefinition INSTANCE = new ScheduleServiceDefinition(); + + private static final Set> methods = Set.of( + new RpcMethodDefinition<>("createSchedule", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("signSchedule", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("deleteSchedule", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("getScheduleInfo", Query.class, Response.class)); + + private ScheduleServiceDefinition() { + // Forbid instantiation + } + + @Override + @NonNull + public String basePath() { + return "proto.ScheduleService"; + } + + @Override + @NonNull + public Set> methods() { + return methods; + } +} diff --git a/hedera-node/hedera-smart-contract-service/src/main/java/com/hedera/node/app/service/contract/ContractService.java b/hedera-node/hedera-smart-contract-service/src/main/java/com/hedera/node/app/service/contract/ContractService.java index 5a9eb6ea62a8..e15615438c2c 100644 --- a/hedera-node/hedera-smart-contract-service/src/main/java/com/hedera/node/app/service/contract/ContractService.java +++ b/hedera-node/hedera-smart-contract-service/src/main/java/com/hedera/node/app/service/contract/ContractService.java @@ -18,8 +18,10 @@ import com.hedera.node.app.spi.Service; import com.hedera.node.app.spi.ServiceFactory; +import com.hedera.pbj.runtime.RpcServiceDefinition; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ServiceLoader; +import java.util.Set; /** * Implements the HAPI rpcDefinitions() { + return Set.of(SmartContractServiceDefinition.INSTANCE); + } + /** * Returns the concrete implementation instance of the service * diff --git a/hedera-node/hedera-smart-contract-service/src/main/java/com/hedera/node/app/service/contract/SmartContractServiceDefinition.java b/hedera-node/hedera-smart-contract-service/src/main/java/com/hedera/node/app/service/contract/SmartContractServiceDefinition.java new file mode 100644 index 000000000000..daa3479533e2 --- /dev/null +++ b/hedera-node/hedera-smart-contract-service/src/main/java/com/hedera/node/app/service/contract/SmartContractServiceDefinition.java @@ -0,0 +1,64 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.contract; + +import com.hedera.hapi.node.base.Transaction; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.hapi.node.transaction.TransactionResponse; +import com.hedera.pbj.runtime.RpcMethodDefinition; +import com.hedera.pbj.runtime.RpcServiceDefinition; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Set; + +/** + * Transactions and queries for the Smart Contract Service + */ +@SuppressWarnings("java:S6548") +public final class SmartContractServiceDefinition implements RpcServiceDefinition { + public static final SmartContractServiceDefinition INSTANCE = new SmartContractServiceDefinition(); + + private static final Set> methods = Set.of( + new RpcMethodDefinition<>("createContract", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("updateContract", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("contractCallMethod", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("contractCallLocalMethod", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("deleteContract", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("systemDelete", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("systemUndelete", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("callEthereum", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("ContractGetBytecode", Query.class, Response.class), + new RpcMethodDefinition<>("getBySolidityID", Query.class, Response.class), + new RpcMethodDefinition<>("getTxRecordByContractID", Query.class, Response.class), + new RpcMethodDefinition<>("getContractInfo", Query.class, Response.class)); + + private SmartContractServiceDefinition() { + // Forbid instantiation + } + + @Override + @NonNull + public String basePath() { + return "proto.SmartContractService"; + } + + @Override + @NonNull + public Set> methods() { + return methods; + } +} diff --git a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/CryptoServiceDefinition.java b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/CryptoServiceDefinition.java new file mode 100644 index 000000000000..2c8291351213 --- /dev/null +++ b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/CryptoServiceDefinition.java @@ -0,0 +1,68 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.token; + +import com.hedera.hapi.node.base.Transaction; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.hapi.node.transaction.TransactionResponse; +import com.hedera.pbj.runtime.RpcMethodDefinition; +import com.hedera.pbj.runtime.RpcServiceDefinition; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Set; + +/** + * Transactions and queries for the Crypto Service + */ +@SuppressWarnings("java:S6548") +public final class CryptoServiceDefinition implements RpcServiceDefinition { + public static final CryptoServiceDefinition INSTANCE = new CryptoServiceDefinition(); + + private static final Set> methods = Set.of( + new RpcMethodDefinition<>("createAccount", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("updateAccount", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("cryptoTransfer", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("cryptoDelete", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("approveAllowances", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("deleteAllowances", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("addLiveHash", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("deleteLiveHash", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("getLiveHash", Query.class, Response.class), + new RpcMethodDefinition<>("getAccountRecords", Query.class, Response.class), + new RpcMethodDefinition<>("cryptoGetBalance", Query.class, Response.class), + new RpcMethodDefinition<>("getAccountInfo", Query.class, Response.class), + new RpcMethodDefinition<>("getTransactionReceipts", Query.class, Response.class), + new RpcMethodDefinition<>("getFastTransactionRecord", Query.class, Response.class), + new RpcMethodDefinition<>("getTxRecordByTxID", Query.class, Response.class), + new RpcMethodDefinition<>("getStakersByAccountID", Query.class, Response.class)); + + private CryptoServiceDefinition() { + // Forbid instantiation + } + + @Override + @NonNull + public String basePath() { + return "proto.CryptoService"; + } + + @Override + @NonNull + public Set> methods() { + return methods; + } +} diff --git a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/TokenService.java b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/TokenService.java index c821b259a298..994c76da9482 100644 --- a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/TokenService.java +++ b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/TokenService.java @@ -18,8 +18,10 @@ import com.hedera.node.app.spi.Service; import com.hedera.node.app.spi.ServiceFactory; +import com.hedera.pbj.runtime.RpcServiceDefinition; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ServiceLoader; +import java.util.Set; /** * Implements the HAPI rpcDefinitions() { + return Set.of(CryptoServiceDefinition.INSTANCE, TokenServiceDefinition.INSTANCE); + } + /** * Returns the concrete implementation instance of the service * diff --git a/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/TokenServiceDefinition.java b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/TokenServiceDefinition.java new file mode 100644 index 
000000000000..a78932bebab4 --- /dev/null +++ b/hedera-node/hedera-token-service/src/main/java/com/hedera/node/app/service/token/TokenServiceDefinition.java @@ -0,0 +1,71 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token; + +import com.hedera.hapi.node.base.Transaction; +import com.hedera.hapi.node.transaction.Query; +import com.hedera.hapi.node.transaction.Response; +import com.hedera.hapi.node.transaction.TransactionResponse; +import com.hedera.pbj.runtime.RpcMethodDefinition; +import com.hedera.pbj.runtime.RpcServiceDefinition; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Set; + +/** + * Transactions and queries for the Token Service + */ +@SuppressWarnings("java:S6548") +public final class TokenServiceDefinition implements RpcServiceDefinition { + public static final TokenServiceDefinition INSTANCE = new TokenServiceDefinition(); + + private static final Set> methods = Set.of( + new RpcMethodDefinition<>("createToken", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("updateToken", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("mintToken", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("burnToken", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("deleteToken", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("wipeTokenAccount", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("freezeTokenAccount", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("unfreezeTokenAccount", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("grantKycToTokenAccount", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("revokeKycFromTokenAccount", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("associateTokens", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("dissociateTokens", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("updateTokenFeeSchedule", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("getTokenInfo", Query.class, Response.class), + new RpcMethodDefinition<>("getAccountNftInfos", Query.class, Response.class), + new RpcMethodDefinition<>("getTokenNftInfo", Query.class, Response.class), + new RpcMethodDefinition<>("getTokenNftInfos", Query.class, Response.class), + new RpcMethodDefinition<>("pauseToken", Transaction.class, TransactionResponse.class), + new RpcMethodDefinition<>("unpauseToken", Transaction.class, TransactionResponse.class)); + + private TokenServiceDefinition() { + // Forbid instantiation + } + + @Override + @NonNull + public String basePath() { + return "proto.TokenService"; + } + + @Override + @NonNull + public Set> methods() { + return methods; + } +} diff --git 
a/hedera-node/hedera-util-service/src/main/java/com/hedera/node/app/service/util/UtilService.java b/hedera-node/hedera-util-service/src/main/java/com/hedera/node/app/service/util/UtilService.java index a28638b4b932..caa5869e2495 100644 --- a/hedera-node/hedera-util-service/src/main/java/com/hedera/node/app/service/util/UtilService.java +++ b/hedera-node/hedera-util-service/src/main/java/com/hedera/node/app/service/util/UtilService.java @@ -18,8 +18,10 @@ import com.hedera.node.app.spi.Service; import com.hedera.node.app.spi.ServiceFactory; +import com.hedera.pbj.runtime.RpcServiceDefinition; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ServiceLoader; +import java.util.Set; /** * Implements the HAPI rpcDefinitions() { + return Set.of(UtilServiceDefinition.INSTANCE); + } + /** * Returns the concrete implementation instance of the service * diff --git a/hedera-node/hedera-util-service/src/main/java/com/hedera/node/app/service/util/UtilServiceDefinition.java b/hedera-node/hedera-util-service/src/main/java/com/hedera/node/app/service/util/UtilServiceDefinition.java new file mode 100644 index 000000000000..9e2fd0f56749 --- /dev/null +++ b/hedera-node/hedera-util-service/src/main/java/com/hedera/node/app/service/util/UtilServiceDefinition.java @@ -0,0 +1,51 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.util; + +import com.hedera.hapi.node.base.Transaction; +import com.hedera.hapi.node.transaction.TransactionResponse; +import com.hedera.pbj.runtime.RpcMethodDefinition; +import com.hedera.pbj.runtime.RpcServiceDefinition; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Set; + +/** + * Transactions and queries for the Util Service + */ +@SuppressWarnings("java:S6548") +public final class UtilServiceDefinition implements RpcServiceDefinition { + public static final UtilServiceDefinition INSTANCE = new UtilServiceDefinition(); + + private static final Set> methods = + Set.of(new RpcMethodDefinition<>("prng", Transaction.class, TransactionResponse.class)); + + private UtilServiceDefinition() { + // Forbid instantiation + } + + @Override + @NonNull + public String basePath() { + return "proto.UtilService"; + } + + @Override + @NonNull + public Set> methods() { + return methods; + } +} diff --git a/settings.gradle.kts b/settings.gradle.kts index 8f72c56a335a..23184035139a 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -150,7 +150,7 @@ dependencyResolutionManagement { version("com.google.common", "31.1-jre") version("com.google.protobuf", "3.19.4") version("com.google.protobuf.util", "3.19.2") - version("com.hedera.pbj.runtime", "0.6.1") + version("com.hedera.pbj.runtime", "0.7.0") version("com.sun.jna", "5.12.1") version("com.swirlds.base", swirldsVersion) version("com.swirlds.cli", swirldsVersion) From 43133b40ce5b0c6c38f67c8f9149abce4e46a681 Mon Sep 17 00:00:00 2001 From: Neeharika Sompalli <52669918+Neeharika-Sompalli@users.noreply.github.com> Date: Tue, 20 Jun 2023 16:02:19 -0500 Subject: [PATCH 60/70] `TokenUpdate` handle implementation (#7142) Signed-off-by: Neeharika-Sompalli --- .../mono/store/tokens/HederaTokenStore.java | 3 +- .../token/impl/handlers/BaseTokenHandler.java | 81 ++ .../impl/handlers/TokenUpdateHandler.java | 409 ++++++- .../token/impl/util/TokenHandlerHelper.java | 11 +- .../validators/TokenAttributesValidator.java | 4 +- .../impl/validators/TokenCreateValidator.java | 2 +- .../impl/validators/TokenUpdateValidator.java | 119 ++ .../token/impl/test/WritableNftStoreTest.java | 4 +- .../test/handlers/TokenMintHandlerTest.java | 6 +- .../TokenUpdateHandlerParityTest.java | 6 +- .../test/handlers/TokenUpdateHandlerTest.java | 1033 +++++++++++++++++ .../util/CryptoTokenHandlerTestBase.java | 12 +- .../TokenAttributesValidatorTest.java | 28 +- 13 files changed, 1666 insertions(+), 52 deletions(-) create mode 100644 hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenUpdateValidator.java create mode 100644 hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUpdateHandlerTest.java diff --git a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/tokens/HederaTokenStore.java b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/tokens/HederaTokenStore.java index f729ff183ea3..c287f92af108 100644 --- a/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/tokens/HederaTokenStore.java +++ b/hedera-node/hedera-mono-service/src/main/java/com/hedera/node/app/service/mono/store/tokens/HederaTokenStore.java @@ -668,7 +668,8 @@ public static boolean affectsExpiryAtMost(final TokenUpdateTransactionBody op) { && !op.hasAutoRenewAccount() && op.getSymbol().length() == 0 && op.getName().length() == 0 - && 
op.getAutoRenewPeriod().getSeconds() == 0; + && op.getAutoRenewPeriod().getSeconds() == 0 + && !op.hasMemo(); } private ResponseCodeEnum fullySanityChecked( diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java index 0fce3d4d91c1..a32dd5b99bec 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/BaseTokenHandler.java @@ -20,9 +20,14 @@ import static com.hedera.hapi.node.base.ResponseCodeEnum.INSUFFICIENT_TOKEN_BALANCE; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_MINT_AMOUNT; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TREASURY_ACCOUNT_FOR_TOKEN; +import static com.hedera.hapi.node.base.ResponseCodeEnum.MAX_ENTITIES_IN_PRICE_REGIME_HAVE_BEEN_CREATED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.NO_REMAINING_AUTOMATIC_ASSOCIATIONS; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKENS_PER_ACCOUNT_LIMIT_EXCEEDED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_SUPPLY_KEY; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_MAX_SUPPLY_REACHED; import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.spi.workflows.HandleException.validateFalse; import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; import static java.util.Objects.requireNonNull; @@ -33,9 +38,13 @@ import com.hedera.hapi.node.state.token.Account; import com.hedera.hapi.node.state.token.Token; import com.hedera.hapi.node.state.token.TokenRelation; +import com.hedera.hapi.node.token.TokenUpdateTransactionBody; import com.hedera.node.app.service.token.impl.WritableAccountStore; import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; import com.hedera.node.app.service.token.impl.WritableTokenStore; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.config.data.EntitiesConfig; +import com.hedera.node.config.data.TokensConfig; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ArrayList; import java.util.List; @@ -284,7 +293,79 @@ private List createTokenRelsToAccount( return newTokenRels; } + /** + * Creates a new {@link TokenRelation} with the account and token. This is called when there is + * no association yet, but have open slots for maxAutoAssociations on the account. 
+ * @param account the account to link the tokens to + * @param token the token to link to the account + * @param accountStore the account store + * @param tokenRelStore the token relation store + * @param context the handle context + */ + protected void autoAssociate( + @NonNull final Account account, + @NonNull final Token token, + @NonNull final WritableAccountStore accountStore, + @NonNull final WritableTokenRelationStore tokenRelStore, + @NonNull final HandleContext context) { + final var tokensConfig = context.configuration().getConfigData(TokensConfig.class); + final var entitiesConfig = context.configuration().getConfigData(EntitiesConfig.class); + + final var accountId = asAccount(account.accountNumber()); + final var tokenId = asToken(token.tokenNumber()); + // If token is already associated, no need to associate again + validateTrue(tokenRelStore.get(accountId, tokenId) == null, TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT); + validateTrue( + tokenRelStore.sizeOfState() + 1 < tokensConfig.maxAggregateRels(), + MAX_ENTITIES_IN_PRICE_REGIME_HAVE_BEEN_CREATED); + + // Check is number of used associations is less than maxAutoAssociations + final var numAssociations = account.numberAssociations(); + validateFalse( + entitiesConfig.limitTokenAssociations() && numAssociations >= tokensConfig.maxPerAccount(), + TOKENS_PER_ACCOUNT_LIMIT_EXCEEDED); + + final var maxAutoAssociations = account.maxAutoAssociations(); + final var usedAutoAssociations = account.usedAutoAssociations(); + validateFalse(usedAutoAssociations >= maxAutoAssociations, NO_REMAINING_AUTOMATIC_ASSOCIATIONS); + + // Create new token relation and commit to store + final var newTokenRel = TokenRelation.newBuilder() + .tokenNumber(tokenId.tokenNum()) + .accountNumber(account.accountNumber()) + .automaticAssociation(true) + .kycGranted(!token.hasKycKey()) + .frozen(token.hasFreezeKey() && token.accountsFrozenByDefault()) + .previousToken(-1) + .nextToken(account.headTokenNumber()) + .build(); + + final var copyAccount = account.copyBuilder() + .numberAssociations(numAssociations + 1) + .usedAutoAssociations(usedAutoAssociations + 1) + .headTokenNumber(tokenId.tokenNum()) + .build(); + + accountStore.put(copyAccount); + tokenRelStore.put(newTokenRel); + } + /* ------------------------- Helper functions ------------------------- */ + + /** + * Returns true if the given token update op is an expiry-only update op. + * This is needed for validating whether a token update op has admin key present on the token, + * to update any other fields other than expiry. 
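// --- Illustrative sketch, not part of this patch ---
// A minimal example (hypothetical values, PBJ-style builder names assumed) of the
// expiry-only check described above: an update carrying only the token ID and an expiry
// is treated as expiry-only, while one that also renames the token is not.
import com.hedera.hapi.node.base.Timestamp;
import com.hedera.hapi.node.base.TokenID;
import com.hedera.hapi.node.token.TokenUpdateTransactionBody;
import com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler;

final class ExpiryOnlyUpdateExample {
    static void demo() {
        final var tokenId = TokenID.newBuilder().tokenNum(1234L).build();
        final var expiryOnly = TokenUpdateTransactionBody.newBuilder()
                .token(tokenId)
                .expiry(Timestamp.newBuilder().seconds(1_234_567_890L).build())
                .build();
        // Only the token ID and expiry are set, so this is an expiry-only update
        final boolean onlyExpiry = BaseTokenHandler.isExpiryOnlyUpdateOp(expiryOnly);   // true
        // Setting any other field, e.g. a new name, makes it a full update
        final var alsoRenames = expiryOnly.copyBuilder().name("NewName").build();
        final boolean fullUpdate = BaseTokenHandler.isExpiryOnlyUpdateOp(alsoRenames);  // false
    }
}
// --- End of illustrative sketch ---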
+ * @param op the token update op to check + * @return true if the given token update op is an expiry-only update op + */ + public static boolean isExpiryOnlyUpdateOp(@NonNull final TokenUpdateTransactionBody op) { + final var defaultOp = TokenUpdateTransactionBody.DEFAULT; + final var copyDefaultWithExpiry = + defaultOp.copyBuilder().expiry(op.expiry()).token(op.token()).build(); + return op.equals(copyDefaultWithExpiry); + } + @NonNull public static TokenID asToken(final long num) { return TokenID.newBuilder().tokenNum(num).build(); diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenUpdateHandler.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenUpdateHandler.java index 2d411dc987ca..424ca9259d05 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenUpdateHandler.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/handlers/TokenUpdateHandler.java @@ -16,49 +16,78 @@ package com.hedera.node.app.service.token.impl.handlers; +import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_FROZEN_FOR_TOKEN; +import static com.hedera.hapi.node.base.ResponseCodeEnum.CURRENT_TREASURY_STILL_OWNS_NFTS; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INSUFFICIENT_TOKEN_BALANCE; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ACCOUNT_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_AUTORENEW_ACCOUNT; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TREASURY_ACCOUNT_FOR_TOKEN; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_FEE_SCHEDULE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_FREEZE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_KYC_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_PAUSE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_SUPPLY_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_WIPE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_IS_IMMUTABLE; +import static com.hedera.hapi.node.base.TokenType.FUNGIBLE_COMMON; +import static com.hedera.hapi.node.base.TokenType.NON_FUNGIBLE_UNIQUE; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.service.token.impl.util.TokenHandlerHelper.getIfUsable; +import static com.hedera.node.app.service.token.impl.validators.TokenAttributesValidator.isKeyRemoval; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static com.hedera.node.app.spi.workflows.PreCheckException.validateTruePreCheck; import static java.util.Objects.requireNonNull; -import com.hedera.hapi.node.base.HederaFunctionality; -import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.state.token.Account; +import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.state.token.TokenRelation; +import com.hedera.hapi.node.token.TokenUpdateTransactionBody; +import com.hedera.hapi.node.transaction.TransactionBody; import com.hedera.node.app.service.token.ReadableTokenStore; +import 
com.hedera.node.app.service.token.impl.WritableAccountStore; +import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; +import com.hedera.node.app.service.token.impl.WritableTokenStore; +import com.hedera.node.app.service.token.impl.validators.TokenUpdateValidator; +import com.hedera.node.app.spi.validation.ExpiryMeta; import com.hedera.node.app.spi.workflows.HandleContext; import com.hedera.node.app.spi.workflows.HandleException; import com.hedera.node.app.spi.workflows.PreCheckException; import com.hedera.node.app.spi.workflows.PreHandleContext; import com.hedera.node.app.spi.workflows.TransactionHandler; +import com.hedera.node.config.data.TokensConfig; import edu.umd.cs.findbugs.annotations.NonNull; import javax.inject.Inject; import javax.inject.Singleton; /** - * This class contains all workflow-related functionality regarding {@link - * HederaFunctionality#TOKEN_UPDATE}. - * - *

<p>NOTE: this class intentionally changes the following error response codes relative to - * SigRequirements: - * - * <ol> - *   <li>When a missing account is used as a token treasuryNum, fails with {@code INVALID_ACCOUNT_ID} - * rather than {@code ACCOUNT_ID_DOES_NOT_EXIST}. - * </ol>
- * - * * EET expectations may need to be updated accordingly + * Provides the state transition for token update. */ @Singleton -public class TokenUpdateHandler implements TransactionHandler { +public class TokenUpdateHandler extends BaseTokenHandler implements TransactionHandler { + private final TokenUpdateValidator tokenUpdateValidator; + @Inject - public TokenUpdateHandler() { - // Exists for injection + public TokenUpdateHandler(@NonNull final TokenUpdateValidator tokenUpdateValidator) { + this.tokenUpdateValidator = tokenUpdateValidator; + } + + @Override + public void pureChecks(@NonNull final TransactionBody txn) throws PreCheckException { + requireNonNull(txn); + final var op = txn.tokenUpdateOrThrow(); + validateTruePreCheck(op.hasToken(), INVALID_TOKEN_ID); } @Override public void preHandle(@NonNull final PreHandleContext context) throws PreCheckException { requireNonNull(context); final var op = context.body().tokenUpdateOrThrow(); - final var tokenId = op.tokenOrElse(TokenID.DEFAULT); + pureChecks(context.body()); + + final var tokenId = op.tokenOrThrow(); final var tokenStore = context.createStore(ReadableTokenStore.class); final var tokenMetadata = tokenStore.getTokenMeta(tokenId); @@ -79,6 +108,348 @@ public void preHandle(@NonNull final PreHandleContext context) throws PreCheckEx @Override public void handle(@NonNull final HandleContext context) throws HandleException { - throw new UnsupportedOperationException("Not implemented"); + requireNonNull(context); + final var txn = context.body(); + final var op = txn.tokenUpdateOrThrow(); + final var tokenId = op.tokenOrThrow(); + + // validate fields that involve config or state + final var validationResult = tokenUpdateValidator.validateSemantics(context, op); + // get the resolved expiry meta and token + final var token = validationResult.token(); + final var resolvedExpiry = validationResult.resolvedExpiryMeta(); + + final var accountStore = context.writableStore(WritableAccountStore.class); + final var tokenRelStore = context.writableStore(WritableTokenRelationStore.class); + final var tokenStore = context.writableStore(WritableTokenStore.class); + final var tokensConfig = context.configuration().getConfigData(TokensConfig.class); + + // If the operation has treasury change, then we need to check if the new treasury is valid + // and if the treasury is not already associated with the token, see if it has auto associations + // enabled and has open slots. If so, auto-associate. + // We allow existing treasuries to have any nft balances left over, but the new treasury should + // not have any balances left over. Transfer all balances for the current token to new treasury + if (op.hasTreasury()) { + final var existingTreasury = asAccount(token.treasuryAccountNumber()); + final var newTreasury = op.treasuryOrThrow(); + final var newTreasuryAccount = getIfUsable( + newTreasury, accountStore, context.expiryValidator(), INVALID_TREASURY_ACCOUNT_FOR_TOKEN); + final var newTreasuryRel = tokenRelStore.get(newTreasury, tokenId); + // If there is no treasury relationship, then we need to create one if auto associations are available. + // If not fail + if (newTreasuryRel == null) { + autoAssociate(newTreasuryAccount, token, accountStore, tokenRelStore, context); + } + // Treasury can be modified when it owns NFTs when the property "tokens.nfts.useTreasuryWildcards" + // is enabled. 
+ if (!tokensConfig.nftsUseTreasuryWildcards() && token.tokenType().equals(NON_FUNGIBLE_UNIQUE)) { + final var existingTreasuryRel = tokenRelStore.get(existingTreasury, tokenId); + validateTrue(existingTreasuryRel != null, INVALID_TREASURY_ACCOUNT_FOR_TOKEN); + final var tokenRelBalance = existingTreasuryRel.balance(); + validateTrue(tokenRelBalance == 0, CURRENT_TREASURY_STILL_OWNS_NFTS); + } + + if (!newTreasury.equals(existingTreasury)) { + // TODO : Not sure why we are checking existing treasury account here + final var existingTreasuryAccount = getIfUsable( + existingTreasury, accountStore, context.expiryValidator(), INVALID_TREASURY_ACCOUNT_FOR_TOKEN); + updateTreasuryTitles(existingTreasuryAccount, newTreasuryAccount, token, accountStore, tokenRelStore); + // If the token is fungible, transfer fungible balance to new treasury + // If it is non-fungible token transfer the ownership of the NFTs from old treasury to new treasury + transferTokensToNewTreasury(existingTreasury, newTreasury, token, tokenRelStore, accountStore); + } + } + + final var tokenBuilder = customizeToken(token, resolvedExpiry, op); + tokenStore.put(tokenBuilder.build()); + } + + /** + * Transfer tokens from old treasury to new treasury if the token is fungible. If the token is non-fungible, + * transfer the ownership of the NFTs from old treasury to new treasury + * @param oldTreasury old treasury account + * @param newTreasury new treasury account + * @param token token + * @param tokenRelStore token relationship store + * @param accountStore account store + */ + private void transferTokensToNewTreasury( + final AccountID oldTreasury, + final AccountID newTreasury, + final Token token, + final WritableTokenRelationStore tokenRelStore, + final WritableAccountStore accountStore) { + final var tokenId = asToken(token.tokenNumber()); + // Validate both accounts are not frozen and have the right keys + final var oldTreasuryRel = getIfUsable(oldTreasury, tokenId, tokenRelStore); + final var newTreasuryRel = getIfUsable(newTreasury, tokenId, tokenRelStore); + if (oldTreasuryRel.balance() > 0) { + validateFrozenAndKey(oldTreasuryRel); + validateFrozenAndKey(newTreasuryRel); + + if (token.tokenType().equals(FUNGIBLE_COMMON)) { + // Transfers fungible balances and updates account's numOfPositiveBalances + // and puts to modifications on state. + transferFungibleTokensToTreasury(oldTreasuryRel, newTreasuryRel, tokenRelStore, accountStore); + } else { + // Transfers NFT ownerships and updates account's numOwnedNfts and + // tokenRelation's balance and puts to modifications on state. + changeOwnerToNewTreasury(oldTreasuryRel, newTreasuryRel, tokenRelStore, accountStore); + } + } + } + /** + * Transfer fungible tokens from old treasury to new treasury. + * NOTE: This updates account's numOfPositiveBalances and puts to modifications on state. 
+ * @param fromTreasuryRel old treasury relationship + * @param toTreasuryRel new treasury relationship + * @param tokenRelStore token relationship store + * @param accountStore account store + */ + private void transferFungibleTokensToTreasury( + final TokenRelation fromTreasuryRel, + final TokenRelation toTreasuryRel, + final WritableTokenRelationStore tokenRelStore, + final WritableAccountStore accountStore) { + final var adjustment = fromTreasuryRel.balance(); + + final var fromTreasury = accountStore.getAccountById(asAccount(fromTreasuryRel.accountNumber())); + final var toTreasury = accountStore.getAccountById(asAccount(toTreasuryRel.accountNumber())); + + adjustBalance(fromTreasuryRel, fromTreasury, -adjustment, tokenRelStore, accountStore); + adjustBalance(toTreasuryRel, toTreasury, adjustment, tokenRelStore, accountStore); + // TODO: If any of the above fail, need to rollback only token transfer balances for record. + // Not sure how it will be done yet + } + + /** + * Adjust fungible balances for the given token relationship and account by the given adjustment. + * NOTE: This updates account's numOfPositiveBalances and puts to modifications on state. + * @param tokenRel token relationship + * @param account account to be adjusted + * @param adjustment adjustment to be made + * @param tokenRelStore token relationship store + * @param accountStore account store + */ + private void adjustBalance( + final TokenRelation tokenRel, + final Account account, + final long adjustment, + final WritableTokenRelationStore tokenRelStore, + final WritableAccountStore accountStore) { + final var originalBalance = tokenRel.balance(); + final var newBalance = originalBalance + adjustment; + validateTrue(newBalance >= 0, INSUFFICIENT_TOKEN_BALANCE); + + final var copyRel = tokenRel.copyBuilder(); + tokenRelStore.put(copyRel.balance(newBalance).build()); + + var numPositiveBalances = account.numberPositiveBalances(); + // If the original balance is zero, then the receiving account's numPositiveBalances has to + // be increased + // and if the newBalance is zero, then the sending account's numPositiveBalances has to be + // decreased + if (newBalance == 0 && adjustment < 0) { + numPositiveBalances--; + } else if (originalBalance == 0 && adjustment > 0) { + numPositiveBalances++; + } + final var copyAccount = account.copyBuilder(); + accountStore.put(copyAccount.numberPositiveBalances(numPositiveBalances).build()); + // TODO: Need to track units change in record in finalize method for this + } + + /** + * Change the ownership of the NFTs from old treasury to new treasury. + * NOTE: This updates account's numOwnedNfts and tokenRelation's balance and puts to modifications on state. 
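// Worked example of the adjustBalance bookkeeping above (hypothetical numbers):
//   old treasury rel: balance 500, account numberPositiveBalances 3
//   new treasury rel: balance 0,   account numberPositiveBalances 1
// transferFungibleTokensToTreasury moves the full 500 with two adjustBalance calls:
//   adjustBalance(oldRel, oldAccount, -500, ...) -> rel balance 0,   numberPositiveBalances 2 (balance hit zero)
//   adjustBalance(newRel, newAccount, +500, ...) -> rel balance 500, numberPositiveBalances 2 (balance rose from zero)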
+ * @param fromTreasuryRel old treasury relationship + * @param toTreasuryRel new treasury relationship + * @param tokenRelStore token relationship store + * @param accountStore account store + */ + private void changeOwnerToNewTreasury( + final TokenRelation fromTreasuryRel, + final TokenRelation toTreasuryRel, + final WritableTokenRelationStore tokenRelStore, + final WritableAccountStore accountStore) { + final var fromTreasury = accountStore.getAccountById(asAccount(fromTreasuryRel.accountNumber())); + final var toTreasury = accountStore.getAccountById(asAccount(toTreasuryRel.accountNumber())); + + final var fromRelBalance = fromTreasuryRel.balance(); + final var toRelBalance = toTreasuryRel.balance(); + + final var fromNftsOwned = fromTreasury.numberOwnedNfts(); + final var toNftsWOwned = toTreasury.numberOwnedNfts(); + + final var fromTreasuryCopy = fromTreasury.copyBuilder(); + final var toTreasuryCopy = toTreasury.copyBuilder(); + final var fromRelCopy = fromTreasuryRel.copyBuilder(); + final var toRelCopy = toTreasuryRel.copyBuilder(); + + accountStore.put( + fromTreasuryCopy.numberOwnedNfts(fromNftsOwned - fromRelBalance).build()); + accountStore.put( + toTreasuryCopy.numberOwnedNfts(toNftsWOwned + fromRelBalance).build()); + tokenRelStore.put(fromRelCopy.balance(0).build()); + tokenRelStore.put(toRelCopy.balance(toRelBalance + fromRelBalance).build()); + // TODO : Need to build record transfer list for this case. Not needed for this PR. + // Need to do in finalize + } + + /** + * Validate both KYC is granted and token is not frozen on the token. + * @param tokenRel token relationship + */ + private void validateFrozenAndKey(final TokenRelation tokenRel) { + validateTrue(!tokenRel.frozen(), ACCOUNT_FROZEN_FOR_TOKEN); + validateTrue(tokenRel.kycGranted(), TOKEN_HAS_NO_KYC_KEY); + } + + /** + * Build a Token based on the given token update transaction body. + * @param token token to be updated + * @param resolvedExpiry resolved expiry + * @param op token update transaction body + * @return updated token builder + */ + private Token.Builder customizeToken( + @NonNull final Token token, + @NonNull final ExpiryMeta resolvedExpiry, + @NonNull final TokenUpdateTransactionBody op) { + final var copyToken = token.copyBuilder(); + // All these keys are validated in validateSemantics + // If these keys did not exist on the token already, they can't be changed on update + updateKeys(op, token, copyToken); + updateExpiryFields(op, resolvedExpiry, copyToken); + updateNameSymbolMemoAndTreasury(op, copyToken, token); + return copyToken; + } + + /** + * Updates token name, token symbol, token memo and token treasury if they are present in the + * token update transaction body. + * @param op token update transaction body + * @param builder token builder + * @param originalToken original token + */ + private void updateNameSymbolMemoAndTreasury( + final TokenUpdateTransactionBody op, final Token.Builder builder, final Token originalToken) { + if (op.symbol() != null && op.symbol().length() > 0) { + builder.symbol(op.symbol()); + } + if (op.name() != null && op.name().length() > 0) { + builder.name(op.name()); + } + if (op.hasMemo() && op.memo().length() > 0) { + builder.memo(op.memo()); + } + if (op.hasTreasury() && op.treasuryOrThrow().accountNum() != originalToken.treasuryAccountNumber()) { + builder.treasuryAccountNumber(op.treasuryOrThrow().accountNum()); + } + } + + /** + * Updates expiry fields of the token if they are present in the token update transaction body. 
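// Illustration of updateNameSymbolMemoAndTreasury above (hypothetical values):
//   existing token { name: "Gold", symbol: "GLD", memo: "bar", treasury: 0.0.1001 }
//   op = { name: "Platinum" }   -> name becomes "Platinum"; symbol, memo, treasury unchanged
//   op = { symbol: "" }         -> ignored; empty strings do not clear the existing symbol
//   op = { treasury: 0.0.1002 } -> treasuryAccountNumber becomes 1002, since it differs from 1001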
+ * @param op token update transaction body + * @param resolvedExpiry resolved expiry + * @param builder token builder + */ + private void updateExpiryFields( + final TokenUpdateTransactionBody op, final ExpiryMeta resolvedExpiry, final Token.Builder builder) { + if (op.hasExpiry()) { + builder.expiry(resolvedExpiry.expiry()); + } + if (op.hasAutoRenewPeriod()) { + builder.autoRenewSecs(resolvedExpiry.autoRenewPeriod()); + } + if (op.hasAutoRenewAccount()) { + builder.autoRenewAccountNumber(resolvedExpiry.autoRenewNum()); + } + } + + /** + * Updates keys of the token if they are present in the token update transaction body. + * All keys can be updates only if they had already existed on the token. + * These keys can't be updated if they were not added during creation. + * @param op token update transaction body + * @param originalToken original token + * @param builder token builder + */ + private void updateKeys( + final TokenUpdateTransactionBody op, final Token originalToken, final Token.Builder builder) { + if (op.hasKycKey()) { + validateTrue(originalToken.hasKycKey(), TOKEN_HAS_NO_KYC_KEY); + builder.kycKey(op.kycKey()); + } + if (op.hasFreezeKey()) { + validateTrue(originalToken.hasFreezeKey(), TOKEN_HAS_NO_FREEZE_KEY); + builder.freezeKey(op.freezeKey()); + } + if (op.hasWipeKey()) { + validateTrue(originalToken.hasWipeKey(), TOKEN_HAS_NO_WIPE_KEY); + builder.wipeKey(op.wipeKey()); + } + if (op.hasSupplyKey()) { + validateTrue(originalToken.hasSupplyKey(), TOKEN_HAS_NO_SUPPLY_KEY); + builder.supplyKey(op.supplyKey()); + } + if (op.hasFeeScheduleKey()) { + validateTrue(originalToken.hasFeeScheduleKey(), TOKEN_HAS_NO_FEE_SCHEDULE_KEY); + builder.feeScheduleKey(op.feeScheduleKey()); + } + if (op.hasPauseKey()) { + validateTrue(originalToken.hasPauseKey(), TOKEN_HAS_NO_PAUSE_KEY); + builder.pauseKey(op.pauseKey()); + } + if (!isExpiryOnlyUpdateOp(op)) { + validateTrue(originalToken.hasAdminKey(), TOKEN_IS_IMMUTABLE); + } + if (op.hasAdminKey()) { + final var newAdminKey = op.adminKey(); + if (isKeyRemoval(newAdminKey)) { + builder.adminKey((Key) null); + } else { + builder.adminKey(newAdminKey); + } + } + } + + /** + * If there is a change in treasury account, update the treasury titles of the old and + * new treasury accounts. + * NOTE : This updated the numberTreasuryTitles on old and new treasury accounts. 
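// Illustration of the updateKeys rules above (hypothetical token):
//   token created without a kycKey, op sets kycKey                 -> fails with TOKEN_HAS_NO_KYC_KEY
//   token created with a freezeKey, op sets freezeKey              -> freeze key is replaced
//   op sets adminKey to the immutability sentinel key              -> admin key is cleared (isKeyRemoval)
//   op changes anything besides expiry on an admin-key-less token  -> fails with TOKEN_IS_IMMUTABLE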
+ * And also updates new treasury relationship to not be frozen + * @param existingTreasuryAccount existing treasury account + * @param newTreasuryAccount new treasury account + * @param originalToken original token + * @param accountStore account store + * @param tokenRelStore token relation store + */ + private void updateTreasuryTitles( + @NonNull final Account existingTreasuryAccount, + @NonNull final Account newTreasuryAccount, + @NonNull final Token originalToken, + @NonNull final WritableAccountStore accountStore, + @NonNull final WritableTokenRelationStore tokenRelStore) { + final var newTokenRelation = + tokenRelStore.get(asAccount(newTreasuryAccount.accountNumber()), asToken(originalToken.tokenNumber())); + final var newRelCopy = newTokenRelation.copyBuilder(); + + if (originalToken.hasFreezeKey()) { + newRelCopy.frozen(false); + } + if (originalToken.hasKycKey()) { + newRelCopy.kycGranted(true); + } + + final var existingTreasuryTitles = existingTreasuryAccount.numberTreasuryTitles(); + final var newTreasuryAccountTitles = newTreasuryAccount.numberTreasuryTitles(); + final var copyOldTreasury = + existingTreasuryAccount.copyBuilder().numberTreasuryTitles(existingTreasuryTitles - 1); + final var copyNewTreasury = newTreasuryAccount.copyBuilder().numberTreasuryTitles(newTreasuryAccountTitles + 1); + + accountStore.put(copyOldTreasury.build()); + accountStore.put(copyNewTreasury.build()); + tokenRelStore.put(newRelCopy.build()); } } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java index 09726185f243..a851a0b7efc7 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/util/TokenHandlerHelper.java @@ -33,6 +33,7 @@ */ import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_DELETED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.CONTRACT_DELETED; import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; import static com.hedera.hapi.node.base.ResponseCodeEnum.OK; import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_IS_PAUSED; @@ -89,9 +90,13 @@ public static Account getIfUsable( final var acct = accountStore.getAccountById(accountId); validateTrue(acct != null, errorIfNotUsable); - validateFalse(acct.deleted(), ACCOUNT_DELETED); - final var expiryStatus = expiryValidator.expirationStatus( - EntityType.ACCOUNT, acct.expiredAndPendingRemoval(), acct.tinybarBalance()); + final var isContract = acct.smartContract(); + + validateFalse(acct.deleted(), isContract ? CONTRACT_DELETED : ACCOUNT_DELETED); + final var type = isContract ? 
EntityType.CONTRACT : EntityType.ACCOUNT; + + final var expiryStatus = + expiryValidator.expirationStatus(type, acct.expiredAndPendingRemoval(), acct.tinybarBalance()); validateTrue(expiryStatus == OK, expiryStatus); return acct; diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenAttributesValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenAttributesValidator.java index 465a133ff6a7..4cdbcdacb62b 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenAttributesValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenAttributesValidator.java @@ -110,7 +110,7 @@ private void tokenStringCheck( * @param hasPauseKey whether the token has a pause key * @param pauseKey the token pause key to validate */ - public void checkKeys( + public void validateTokenKeys( final boolean hasAdminKey, @Nullable final Key adminKey, final boolean hasKycKey, @@ -153,7 +153,7 @@ public void checkKeys( * @param source the key to check * @return true if the key is a key removal, false otherwise */ - private static boolean isKeyRemoval(@NonNull final Key source) { + public static boolean isKeyRemoval(@NonNull final Key source) { requireNonNull(source); return IMMUTABILITY_SENTINEL_KEY.equals(source); } diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java index a8335bdcbf12..affd6d83a282 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenCreateValidator.java @@ -122,7 +122,7 @@ public void validate( tokenAttributesValidator.validateTokenSymbol(op.symbol(), config); tokenAttributesValidator.validateTokenName(op.name(), config); - tokenAttributesValidator.checkKeys( + tokenAttributesValidator.validateTokenKeys( op.hasAdminKey(), op.adminKey(), op.hasKycKey(), op.kycKey(), op.hasWipeKey(), op.wipeKey(), diff --git a/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenUpdateValidator.java b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenUpdateValidator.java new file mode 100644 index 000000000000..b3ee80811549 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/main/java/com/hedera/node/app/service/token/impl/validators/TokenUpdateValidator.java @@ -0,0 +1,119 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.token.impl.validators; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_AUTORENEW_ACCOUNT; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_IS_IMMUTABLE; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.service.token.impl.util.TokenHandlerHelper.getIfUsable; +import static com.hedera.node.app.spi.key.KeyUtils.isEmpty; +import static com.hedera.node.app.spi.validation.ExpiryMeta.NA; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; + +import com.hedera.hapi.node.state.token.Token; +import com.hedera.hapi.node.token.TokenUpdateTransactionBody; +import com.hedera.node.app.service.token.ReadableAccountStore; +import com.hedera.node.app.service.token.ReadableTokenStore; +import com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler; +import com.hedera.node.app.spi.validation.ExpiryMeta; +import com.hedera.node.app.spi.validation.ExpiryValidator; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.config.data.TokensConfig; +import edu.umd.cs.findbugs.annotations.NonNull; +import javax.inject.Inject; + +public class TokenUpdateValidator { + private final TokenAttributesValidator validator; + + @Inject + public TokenUpdateValidator(@NonNull final TokenAttributesValidator validator) { + this.validator = validator; + } + + public record ValidationResult(@NonNull Token token, @NonNull ExpiryMeta resolvedExpiryMeta) {} + + @NonNull + public ValidationResult validateSemantics( + @NonNull final HandleContext context, @NonNull final TokenUpdateTransactionBody op) { + final var readableAccountStore = context.readableStore(ReadableAccountStore.class); + final var tokenStore = context.readableStore(ReadableTokenStore.class); + final var tokenId = op.tokenOrThrow(); + final var token = getIfUsable(tokenId, tokenStore); + final var tokensConfig = context.configuration().getConfigData(TokensConfig.class); + // If the token has an empty admin key it can't be updated + if (isEmpty(token.adminKey())) { + validateTrue(BaseTokenHandler.isExpiryOnlyUpdateOp(op), TOKEN_IS_IMMUTABLE); + } + // validate memo + if (op.hasMemo()) { + context.attributeValidator().validateMemo(op.memo()); + } + // validate token symbol, if being changed + if (op.symbol() != null && !op.symbol().isEmpty()) { + validator.validateTokenSymbol(op.symbol(), tokensConfig); + } + // validate token name, if being changed + if (op.name() != null && !op.name().isEmpty()) { + validator.validateTokenName(op.name(), tokensConfig); + } + // validate token keys, if any being changed + validator.validateTokenKeys( + op.hasAdminKey(), op.adminKey(), + op.hasKycKey(), op.kycKey(), + op.hasWipeKey(), op.wipeKey(), + op.hasSupplyKey(), op.supplyKey(), + op.hasFreezeKey(), op.freezeKey(), + op.hasFeeScheduleKey(), op.feeScheduleKey(), + op.hasPauseKey(), op.pauseKey()); + + final var resolvedExpiryMeta = resolveExpiry(token, op, context.expiryValidator()); + validateNewAndExistingAutoRenewAccount( + resolvedExpiryMeta.autoRenewNum(), + token.autoRenewAccountNumber(), + readableAccountStore, + context.expiryValidator()); + return new ValidationResult(token, resolvedExpiryMeta); + } + + private void validateNewAndExistingAutoRenewAccount( + final long resolvedAutoRenewNum, + final long existingAutoRenewNum, + final ReadableAccountStore readableAccountStore, + final ExpiryValidator expiryValidator) { + // Get resolved auto-renewal account + 
getIfUsable(asAccount(resolvedAutoRenewNum), readableAccountStore, expiryValidator, INVALID_AUTORENEW_ACCOUNT); + // If token has an existing auto-renewal account, validate its expiration + // FUTURE : Not sure why we should validate existing auto-renew account. Retained as in mono-service + if (existingAutoRenewNum != 0) { + getIfUsable( + asAccount(existingAutoRenewNum), readableAccountStore, expiryValidator, INVALID_AUTORENEW_ACCOUNT); + } + } + + private ExpiryMeta resolveExpiry( + @NonNull final Token token, + @NonNull final TokenUpdateTransactionBody op, + @NonNull final ExpiryValidator expiryValidator) { + final var givenExpiryMeta = + new ExpiryMeta(token.expiry(), token.autoRenewSecs(), token.autoRenewAccountNumber()); + final var updateExpiryMeta = new ExpiryMeta( + op.hasExpiry() ? op.expiryOrThrow().seconds() : NA, + op.hasAutoRenewPeriod() ? op.autoRenewPeriodOrThrow().seconds() : NA, + op.hasAutoRenewAccount() ? op.autoRenewAccountOrThrow().accountNum() : NA); + return expiryValidator.resolveUpdateAttempt(givenExpiryMeta, updateExpiryMeta); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableNftStoreTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableNftStoreTest.java index 7efcaa0ba553..943ae9d453ff 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableNftStoreTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/WritableNftStoreTest.java @@ -16,6 +16,7 @@ package com.hedera.node.app.service.token.impl.test; +import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.*; import static org.mockito.BDDMockito.given; @@ -23,7 +24,6 @@ import com.hedera.hapi.node.state.common.UniqueTokenId; import com.hedera.hapi.node.state.token.Nft; import com.hedera.node.app.service.token.impl.WritableNftStore; -import com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler; import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoTokenHandlerTestBase; import java.util.Collections; import java.util.Set; @@ -147,7 +147,7 @@ void removesByTokenIdAndSerialNum() { writableNftStore = new WritableNftStore(writableStates); assertNotNull(writableNftStore.get(nftToRemove)); - writableNftStore.remove(BaseTokenHandler.asToken(nftToRemove.tokenTypeNumber()), nftToRemove.serialNumber()); + writableNftStore.remove(asToken(nftToRemove.tokenTypeNumber()), nftToRemove.serialNumber()); // Assert the NFT is removed assertNull(writableNftStore.get(nftToRemove)); diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerTest.java index 394c30321e31..76b6ca9fa6d9 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenMintHandlerTest.java @@ -108,7 +108,7 @@ void acceptsValidNonFungibleTokenMintTxn() { givenMintTxn(nonFungibleTokenId, List.of(metadata1, metadata2), null); 
assertThat(writableTokenRelStore.get(treasuryId, nonFungibleTokenId).balance()) - .isEqualTo(1000L); + .isEqualTo(1L); assertThat(writableAccountStore.get(treasuryId).tinybarBalance()).isEqualTo(10000L); assertThat(writableAccountStore.get(treasuryId).numberOwnedNfts()).isEqualTo(2); assertThat(writableTokenStore.get(nonFungibleTokenId).totalSupply()).isEqualTo(1000L); @@ -118,7 +118,7 @@ void acceptsValidNonFungibleTokenMintTxn() { // treasury relation balance will increase by metadata list size assertThat(writableTokenRelStore.get(treasuryId, nonFungibleTokenId).balance()) - .isEqualTo(1002L); + .isEqualTo(3L); // tinybar balance should not get affected assertThat(writableAccountStore.get(treasuryId).tinybarBalance()).isEqualTo(10000L); @@ -126,7 +126,7 @@ void acceptsValidNonFungibleTokenMintTxn() { assertThat(writableAccountStore.get(treasuryId).numberOwnedNfts()).isEqualTo(4); // treasury relation supply will not increase since its not fungible token change assertThat(writableTokenStore.get(nonFungibleTokenId).totalSupply()).isEqualTo(1000L); - assertThat(recordBuilder.serialNumbers()).isEqualTo(List.of(1L, 2L)); + assertThat(recordBuilder.serialNumbers()).isEqualTo(List.of(3L, 4L)); } @Test diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUpdateHandlerParityTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUpdateHandlerParityTest.java index a3e00a66203c..47c21fd5710a 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUpdateHandlerParityTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUpdateHandlerParityTest.java @@ -50,17 +50,21 @@ import com.hedera.node.app.service.token.ReadableTokenStore; import com.hedera.node.app.service.token.impl.handlers.TokenUpdateHandler; import com.hedera.node.app.service.token.impl.test.handlers.util.ParityTestBase; +import com.hedera.node.app.service.token.impl.validators.TokenAttributesValidator; +import com.hedera.node.app.service.token.impl.validators.TokenUpdateValidator; import com.hedera.node.app.spi.fixtures.workflows.FakePreHandleContext; import com.hedera.node.app.spi.workflows.PreCheckException; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; class TokenUpdateHandlerParityTest extends ParityTestBase { - private final TokenUpdateHandler subject = new TokenUpdateHandler(); + private TokenUpdateHandler subject; @BeforeEach public void setUp() { super.setUp(); + final var validator = new TokenUpdateValidator(new TokenAttributesValidator()); + subject = new TokenUpdateHandler(validator); } @Test diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUpdateHandlerTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUpdateHandlerTest.java new file mode 100644 index 000000000000..61516d965a29 --- /dev/null +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/TokenUpdateHandlerTest.java @@ -0,0 +1,1033 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.service.token.impl.test.handlers; + +import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_DELETED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_EXPIRED_AND_PENDING_REMOVAL; +import static com.hedera.hapi.node.base.ResponseCodeEnum.ACCOUNT_FROZEN_FOR_TOKEN; +import static com.hedera.hapi.node.base.ResponseCodeEnum.CURRENT_TREASURY_STILL_OWNS_NFTS; +import static com.hedera.hapi.node.base.ResponseCodeEnum.EXPIRATION_REDUCTION_NOT_ALLOWED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ADMIN_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_AUTORENEW_ACCOUNT; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_CUSTOM_FEE_SCHEDULE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_EXPIRATION_TIME; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_PAUSE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_SUPPLY_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TOKEN_ID; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_TREASURY_ACCOUNT_FOR_TOKEN; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_WIPE_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.INVALID_ZERO_BYTE_IN_STRING; +import static com.hedera.hapi.node.base.ResponseCodeEnum.MEMO_TOO_LONG; +import static com.hedera.hapi.node.base.ResponseCodeEnum.NO_REMAINING_AUTOMATIC_ASSOCIATIONS; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_HAS_NO_KYC_KEY; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_IS_IMMUTABLE; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_IS_PAUSED; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_NAME_TOO_LONG; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_SYMBOL_TOO_LONG; +import static com.hedera.hapi.node.base.ResponseCodeEnum.TOKEN_WAS_DELETED; +import static com.hedera.hapi.node.base.TokenType.FUNGIBLE_COMMON; +import static com.hedera.hapi.node.base.TokenType.NON_FUNGIBLE_UNIQUE; +import static com.hedera.node.app.service.mono.context.properties.PropertyNames.ENTITIES_MAX_LIFETIME; +import static com.hedera.node.app.service.token.impl.handlers.BaseCryptoHandler.asAccount; +import static com.hedera.node.app.service.token.impl.handlers.BaseTokenHandler.asToken; +import static com.hedera.node.app.spi.fixtures.workflows.ExceptionConditions.responseCode; +import static com.hedera.node.app.spi.workflows.HandleException.validateTrue; +import static com.hedera.test.utils.KeyUtils.B_COMPLEX_KEY; +import static org.assertj.core.api.Assertions.assertThatNoException; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mock.Strictness.LENIENT; + +import com.hedera.hapi.node.base.AccountID; +import com.hedera.hapi.node.base.Duration; +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.Timestamp; 
+import com.hedera.hapi.node.base.TokenID; +import com.hedera.hapi.node.base.TransactionID; +import com.hedera.hapi.node.state.token.TokenRelation; +import com.hedera.hapi.node.token.TokenUpdateTransactionBody; +import com.hedera.hapi.node.transaction.TransactionBody; +import com.hedera.node.app.config.VersionedConfigImpl; +import com.hedera.node.app.service.mono.config.HederaNumbers; +import com.hedera.node.app.service.mono.context.properties.GlobalDynamicProperties; +import com.hedera.node.app.service.mono.context.properties.PropertySource; +import com.hedera.node.app.service.token.ReadableAccountStore; +import com.hedera.node.app.service.token.ReadableTokenRelationStore; +import com.hedera.node.app.service.token.ReadableTokenStore; +import com.hedera.node.app.service.token.impl.WritableAccountStore; +import com.hedera.node.app.service.token.impl.WritableTokenRelationStore; +import com.hedera.node.app.service.token.impl.WritableTokenStore; +import com.hedera.node.app.service.token.impl.handlers.TokenUpdateHandler; +import com.hedera.node.app.service.token.impl.test.handlers.util.CryptoTokenHandlerTestBase; +import com.hedera.node.app.service.token.impl.validators.TokenAttributesValidator; +import com.hedera.node.app.service.token.impl.validators.TokenUpdateValidator; +import com.hedera.node.app.spi.validation.AttributeValidator; +import com.hedera.node.app.spi.validation.ExpiryValidator; +import com.hedera.node.app.spi.workflows.HandleContext; +import com.hedera.node.app.spi.workflows.HandleException; +import com.hedera.node.app.workflows.handle.validation.StandardizedAttributeValidator; +import com.hedera.node.app.workflows.handle.validation.StandardizedExpiryValidator; +import com.hedera.node.config.ConfigProvider; +import com.hedera.node.config.testfixtures.HederaTestConfigBuilder; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class TokenUpdateHandlerTest extends CryptoTokenHandlerTestBase { + @Mock(strictness = LENIENT) + private HandleContext handleContext; + + @Mock(strictness = LENIENT) + private ConfigProvider configProvider; + + @Mock(strictness = LENIENT) + private PropertySource compositeProps; + + @Mock(strictness = LENIENT) + private HederaNumbers hederaNumbers; + + @Mock(strictness = LENIENT) + private GlobalDynamicProperties dynamicProperties; + + private TransactionBody txn; + private ExpiryValidator expiryValidator; + private AttributeValidator attributeValidator; + private TokenUpdateHandler subject; + + @BeforeEach + public void setUp() { + super.setUp(); + refreshWritableStores(); + final TokenUpdateValidator validator = new TokenUpdateValidator(new TokenAttributesValidator()); + subject = new TokenUpdateHandler(validator); + givenStoresAndConfig(handleContext); + setUpTxnContext(); + } + + @Test + void happyPathForFungibleTokenUpdate() { + txn = new TokenUpdateBuilder().build(); + given(handleContext.body()).willReturn(txn); + + final var token = readableTokenStore.get(fungibleTokenId); + assertThat(token.symbol()).isEqualTo(fungibleToken.symbol()); + assertThat(token.name()).isEqualTo(fungibleToken.name()); + assertThat(token.treasuryAccountNumber()).isEqualTo(fungibleToken.treasuryAccountNumber()); + assertThat(token.adminKey()).isEqualTo(fungibleToken.adminKey()); + 
assertThat(token.supplyKey()).isEqualTo(fungibleToken.supplyKey()); + assertThat(token.kycKey()).isEqualTo(fungibleToken.kycKey()); + assertThat(token.freezeKey()).isEqualTo(fungibleToken.freezeKey()); + assertThat(token.wipeKey()).isEqualTo(fungibleToken.wipeKey()); + assertThat(token.feeScheduleKey()).isEqualTo(fungibleToken.feeScheduleKey()); + assertThat(token.pauseKey()).isEqualTo(fungibleToken.pauseKey()); + assertThat(token.autoRenewAccountNumber()).isEqualTo(fungibleToken.autoRenewAccountNumber()); + assertThat(token.expiry()).isEqualTo(fungibleToken.expiry()); + assertThat(token.memo()).isEqualTo(fungibleToken.memo()); + assertThat(token.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs()); + assertThat(token.tokenType()).isEqualTo(FUNGIBLE_COMMON); + + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + + final var modifiedToken = writableTokenStore.get(fungibleTokenId); + assertThat(modifiedToken.symbol()).isEqualTo("TTT"); + assertThat(modifiedToken.name()).isEqualTo("TestToken1"); + assertThat(modifiedToken.treasuryAccountNumber()).isEqualTo(ownerId.accountNum()); + assertThat(modifiedToken.adminKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.supplyKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.kycKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.freezeKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.wipeKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.feeScheduleKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.pauseKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.autoRenewAccountNumber()).isEqualTo(ownerId.accountNum()); + assertThat(modifiedToken.expiry()).isEqualTo(1234600L); + assertThat(modifiedToken.memo()).isEqualTo("test token1"); + assertThat(modifiedToken.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs()); + assertThat(token.tokenType()).isEqualTo(FUNGIBLE_COMMON); + } + + @Test + void happyPathForNonFungibleTokenUpdate() { + txn = new TokenUpdateBuilder().build(); + given(handleContext.body()).willReturn(txn); + + final var token = readableTokenStore.get(nonFungibleTokenId); + assertThat(token.symbol()).isEqualTo(nonFungibleToken.symbol()); + assertThat(token.name()).isEqualTo(nonFungibleToken.name()); + assertThat(token.treasuryAccountNumber()).isEqualTo(nonFungibleToken.treasuryAccountNumber()); + assertThat(token.adminKey()).isEqualTo(nonFungibleToken.adminKey()); + assertThat(token.supplyKey()).isEqualTo(nonFungibleToken.supplyKey()); + assertThat(token.kycKey()).isEqualTo(nonFungibleToken.kycKey()); + assertThat(token.freezeKey()).isEqualTo(nonFungibleToken.freezeKey()); + assertThat(token.wipeKey()).isEqualTo(nonFungibleToken.wipeKey()); + assertThat(token.feeScheduleKey()).isEqualTo(nonFungibleToken.feeScheduleKey()); + assertThat(token.pauseKey()).isEqualTo(nonFungibleToken.pauseKey()); + assertThat(token.autoRenewAccountNumber()).isEqualTo(nonFungibleToken.autoRenewAccountNumber()); + assertThat(token.expiry()).isEqualTo(nonFungibleToken.expiry()); + assertThat(token.memo()).isEqualTo(nonFungibleToken.memo()); + assertThat(token.autoRenewSecs()).isEqualTo(nonFungibleToken.autoRenewSecs()); + assertThat(token.tokenType()).isEqualTo(NON_FUNGIBLE_UNIQUE); + + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + + final var modifiedToken = writableTokenStore.get(fungibleTokenId); + assertThat(modifiedToken.symbol()).isEqualTo("TTT"); + assertThat(modifiedToken.name()).isEqualTo("TestToken1"); + 
assertThat(modifiedToken.treasuryAccountNumber()).isEqualTo(ownerId.accountNum()); + assertThat(modifiedToken.adminKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.supplyKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.kycKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.freezeKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.wipeKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.feeScheduleKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.pauseKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.autoRenewAccountNumber()).isEqualTo(ownerId.accountNum()); + assertThat(modifiedToken.expiry()).isEqualTo(1234600L); + assertThat(modifiedToken.memo()).isEqualTo("test token1"); + assertThat(modifiedToken.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs()); + assertThat(token.tokenType()).isEqualTo(NON_FUNGIBLE_UNIQUE); + } + + @Test + void invalidTokenFails() { + txn = new TokenUpdateBuilder().withToken(asToken(1000)).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_TOKEN_ID)); + } + + @Test + void failsIfTokenImmutable() { + final var copyToken = writableTokenStore + .get(fungibleTokenId) + .copyBuilder() + .adminKey((Key) null) + .build(); + writableTokenStore.put(copyToken); + given(handleContext.readableStore(ReadableTokenStore.class)).willReturn(writableTokenStore); + txn = new TokenUpdateBuilder().build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_IS_IMMUTABLE)); + } + + @Test + void failsIfTokenHasNoKycGrantedImmutable() { + final var copyTokenRel = writableTokenRelStore + .get(treasuryId, fungibleTokenId) + .copyBuilder() + .kycGranted(false) + .build(); + writableTokenRelStore.put(copyTokenRel); + given(handleContext.readableStore(ReadableTokenRelationStore.class)).willReturn(writableTokenRelStore); + txn = new TokenUpdateBuilder().build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_HAS_NO_KYC_KEY)); + } + + @Test + void failsIfTokenRelIsFrozen() { + final var copyTokenRel = writableTokenRelStore + .get(treasuryId, fungibleTokenId) + .copyBuilder() + .frozen(true) + .build(); + writableTokenRelStore.put(copyTokenRel); + given(handleContext.readableStore(ReadableTokenRelationStore.class)).willReturn(writableTokenRelStore); + txn = new TokenUpdateBuilder().build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(ACCOUNT_FROZEN_FOR_TOKEN)); + } + + @Test + void failsIfMemoTooLong() { + txn = new TokenUpdateBuilder() + .withMemo("12345678904634634563436462343254534e5365453435452454524541534353665324545435") + .build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(MEMO_TOO_LONG)); + } + + @Test + void failsIfMemoHasZeroByte() { + given(dynamicProperties.maxMemoUtf8Bytes()).willReturn(100); + txn = new TokenUpdateBuilder().withMemo("\0").build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + 
.has(responseCode(INVALID_ZERO_BYTE_IN_STRING)); + } + + @Test + void doesntFailForZeroLengthSymbolUpdate() { + txn = new TokenUpdateBuilder().withSymbol("").build(); + given(handleContext.body()).willReturn(txn); + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + } + + @Test + void doesntFailForNullSymbol() { + setUpTxnContext(); + txn = new TokenUpdateBuilder().withSymbol(null).build(); + given(handleContext.body()).willReturn(txn); + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + } + + @Test + void failsForVeryLongSymbol() { + setUpTxnContext(); + txn = new TokenUpdateBuilder() + .withSymbol("1234567890123456789012345678901234567890123456789012345678901234567890") + .build(); + configuration = new HederaTestConfigBuilder() + .withValue("tokens.maxSymbolUtf8Bytes", "10") + .getOrCreateConfig(); + given(handleContext.configuration()).willReturn(configuration); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + given(handleContext.body()).willReturn(txn); + + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + Assertions.assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_SYMBOL_TOO_LONG)); + } + + @Test + void doesntFailForZeroLengthName() { + txn = new TokenUpdateBuilder().withName("").build(); + given(handleContext.body()).willReturn(txn); + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + } + + @Test + void doesntFailForNullName() { + txn = new TokenUpdateBuilder().withName(null).build(); + given(handleContext.body()).willReturn(txn); + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + } + + @Test + void failsForVeryLongName() { + txn = new TokenUpdateBuilder() + .withName("1234567890123456789012345678901234567890123456789012345678901234567890") + .build(); + configuration = new HederaTestConfigBuilder() + .withValue("tokens.maxTokenNameUtf8Bytes", "10") + .getOrCreateConfig(); + given(handleContext.configuration()).willReturn(configuration); + given(configProvider.getConfiguration()).willReturn(new VersionedConfigImpl(configuration, 1)); + given(handleContext.body()).willReturn(txn); + + assertThatNoException().isThrownBy(() -> subject.pureChecks(txn)); + Assertions.assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_NAME_TOO_LONG)); + } + + @Test + void worksWithUnassociatedNewTreasuryIfAutoAssociationsAvailable() { + txn = new TokenUpdateBuilder() + .withTreasury(payerId) + .withToken(fungibleTokenId) + .build(); + given(handleContext.body()).willReturn(txn); + writableTokenRelStore.remove(TokenRelation.newBuilder() + .tokenNumber(fungibleTokenId.tokenNum()) + .accountNumber(payerId.accountNum()) + .build()); + given(handleContext.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); + given(handleContext.readableStore(ReadableTokenRelationStore.class)).willReturn(writableTokenRelStore); + assertThat(writableTokenRelStore.get(payerId, fungibleTokenId)).isNull(); + + final var token = readableTokenStore.get(fungibleTokenId); + 
assertThat(token.symbol()).isEqualTo(fungibleToken.symbol()); + assertThat(token.name()).isEqualTo(fungibleToken.name()); + assertThat(token.treasuryAccountNumber()).isEqualTo(fungibleToken.treasuryAccountNumber()); + assertThat(token.adminKey()).isEqualTo(fungibleToken.adminKey()); + assertThat(token.supplyKey()).isEqualTo(fungibleToken.supplyKey()); + assertThat(token.kycKey()).isEqualTo(fungibleToken.kycKey()); + assertThat(token.freezeKey()).isEqualTo(fungibleToken.freezeKey()); + assertThat(token.wipeKey()).isEqualTo(fungibleToken.wipeKey()); + assertThat(token.feeScheduleKey()).isEqualTo(fungibleToken.feeScheduleKey()); + assertThat(token.pauseKey()).isEqualTo(fungibleToken.pauseKey()); + assertThat(token.autoRenewAccountNumber()).isEqualTo(fungibleToken.autoRenewAccountNumber()); + assertThat(token.expiry()).isEqualTo(fungibleToken.expiry()); + assertThat(token.memo()).isEqualTo(fungibleToken.memo()); + assertThat(token.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs()); + assertThat(token.tokenType()).isEqualTo(FUNGIBLE_COMMON); + + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + + final var rel = writableTokenRelStore.get(payerId, fungibleTokenId); + + assertThat(rel).isNotNull(); + final var modifiedToken = writableTokenStore.get(fungibleTokenId); + assertThat(modifiedToken.symbol()).isEqualTo("TTT"); + assertThat(modifiedToken.name()).isEqualTo("TestToken1"); + assertThat(modifiedToken.treasuryAccountNumber()).isEqualTo(payerId.accountNum()); + assertThat(modifiedToken.adminKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.supplyKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.kycKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.freezeKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.wipeKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.feeScheduleKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.pauseKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.autoRenewAccountNumber()).isEqualTo(ownerId.accountNum()); + assertThat(modifiedToken.expiry()).isEqualTo(1234600L); + assertThat(modifiedToken.memo()).isEqualTo("test token1"); + assertThat(modifiedToken.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs()); + assertThat(modifiedToken.tokenType()).isEqualTo(FUNGIBLE_COMMON); + + assertThat(rel.frozen()).isFalse(); + assertThat(rel.kycGranted()).isTrue(); + } + + @Test + void worksWithUnassociatedNewTreasuryIfAutoAssociationsAvailableForNFT() { + txn = new TokenUpdateBuilder() + .withTreasury(payerId) + .withToken(nonFungibleTokenId) + .build(); + given(handleContext.body()).willReturn(txn); + writableTokenRelStore.remove(TokenRelation.newBuilder() + .tokenNumber(nonFungibleTokenId.tokenNum()) + .accountNumber(payerId.accountNum()) + .build()); + given(handleContext.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); + given(handleContext.readableStore(ReadableTokenRelationStore.class)).willReturn(writableTokenRelStore); + assertThat(writableTokenRelStore.get(payerId, nonFungibleTokenId)).isNull(); + + final var token = readableTokenStore.get(nonFungibleTokenId); + assertThat(token.symbol()).isEqualTo(nonFungibleToken.symbol()); + assertThat(token.name()).isEqualTo(nonFungibleToken.name()); + assertThat(token.treasuryAccountNumber()).isEqualTo(nonFungibleToken.treasuryAccountNumber()); + assertThat(token.adminKey()).isEqualTo(nonFungibleToken.adminKey()); + assertThat(token.supplyKey()).isEqualTo(nonFungibleToken.supplyKey()); + 
assertThat(token.kycKey()).isEqualTo(nonFungibleToken.kycKey()); + assertThat(token.freezeKey()).isEqualTo(nonFungibleToken.freezeKey()); + assertThat(token.wipeKey()).isEqualTo(nonFungibleToken.wipeKey()); + assertThat(token.feeScheduleKey()).isEqualTo(nonFungibleToken.feeScheduleKey()); + assertThat(token.pauseKey()).isEqualTo(nonFungibleToken.pauseKey()); + assertThat(token.autoRenewAccountNumber()).isEqualTo(nonFungibleToken.autoRenewAccountNumber()); + assertThat(token.expiry()).isEqualTo(nonFungibleToken.expiry()); + assertThat(token.memo()).isEqualTo(nonFungibleToken.memo()); + assertThat(token.autoRenewSecs()).isEqualTo(nonFungibleToken.autoRenewSecs()); + assertThat(token.tokenType()).isEqualTo(NON_FUNGIBLE_UNIQUE); + + final var newTreasury = writableAccountStore.get(payerId); + final var oldTreasury = writableAccountStore.get(treasuryId); + assertThat(newTreasury.numberOwnedNfts()).isEqualTo(2); + assertThat(oldTreasury.numberOwnedNfts()).isEqualTo(2); + + final var newTreasuryRel = writableTokenRelStore.get(payerId, nonFungibleTokenId); + final var oldTreasuryRel = writableTokenRelStore.get(treasuryId, nonFungibleTokenId); + assertThat(newTreasuryRel).isNull(); + assertThat(oldTreasuryRel.balance()).isEqualTo(1); + + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + + final var rel = writableTokenRelStore.get(payerId, nonFungibleTokenId); + + assertThat(rel).isNotNull(); + final var modifiedToken = writableTokenStore.get(nonFungibleTokenId); + assertThat(modifiedToken.symbol()).isEqualTo("TTT"); + assertThat(modifiedToken.name()).isEqualTo("TestToken1"); + assertThat(modifiedToken.treasuryAccountNumber()).isEqualTo(payerId.accountNum()); + assertThat(rel.balance()).isEqualTo(1); + assertThat(modifiedToken.adminKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.supplyKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.kycKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.freezeKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.wipeKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.feeScheduleKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.pauseKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.autoRenewAccountNumber()).isEqualTo(ownerId.accountNum()); + assertThat(modifiedToken.expiry()).isEqualTo(1234600L); + assertThat(modifiedToken.memo()).isEqualTo("test token1"); + assertThat(modifiedToken.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs()); + assertThat(modifiedToken.tokenType()).isEqualTo(NON_FUNGIBLE_UNIQUE); + + assertThat(rel.frozen()).isFalse(); + assertThat(rel.kycGranted()).isTrue(); + + final var modifiedNewTreasury = writableAccountStore.get(payerId); + final var modifiedOldTreasury = writableAccountStore.get(treasuryId); + assertThat(modifiedNewTreasury.numberOwnedNfts()).isEqualTo(3); + assertThat(modifiedOldTreasury.numberOwnedNfts()).isEqualTo(1); + } + + @Test + void failsIfNoAutoAssociationsAvailableForNewUnassociatedTreasury() { + txn = new TokenUpdateBuilder() + .withTreasury(payerId) + .withToken(fungibleTokenId) + .build(); + given(handleContext.body()).willReturn(txn); + writableTokenRelStore.remove(TokenRelation.newBuilder() + .tokenNumber(fungibleTokenId.tokenNum()) + .accountNumber(payerId.accountNum()) + .build()); + writableAccountStore.put(account.copyBuilder() + .maxAutoAssociations(0) + .usedAutoAssociations(0) + .build()); + given(handleContext.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); + 
given(handleContext.readableStore(ReadableTokenRelationStore.class)).willReturn(writableTokenRelStore); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + given(handleContext.readableStore(ReadableAccountStore.class)).willReturn(writableAccountStore); + assertThat(writableTokenRelStore.get(payerId, fungibleTokenId)).isNull(); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(NO_REMAINING_AUTOMATIC_ASSOCIATIONS)); + } + + @Test + void failsOnInvalidNewTreasury() { + txn = new TokenUpdateBuilder().withTreasury(asAccount(2000000)).build(); + given(handleContext.body()).willReturn(txn); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_TREASURY_ACCOUNT_FOR_TOKEN)); + } + + @Test + void failsOnDetachedNewTreasury() { + txn = new TokenUpdateBuilder().withTreasury(payerId).build(); + writableAccountStore.put(account.copyBuilder() + .expiredAndPendingRemoval(true) + .tinybarBalance(0) + .expiry(consensusInstant.getEpochSecond() - 10000) + .build()); + given(handleContext.body()).willReturn(txn); + writableTokenRelStore.remove(TokenRelation.newBuilder() + .tokenNumber(fungibleTokenId.tokenNum()) + .accountNumber(payerId.accountNum()) + .build()); + given(handleContext.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); + given(handleContext.readableStore(ReadableTokenRelationStore.class)).willReturn(writableTokenRelStore); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + given(handleContext.readableStore(ReadableAccountStore.class)).willReturn(writableAccountStore); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(ACCOUNT_EXPIRED_AND_PENDING_REMOVAL)); + } + + @Test + void failsOnDetachedOldTreasury() { + txn = new TokenUpdateBuilder().build(); + writableAccountStore.put(treasuryAccount + .copyBuilder() + .expiredAndPendingRemoval(true) + .tinybarBalance(0) + .expiry(consensusInstant.getEpochSecond() - 10000) + .build()); + given(handleContext.body()).willReturn(txn); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + given(handleContext.readableStore(ReadableAccountStore.class)).willReturn(writableAccountStore); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(ACCOUNT_EXPIRED_AND_PENDING_REMOVAL)); + } + + @Test + void failsOnDetachedNewAutoRenewAccount() { + txn = new TokenUpdateBuilder().withAutoRenewAccount(payerId).build(); + writableAccountStore.put(account.copyBuilder() + .expiredAndPendingRemoval(true) + .tinybarBalance(0) + .expiry(consensusInstant.getEpochSecond() - 10000) + .build()); + given(handleContext.body()).willReturn(txn); + writableTokenRelStore.remove(TokenRelation.newBuilder() + .tokenNumber(fungibleTokenId.tokenNum()) + .accountNumber(payerId.accountNum()) + .build()); + given(handleContext.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); + given(handleContext.readableStore(ReadableTokenRelationStore.class)).willReturn(writableTokenRelStore); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + given(handleContext.readableStore(ReadableAccountStore.class)).willReturn(writableAccountStore); + + assertThatThrownBy(() -> 
subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(ACCOUNT_EXPIRED_AND_PENDING_REMOVAL)); + } + + @Test + void failsOnDetachedOldAutoRenewAccount() { + txn = new TokenUpdateBuilder().build(); + writableAccountStore.put(spenderAccount + .copyBuilder() + .expiredAndPendingRemoval(true) + .tinybarBalance(0) + .expiry(consensusInstant.getEpochSecond() - 10000) + .build()); + given(handleContext.body()).willReturn(txn); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + given(handleContext.readableStore(ReadableAccountStore.class)).willReturn(writableAccountStore); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(ACCOUNT_EXPIRED_AND_PENDING_REMOVAL)); + } + + @Test + void failsOnDeletedOldAutoRenewAccount() { + txn = new TokenUpdateBuilder().build(); + writableAccountStore.put(spenderAccount.copyBuilder().deleted(true).build()); + given(handleContext.body()).willReturn(txn); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + given(handleContext.readableStore(ReadableAccountStore.class)).willReturn(writableAccountStore); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(ACCOUNT_DELETED)); + } + + @Test + void failsOnDeletedNewAutoRenewAccount() { + txn = new TokenUpdateBuilder().withAutoRenewAccount(payerId).build(); + writableAccountStore.put(account.copyBuilder().deleted(true).build()); + given(handleContext.body()).willReturn(txn); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + given(handleContext.readableStore(ReadableAccountStore.class)).willReturn(writableAccountStore); + + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(ACCOUNT_DELETED)); + } + + @Test + void permitsExtendingOnlyExpiryWithoutAdminKey() { + final var transactionID = + TransactionID.newBuilder().accountID(payerId).transactionValidStart(consensusTimestamp); + final var body = TokenUpdateTransactionBody.newBuilder() + .token(fungibleTokenId) + .expiry(Timestamp.newBuilder().seconds(1234600L).build()) + .build(); + txn = TransactionBody.newBuilder() + .transactionID(transactionID) + .tokenUpdate(body) + .build(); + + given(handleContext.body()).willReturn(txn); + + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + } + + @Test + void failsOnReducedNewExpiry() { + txn = new TokenUpdateBuilder() + .withExpiry(consensusInstant.getEpochSecond() - 72000) + .build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(EXPIRATION_REDUCTION_NOT_ALLOWED)); + } + + @Test + void failsOnInvalidNewExpiry() { + given(compositeProps.getLongProperty(ENTITIES_MAX_LIFETIME)).willReturn(3_000_000_000L); + txn = new TokenUpdateBuilder().withExpiry(3_000_000_000L + 10).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_EXPIRATION_TIME)); + } + + @Test + void failsOnAlreadyDeletedToken() { + final var copyToken = writableTokenStore + .get(fungibleTokenId) + .copyBuilder() + .deleted(true) + .build(); + writableTokenStore.put(copyToken); + 
given(handleContext.readableStore(ReadableTokenStore.class)).willReturn(writableTokenStore); + txn = new TokenUpdateBuilder().build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_WAS_DELETED)); + } + + @Test + void failsOnPausedToken() { + final var copyToken = writableTokenStore + .get(fungibleTokenId) + .copyBuilder() + .paused(true) + .build(); + writableTokenStore.put(copyToken); + given(handleContext.readableStore(ReadableTokenStore.class)).willReturn(writableTokenStore); + txn = new TokenUpdateBuilder().build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(TOKEN_IS_PAUSED)); + } + + @Test + void doesntReplaceIdenticalTreasury() { + final var transactionID = + TransactionID.newBuilder().accountID(payerId).transactionValidStart(consensusTimestamp); + final var body = TokenUpdateTransactionBody.newBuilder() + .token(fungibleTokenId) + .treasury(treasuryId) + .build(); + txn = TransactionBody.newBuilder() + .transactionID(transactionID) + .tokenUpdate(body) + .build(); + given(handleContext.body()).willReturn(txn); + + final var oldRel = writableTokenRelStore.get(treasuryId, fungibleTokenId); + assertThat(oldRel).isNotNull(); + final var oldToken = writableTokenStore.get(fungibleTokenId); + + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + + final var newRel = writableTokenRelStore.get(treasuryId, fungibleTokenId); + assertThat(newRel).isNotNull(); + + final var modifiedToken = writableTokenStore.get(fungibleTokenId); + assertThat(oldToken).isEqualTo(modifiedToken); + assertThat(oldRel).isEqualTo(newRel); + } + + @Test + void followsHappyPathWithNewTreasuryAndZeroBalanceOldTreasury() { + txn = new TokenUpdateBuilder() + .withTreasury(payerId) + .withToken(fungibleTokenId) + .build(); + given(handleContext.body()).willReturn(txn); + writableTokenRelStore.remove(TokenRelation.newBuilder() + .tokenNumber(fungibleTokenId.tokenNum()) + .accountNumber(payerId.accountNum()) + .build()); + writableAccountStore.put(account.copyBuilder().numberPositiveBalances(0).build()); + given(handleContext.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); + given(handleContext.readableStore(ReadableTokenRelationStore.class)).willReturn(writableTokenRelStore); + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + given(handleContext.readableStore(ReadableAccountStore.class)).willReturn(writableAccountStore); + assertThat(writableTokenRelStore.get(payerId, fungibleTokenId)).isNull(); + + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + + final var rel = writableTokenRelStore.get(payerId, fungibleTokenId); + + assertThat(rel).isNotNull(); + final var modifiedToken = writableTokenStore.get(fungibleTokenId); + assertThat(modifiedToken.symbol()).isEqualTo("TTT"); + assertThat(modifiedToken.name()).isEqualTo("TestToken1"); + assertThat(modifiedToken.treasuryAccountNumber()).isEqualTo(payerId.accountNum()); + assertThat(modifiedToken.adminKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.supplyKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.wipeKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.feeScheduleKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.pauseKey()).isEqualTo(B_COMPLEX_KEY); + 
assertThat(modifiedToken.autoRenewAccountNumber()).isEqualTo(ownerId.accountNum()); + assertThat(modifiedToken.expiry()).isEqualTo(1234600L); + assertThat(modifiedToken.memo()).isEqualTo("test token1"); + assertThat(modifiedToken.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs()); + assertThat(modifiedToken.tokenType()).isEqualTo(FUNGIBLE_COMMON); + } + + @Test + void doesntGrantKycOrUnfreezeNewTreasuryIfNoKeyIsPresent() { + txn = new TokenUpdateBuilder() + .withTreasury(payerId) + .withToken(fungibleTokenId) + .withKycKey(null) + .wthFreezeKey(null) + .build(); + given(handleContext.body()).willReturn(txn); + writableTokenRelStore.remove(TokenRelation.newBuilder() + .tokenNumber(fungibleTokenId.tokenNum()) + .accountNumber(payerId.accountNum()) + .build()); + given(handleContext.writableStore(WritableTokenRelationStore.class)).willReturn(writableTokenRelStore); + given(handleContext.readableStore(ReadableTokenRelationStore.class)).willReturn(writableTokenRelStore); + assertThat(writableTokenRelStore.get(payerId, fungibleTokenId)).isNull(); + + writableTokenStore.put(fungibleToken + .copyBuilder() + .kycKey((Key) null) + .freezeKey((Key) null) + .build()); + given(handleContext.writableStore(WritableTokenStore.class)).willReturn(writableTokenStore); + given(handleContext.readableStore(ReadableTokenStore.class)).willReturn(writableTokenStore); + + assertThatNoException().isThrownBy(() -> subject.handle(handleContext)); + + final var rel = writableTokenRelStore.get(payerId, fungibleTokenId); + + assertThat(rel).isNotNull(); + final var modifiedToken = writableTokenStore.get(fungibleTokenId); + assertThat(modifiedToken.symbol()).isEqualTo("TTT"); + assertThat(modifiedToken.name()).isEqualTo("TestToken1"); + assertThat(modifiedToken.treasuryAccountNumber()).isEqualTo(payerId.accountNum()); + assertThat(modifiedToken.adminKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.supplyKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.wipeKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.feeScheduleKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.pauseKey()).isEqualTo(B_COMPLEX_KEY); + assertThat(modifiedToken.autoRenewAccountNumber()).isEqualTo(ownerId.accountNum()); + assertThat(modifiedToken.expiry()).isEqualTo(1234600L); + assertThat(modifiedToken.memo()).isEqualTo("test token1"); + assertThat(modifiedToken.autoRenewSecs()).isEqualTo(fungibleToken.autoRenewSecs()); + assertThat(modifiedToken.tokenType()).isEqualTo(FUNGIBLE_COMMON); + + assertThat(rel.frozen()).isFalse(); + assertThat(rel.kycGranted()).isTrue(); + } + + @Test + void validatesUpdatingKeys() { + txn = new TokenUpdateBuilder().withAdminKey(Key.DEFAULT).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_ADMIN_KEY)); + + txn = new TokenUpdateBuilder().withSupplyKey(Key.DEFAULT).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_SUPPLY_KEY)); + + txn = new TokenUpdateBuilder().withWipeKey(Key.DEFAULT).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_WIPE_KEY)); + + txn = new TokenUpdateBuilder().withFeeScheduleKey(Key.DEFAULT).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> 
subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_CUSTOM_FEE_SCHEDULE_KEY)); + + txn = new TokenUpdateBuilder().withPauseKey(Key.DEFAULT).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(INVALID_PAUSE_KEY)); + } + + @Test + void rejectsTreasuryUpdateIfNonzeroBalanceForNFTs() { + final var copyTokenRel = writableTokenRelStore + .get(treasuryId, nonFungibleTokenId) + .copyBuilder() + .balance(1) + .build(); + configuration = new HederaTestConfigBuilder() + .withValue("tokens.nfts.useTreasuryWildcards", "false") + .getOrCreateConfig(); + given(handleContext.configuration()).willReturn(configuration); + writableTokenRelStore.put(copyTokenRel); + given(handleContext.readableStore(ReadableTokenRelationStore.class)).willReturn(writableTokenRelStore); + txn = new TokenUpdateBuilder().withToken(nonFungibleTokenId).build(); + given(handleContext.body()).willReturn(txn); + assertThatThrownBy(() -> subject.handle(handleContext)) + .isInstanceOf(HandleException.class) + .has(responseCode(CURRENT_TREASURY_STILL_OWNS_NFTS)); + } + + /* --------------------------------- Helpers --------------------------------- */ + /** + * A builder for {@link com.hedera.hapi.node.transaction.TransactionBody} instances. + */ + private class TokenUpdateBuilder { + private AccountID payer = payerId; + private AccountID treasury = ownerId; + private Key adminKey = B_COMPLEX_KEY; + private String name = "TestToken1"; + private String symbol = "TTT"; + private Key kycKey = B_COMPLEX_KEY; + private Key freezeKey = B_COMPLEX_KEY; + private Key wipeKey = B_COMPLEX_KEY; + private Key supplyKey = B_COMPLEX_KEY; + private Key feeScheduleKey = B_COMPLEX_KEY; + private Key pauseKey = B_COMPLEX_KEY; + private Timestamp expiry = Timestamp.newBuilder().seconds(1234600L).build(); + private AccountID autoRenewAccount = ownerId; + private long autoRenewPeriod = autoRenewSecs; + private String memo = "test token1"; + TokenID tokenId = fungibleTokenId; + + private TokenUpdateBuilder() {} + + public TransactionBody build() { + final var transactionID = + TransactionID.newBuilder().accountID(payer).transactionValidStart(consensusTimestamp); + final var createTxnBody = TokenUpdateTransactionBody.newBuilder() + .token(tokenId) + .symbol(symbol) + .name(name) + .treasury(treasury) + .adminKey(adminKey) + .supplyKey(supplyKey) + .kycKey(kycKey) + .freezeKey(freezeKey) + .wipeKey(wipeKey) + .feeScheduleKey(feeScheduleKey) + .pauseKey(pauseKey) + .autoRenewAccount(autoRenewAccount) + .expiry(expiry) + .memo(memo); + if (autoRenewPeriod > 0) { + createTxnBody.autoRenewPeriod( + Duration.newBuilder().seconds(autoRenewPeriod).build()); + } + return TransactionBody.newBuilder() + .transactionID(transactionID) + .tokenUpdate(createTxnBody.build()) + .build(); + } + + public TokenUpdateBuilder withToken(TokenID tokenId) { + this.tokenId = tokenId; + return this; + } + + public TokenUpdateBuilder withFreezeKey(Key freezeKey) { + this.freezeKey = freezeKey; + return this; + } + + public TokenUpdateBuilder withAutoRenewAccount(AccountID autoRenewAccount) { + this.autoRenewAccount = autoRenewAccount; + return this; + } + + public TokenUpdateBuilder withSymbol(final String symbol) { + this.symbol = symbol; + return this; + } + + public TokenUpdateBuilder withName(final String name) { + this.name = name; + return this; + } + + public TokenUpdateBuilder withTreasury(final AccountID treasury) { 
+ this.treasury = treasury; + return this; + } + + public TokenUpdateBuilder withFeeScheduleKey(final Key key) { + this.feeScheduleKey = key; + return this; + } + + public TokenUpdateBuilder withAdminKey(final Key key) { + this.adminKey = key; + return this; + } + + public TokenUpdateBuilder withSupplyKey(final Key key) { + this.supplyKey = key; + return this; + } + + public TokenUpdateBuilder withKycKey(final Key key) { + this.kycKey = key; + return this; + } + + public TokenUpdateBuilder withWipeKey(final Key key) { + this.wipeKey = key; + return this; + } + + public TokenUpdateBuilder withExpiry(final long expiry) { + this.expiry = Timestamp.newBuilder().seconds(expiry).build(); + return this; + } + + public TokenUpdateBuilder withAutoRenewPeriod(final long autoRenewPeriod) { + this.autoRenewPeriod = autoRenewPeriod; + return this; + } + + public TokenUpdateBuilder withMemo(final String s) { + this.memo = s; + return this; + } + + public TokenUpdateBuilder withPauseKey(final Key key) { + this.pauseKey = key; + return this; + } + + public TokenUpdateBuilder wthFreezeKey(final Key key) { + this.freezeKey = key; + return this; + } + } + + private void setUpTxnContext() { + given(handleContext.writableStore(WritableAccountStore.class)).willReturn(writableAccountStore); + given(handleContext.configuration()).willReturn(configuration); + given(handleContext.consensusNow()).willReturn(consensusInstant); + given(compositeProps.getLongProperty("entities.maxLifetime")).willReturn(7200000L); + + attributeValidator = + new StandardizedAttributeValidator(consensusInstant::getEpochSecond, compositeProps, dynamicProperties); + expiryValidator = new StandardizedExpiryValidator( + id -> { + final var account = writableAccountStore.get( + AccountID.newBuilder().accountNum(id.num()).build()); + validateTrue(account != null, INVALID_AUTORENEW_ACCOUNT); + }, + attributeValidator, + consensusInstant::getEpochSecond, + hederaNumbers, + configProvider); + + given(handleContext.expiryValidator()).willReturn(expiryValidator); + given(handleContext.attributeValidator()).willReturn(attributeValidator); + given(dynamicProperties.maxMemoUtf8Bytes()).willReturn(50); + given(dynamicProperties.maxAutoRenewDuration()).willReturn(3000000L); + given(dynamicProperties.minAutoRenewDuration()).willReturn(10L); + given(configProvider.getConfiguration()).willReturn(versionedConfig); + } +} diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java index 5962f82c58f8..2e002194d859 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/handlers/util/CryptoTokenHandlerTestBase.java @@ -528,7 +528,7 @@ private void givenValidAccounts() { } protected Token givenValidFungibleToken() { - return givenValidFungibleToken(autoRenewId.accountNum()); + return givenValidFungibleToken(spenderId.accountNum()); } protected Token givenValidFungibleToken(long autoRenewAccountNumber) { @@ -555,7 +555,7 @@ protected Token givenValidFungibleToken( supplyKey, feeScheduleKey, pauseKey, - 0, + 2, deleted, TokenType.FUNGIBLE_COMMON, TokenSupplyType.FINITE, @@ -606,7 +606,7 @@ protected Account givenValidAccount() { 2, 
0, 1000L, - 2, + 0, 72000, 0, Collections.emptyList(), @@ -623,7 +623,7 @@ protected TokenRelation givenFungibleTokenRelation() { .accountNumber(accountNum) .balance(1000L) .frozen(false) - .kycGranted(false) + .kycGranted(true) .deleted(false) .automaticAssociation(true) .nextToken(2L) @@ -635,9 +635,9 @@ protected TokenRelation givenNonFungibleTokenRelation() { return TokenRelation.newBuilder() .tokenNumber(nonFungibleTokenNum.longValue()) .accountNumber(accountNum) - .balance(1000L) + .balance(1) .frozen(false) - .kycGranted(false) + .kycGranted(true) .deleted(false) .automaticAssociation(true) .nextToken(2L) diff --git a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/TokenAttributesValidatorTest.java b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/TokenAttributesValidatorTest.java index eba761446fd7..f51dc77a8c87 100644 --- a/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/TokenAttributesValidatorTest.java +++ b/hedera-node/hedera-token-service-impl/src/test/java/com/hedera/node/app/service/token/impl/test/validators/TokenAttributesValidatorTest.java @@ -125,7 +125,7 @@ void failsForVeryLongName() { @Test void validatesKeys() { - assertThatThrownBy(() -> subject.checkKeys( + assertThatThrownBy(() -> subject.validateTokenKeys( true, Key.DEFAULT, true, @@ -142,7 +142,7 @@ void validatesKeys() { A_COMPLEX_KEY)) .isInstanceOf(HandleException.class) .has(responseCode(INVALID_ADMIN_KEY)); - assertThatThrownBy(() -> subject.checkKeys( + assertThatThrownBy(() -> subject.validateTokenKeys( true, A_COMPLEX_KEY, true, @@ -159,7 +159,7 @@ void validatesKeys() { A_COMPLEX_KEY)) .isInstanceOf(HandleException.class) .has(responseCode(INVALID_KYC_KEY)); - assertThatThrownBy(() -> subject.checkKeys( + assertThatThrownBy(() -> subject.validateTokenKeys( true, A_COMPLEX_KEY, true, @@ -176,7 +176,7 @@ void validatesKeys() { A_COMPLEX_KEY)) .isInstanceOf(HandleException.class) .has(responseCode(INVALID_WIPE_KEY)); - assertThatThrownBy(() -> subject.checkKeys( + assertThatThrownBy(() -> subject.validateTokenKeys( true, A_COMPLEX_KEY, true, @@ -193,7 +193,7 @@ void validatesKeys() { A_COMPLEX_KEY)) .isInstanceOf(HandleException.class) .has(responseCode(INVALID_SUPPLY_KEY)); - assertThatThrownBy(() -> subject.checkKeys( + assertThatThrownBy(() -> subject.validateTokenKeys( true, A_COMPLEX_KEY, true, @@ -210,7 +210,7 @@ void validatesKeys() { A_COMPLEX_KEY)) .isInstanceOf(HandleException.class) .has(responseCode(INVALID_FREEZE_KEY)); - assertThatThrownBy(() -> subject.checkKeys( + assertThatThrownBy(() -> subject.validateTokenKeys( true, A_COMPLEX_KEY, true, @@ -227,7 +227,7 @@ void validatesKeys() { A_COMPLEX_KEY)) .isInstanceOf(HandleException.class) .has(responseCode(INVALID_CUSTOM_FEE_SCHEDULE_KEY)); - assertThatThrownBy(() -> subject.checkKeys( + assertThatThrownBy(() -> subject.validateTokenKeys( true, A_COMPLEX_KEY, true, @@ -249,7 +249,7 @@ void validatesKeys() { @Test void validatesKeysWithNulls() { assertThatNoException() - .isThrownBy(() -> subject.checkKeys( + .isThrownBy(() -> subject.validateTokenKeys( false, Key.DEFAULT, true, @@ -265,7 +265,7 @@ void validatesKeysWithNulls() { true, A_COMPLEX_KEY)); assertThatNoException() - .isThrownBy(() -> subject.checkKeys( + .isThrownBy(() -> subject.validateTokenKeys( true, A_COMPLEX_KEY, false, @@ -281,7 +281,7 @@ void validatesKeysWithNulls() { true, A_COMPLEX_KEY)); 
assertThatNoException() - .isThrownBy(() -> subject.checkKeys( + .isThrownBy(() -> subject.validateTokenKeys( true, A_COMPLEX_KEY, true, @@ -297,7 +297,7 @@ void validatesKeysWithNulls() { true, A_COMPLEX_KEY)); assertThatNoException() - .isThrownBy(() -> subject.checkKeys( + .isThrownBy(() -> subject.validateTokenKeys( true, A_COMPLEX_KEY, true, @@ -313,7 +313,7 @@ void validatesKeysWithNulls() { true, A_COMPLEX_KEY)); assertThatNoException() - .isThrownBy(() -> subject.checkKeys( + .isThrownBy(() -> subject.validateTokenKeys( true, A_COMPLEX_KEY, true, @@ -329,7 +329,7 @@ void validatesKeysWithNulls() { true, A_COMPLEX_KEY)); assertThatNoException() - .isThrownBy(() -> subject.checkKeys( + .isThrownBy(() -> subject.validateTokenKeys( true, A_COMPLEX_KEY, true, @@ -345,7 +345,7 @@ void validatesKeysWithNulls() { true, A_COMPLEX_KEY)); assertThatNoException() - .isThrownBy(() -> subject.checkKeys( + .isThrownBy(() -> subject.validateTokenKeys( true, A_COMPLEX_KEY, true, From b5bb9ae573b2672bf25934f28b138d6431cc5701 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20Brandst=C3=A4tter?= Date: Tue, 20 Jun 2023 23:02:33 +0200 Subject: [PATCH 61/70] #6453 Introduce PathsConfig (#7141) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Timo Brandstätter --- .../swirlds/common/config/PathsConfig.java | 103 ++++++++++++++++++ .../platform/ApplicationDefinitionLoader.java | 10 +- .../java/com/swirlds/platform/Browser.java | 56 ++++------ .../swirlds/platform/SettingConstants.java | 48 ++++---- .../java/com/swirlds/platform/Settings.java | 56 ++-------- .../platform/config/ConfigMappings.java | 8 +- .../swirlds/platform/crypto/CryptoSetup.java | 5 +- .../filesystem/OSFileSystemChecker.java | 6 +- .../swirlds/platform/util/BootstrapUtils.java | 5 +- .../com/swirlds/platform/SettingsTest.java | 31 ------ 10 files changed, 187 insertions(+), 141 deletions(-) create mode 100644 platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/PathsConfig.java diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/PathsConfig.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/PathsConfig.java new file mode 100644 index 000000000000..faaeed768515 --- /dev/null +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/PathsConfig.java @@ -0,0 +1,103 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.common.config; + +import static com.swirlds.common.io.utility.FileUtils.getAbsolutePath; +import static com.swirlds.common.io.utility.FileUtils.rethrowIO; + +import com.swirlds.config.api.ConfigData; +import com.swirlds.config.api.ConfigProperty; +import java.nio.file.Path; + +/** + * Configurations related to paths. 
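+ *
+ * <p>A minimal usage sketch, assuming a {@code com.swirlds.config.api.Configuration} instance
+ * whose builder registered this type via {@code withConfigDataType(PathsConfig.class)}:
+ * <pre>{@code
+ * final PathsConfig paths = configuration.getConfigData(PathsConfig.class);
+ * final Path configTxt = paths.getConfigPath(); // "config.txt" resolved to an absolute path
+ * }</pre>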
+ * @param configPath + * path to config.txt (which might not exist) + * @param settingsPath + * path to settings.txt (which might not exist) + * @param settingsUsedDir + * the directory where the settings used file will be created on startup if and only if settings.txt exists + * @param keysDirPath + * path to data/keys/ + * @param appsDirPath + * path to data/apps/ + * @param logPath + * path to log4j2.xml (which might not exist) + */ +@ConfigData("paths") +public record PathsConfig( + @ConfigProperty(defaultValue = "config.txt") String configPath, + @ConfigProperty(defaultValue = "settings.txt") String settingsPath, + @ConfigProperty(defaultValue = ".") String settingsUsedDir, + @ConfigProperty(defaultValue = "data/keys") String keysDirPath, + @ConfigProperty(defaultValue = "data/apps") String appsDirPath, + @ConfigProperty(defaultValue = "log4j2.xml") String logPath) { + + /** + * path to config.txt (which might not exist) + * + * @return absolute path to config.txt + */ + public Path getConfigPath() { + return getAbsolutePath(configPath); + } + + /** + * path to settings.txt (which might not exist) + * + * @return absolute path to settings.txt + */ + public Path getSettingsPath() { + return getAbsolutePath(settingsPath); + } + + /** + * the directory where the settings used file will be created on startup if and only if settings.txt exists + * + * @return absolute path to settings directory + */ + public Path getSettingsUsedDir() { + return getAbsolutePath(settingsUsedDir); + } + + /** + * path to data/keys/ + * + * @return absolute path to data/keys/ + */ + public Path getKeysDirPath() { + return getAbsolutePath(keysDirPath); + } + + /** + * path to data/apps/ + * + * @return absolute path to data/apps/ + */ + public Path getAppsDirPath() { + return getAbsolutePath(appsDirPath); + } + + /** + * path to log4j2.xml (which might not exist) + * + * @return absolute path to log4j2.xml + */ + public Path getLogPath() { + return rethrowIO(() -> getAbsolutePath(logPath)); + } +} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ApplicationDefinitionLoader.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ApplicationDefinitionLoader.java index 27a5eaeff417..2d24c0cfab9e 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ApplicationDefinitionLoader.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/ApplicationDefinitionLoader.java @@ -18,6 +18,8 @@ import static com.swirlds.logging.LogMarker.EXCEPTION; +import com.swirlds.common.config.PathsConfig; +import com.swirlds.common.config.singleton.ConfigurationHolder; import com.swirlds.common.internal.ApplicationDefinition; import com.swirlds.common.internal.ConfigurationException; import com.swirlds.common.system.NodeId; @@ -59,14 +61,14 @@ public final class ApplicationDefinitionLoader { private ApplicationDefinitionLoader() {} /** - * Parses the configuration file specified by the {@link Settings#getConfigPath()} setting, configures all + * Parses the configuration file specified by the {@link PathsConfig#getConfigPath()} setting, configures all * appropriate system settings, and returns a generic {@link ApplicationDefinition}. 
* * @param localNodesToStart * the {@link Set} of local nodes to be started, if specified * @return an {@link ApplicationDefinition} specifying the application to be loaded and all related configuration * @throws ConfigurationException - * if the configuration file specified by {@link Settings#getConfigPath()} does not exist + * if the configuration file specified by {@link PathsConfig#getConfigPath()} does not exist */ public static ApplicationDefinition load( @NonNull final LegacyConfigProperties configurationProperties, @NonNull final Set localNodesToStart) @@ -100,7 +102,9 @@ private static AppStartParams convertToStartParams(final JarAppConfig appConfig) // the line is: app, jarFilename, optionalParameters final String appJarFilename = appConfig.jarName(); // this is a real .jar file, so load from data/apps/ - Path appJarPath = Settings.getInstance().getAppsDirPath().resolve(appJarFilename); + Path appJarPath = ConfigurationHolder.getConfigData(PathsConfig.class) + .getAppsDirPath() + .resolve(appJarFilename); String mainClassname = ""; try (final JarFile jarFile = new JarFile(appJarPath.toFile())) { final Manifest manifest = jarFile.getManifest(); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java index 96531b0cdc91..6457faece36c 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java @@ -39,6 +39,7 @@ import com.swirlds.common.config.ConsensusConfig; import com.swirlds.common.config.EventConfig; import com.swirlds.common.config.OSHealthCheckConfig; +import com.swirlds.common.config.PathsConfig; import com.swirlds.common.config.StateConfig; import com.swirlds.common.config.WiringConfig; import com.swirlds.common.config.export.ConfigExport; @@ -193,7 +194,7 @@ private Browser(@NonNull final Set localNodesToStart) throws IOException // The properties from the config.txt final LegacyConfigProperties configurationProperties = LegacyConfigPropertiesLoader.loadConfigFile( - Settings.getInstance().getConfigPath()); + ConfigurationHolder.getConfigData(PathsConfig.class).getConfigPath()); final ConfigSource settingsConfigSource = LegacyFileConfigSource.ofSettingsFile(); final ConfigSource mappedSettingsConfigSource = ConfigMappings.addConfigMapping(settingsConfigSource); @@ -236,7 +237,8 @@ private Browser(@NonNull final Set localNodesToStart) throws IOException .withConfigDataType(SyncConfig.class) .withConfigDataType(UptimeConfig.class) .withConfigDataType(RecycleBinConfig.class) - .withConfigDataType(EventConfig.class); + .withConfigDataType(EventConfig.class) + .withConfigDataType(PathsConfig.class); // Assume all locally run instances provide the same configuration definitions to the configuration builder. if (appMains.size() > 0) { @@ -302,13 +304,13 @@ private Browser(@NonNull final Set localNodesToStart) throws IOException // simulation to run. 
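+        // The config.txt location below is resolved through PathsConfig; if the file does not exist,
+        // the browser reports where it could be created and returns without starting any platforms.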
try { - - if (Files.exists(Settings.getInstance().getConfigPath())) { - CommonUtils.tellUserConsole("Reading the configuration from the file: " - + Settings.getInstance().getConfigPath()); + final PathsConfig pathsConfig = configuration.getConfigData(PathsConfig.class); + if (Files.exists(pathsConfig.getConfigPath())) { + CommonUtils.tellUserConsole( + "Reading the configuration from the file: " + pathsConfig.getConfigPath()); } else { - CommonUtils.tellUserConsole("A config.txt file could be created here: " - + Settings.getInstance().getConfigPath()); + CommonUtils.tellUserConsole( + "A config.txt file could be created here: " + pathsConfig.getConfigPath()); return; } // instantiate all Platform objects, which each instantiates a Statistics object @@ -424,7 +426,8 @@ private void writeSettingsUsed(final Configuration configuration) { final StringBuilder settingsUsedBuilder = new StringBuilder(); // Add all settings values to the string builder - if (Files.exists(Settings.getInstance().getSettingsPath())) { + final PathsConfig pathsConfig = configuration.getConfigData(PathsConfig.class); + if (Files.exists(pathsConfig.getSettingsPath())) { Settings.getInstance().addSettingsUsed(settingsUsedBuilder); } @@ -436,8 +439,7 @@ private void writeSettingsUsed(final Configuration configuration) { ConfigExport.addConfigContents(configuration, settingsUsedBuilder); // Write the settingsUsed.txt file - final Path settingsUsedPath = - Settings.getInstance().getSettingsUsedDir().resolve(SettingConstants.SETTING_USED_FILENAME); + final Path settingsUsedPath = pathsConfig.getSettingsUsedDir().resolve(SettingConstants.SETTING_USED_FILENAME); try (final OutputStream outputStream = new FileOutputStream(settingsUsedPath.toFile())) { outputStream.write(settingsUsedBuilder.toString().getBytes(StandardCharsets.UTF_8)); } catch (final IOException | RuntimeException e) { @@ -473,31 +475,21 @@ public static synchronized void parseCommandLineArgsAndLaunch(final String... 
ar if (args != null) { for (final String item : args) { final String arg = item.trim().toLowerCase(); - switch (arg) { - case "-local": - case "-log": - currentOption = arg; - break; - default: - if (currentOption != null) { - switch (currentOption) { - case "-local": - try { - localNodesToStart.add(new NodeId(Integer.parseInt(arg))); - } catch (final NumberFormatException ex) { - // Intentionally suppress the NumberFormatException - } - break; - case "-log": - Settings.getInstance().setLogPath(getAbsolutePath(arg)); - break; - } - } + if (arg.equals("-local")) { + currentOption = arg; + } else if (currentOption != null) { + try { + localNodesToStart.add(new NodeId(Integer.parseInt(arg))); + } catch (final NumberFormatException ex) { + // Intentionally suppress the NumberFormatException + } } } } - launch(localNodesToStart, Settings.getInstance().getLogPath()); + launch( + localNodesToStart, + ConfigurationHolder.getConfigData(PathsConfig.class).getLogPath()); } /** diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java index eb1f15904df2..ce2998eea511 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java @@ -28,16 +28,9 @@ public final class SettingConstants { /** name of the settings used file */ static final String SETTING_USED_FILENAME = "settingsUsed.txt"; - static final String CONFIG_TXT = "config.txt"; - static final String SETTINGS_TXT = "settings.txt"; static final String DATA_STRING = "data"; static final String SAVED_STRING = "saved"; - static final String KEYS_STRING = "keys"; - static final String APPS_STRING = "apps"; - static final String LOG4J2_CONFIG_FILE = "log4j2.xml"; static final int NUM_CRYPTO_THREADS_DEFAULT_VALUE = 32; - static final int SIGNED_STATE_FREQ_DEFAULT_VALUE = 1; - static final int MAX_EVENT_QUEUE_FOR_CONS_DEFAULT_VALUE = 10_000; static final int THROTTLE_TRANSACTION_QUEUE_SIZE_DEFAULT_VALUE = 100_000; static final int NUM_CONNECTIONS_DEFAULT_VALUE = 40; static final int MAX_OUTGOING_SYNCS_DEFAULT_VALUE = 2; @@ -71,7 +64,6 @@ public final class SettingConstants { static final boolean LOAD_KEYS_FROM_PFX_FILES_DEFAULT_VALUE = true; static final int MAX_TRANSACTION_BYTES_PER_EVENT_DEFAULT_VALUE = 245760; static final int MAX_TRANSACTION_COUNT_PER_EVENT_DEFAULT_VALUE = 245760; - static final int RANDOM_EVENT_PROBABILITY_DEFAULT_VALUE = 0; static final int THREAD_DUMP_PERIOD_MS_DEFAULT_VALUE = 0; static final String THREAD_DUMP_LOG_DIR_DEFAULT_VALUE = "data/threadDump"; static final int JVM_PAUSE_DETECTOR_SLEEP_MS_DEFAULT_VALUE = 1000; @@ -79,16 +71,16 @@ public final class SettingConstants { static final boolean GOSSIP_WITH_DIFFERENT_VERSIONS_DEFAULT_VALUE = false; static final Set REMOVED_SETTINGS = Set.of( - "reconnectg.active", - "reconnectg.reconnectWindowSeconds", - "reconnectg.fallenBehindThreshold", - "reconnectg.asyncStreamTimeoutMilliseconds", - "reconnectg.asyncOutputStreamFlushMilliseconds", - "reconnectg.asyncStreamBufferSize", - "reconnectg.asyncStreams", - "reconnectg.maxAckDelayMilliseconds", - "reconnectg.maximumReconnectFailuresBeforeShutdown", - "reconnectg.minimumTimeBetweenReconnects", + "reconnect.active", + "reconnect.reconnectWindowSeconds", + "reconnect.fallenBehindThreshold", + "reconnect.asyncStreamTimeoutMilliseconds", + 
"reconnect.asyncOutputStreamFlushMilliseconds", + "reconnect.asyncStreamBufferSize", + "reconnect.asyncStreams", + "reconnect.maxAckDelayMilliseconds", + "reconnect.maximumReconnectFailuresBeforeShutdown", + "reconnect.minimumTimeBetweenReconnects", "chatter.useChatter", "chatter.attemptedChatterEventPerSecond", "chatter.chatteringCreationThreshold", @@ -203,10 +195,22 @@ public final class SettingConstants { "metrics.csvOutputFolder", "metrics.csvFileName", "metrics.csvAppend", - "metrics.prometheusEndpointEnabled", - "metrics.prometheusEndpointPortNumber", - "metrics.prometheusEndpointMaxBacklogAllowed", - "metrics.disableMetricsOutput"); + "metrics.disableMetricsOutput", + "prometheus.endpointEnabled", + "prometheus.endpointPortNumber", + "prometheus.endpointMaxBacklogAllowed", + "configPath", + "settingsPath", + "settingsUsedDir", + "keysDirPath", + "appsDirPath", + "logPath", + "paths.configPath", + "paths.settingsPath", + "paths.settingsUsedDir", + "paths.keysDirPath", + "paths.appsDirPath", + "paths.logPath"); private SettingConstants() {} } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java index d9f846f1be85..48438e7bf1cd 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java @@ -17,14 +17,11 @@ package com.swirlds.platform; import static com.swirlds.common.io.utility.FileUtils.getAbsolutePath; -import static com.swirlds.common.io.utility.FileUtils.rethrowIO; import static com.swirlds.common.settings.ParsingUtils.parseDuration; import static com.swirlds.logging.LogMarker.EXCEPTION; import static com.swirlds.logging.LogMarker.STARTUP; -import static com.swirlds.platform.SettingConstants.APPS_STRING; import static com.swirlds.platform.SettingConstants.BUFFER_SIZE_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.CALLER_SKIPS_BEFORE_SLEEP_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.CONFIG_TXT; import static com.swirlds.platform.SettingConstants.DATA_STRING; import static com.swirlds.platform.SettingConstants.DEADLOCK_CHECK_PERIOD_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.DELAY_SHUFFLE_DEFAULT_VALUE; @@ -33,9 +30,7 @@ import static com.swirlds.platform.SettingConstants.GOSSIP_WITH_DIFFERENT_VERSIONS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.JVM_PAUSE_DETECTOR_SLEEP_MS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.JVM_PAUSE_REPORT_MS_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.KEYS_STRING; import static com.swirlds.platform.SettingConstants.LOAD_KEYS_FROM_PFX_FILES_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.LOG4J2_CONFIG_FILE; import static com.swirlds.platform.SettingConstants.LOG_STACK_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.MAX_ADDRESS_SIZE_ALLOWED_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.MAX_INCOMING_SYNCS_INC_DEFAULT_VALUE; @@ -46,7 +41,6 @@ import static com.swirlds.platform.SettingConstants.NUM_CRYPTO_THREADS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.REMOVED_SETTINGS; import static com.swirlds.platform.SettingConstants.SAVED_STRING; -import static com.swirlds.platform.SettingConstants.SETTINGS_TXT; import static com.swirlds.platform.SettingConstants.SHOW_INTERNAL_STATS_DEFAULT_VALUE; 
import static com.swirlds.platform.SettingConstants.SLEEP_CALLER_SKIPS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.SLEEP_HEARTBEAT_DEFAULT_VALUE; @@ -69,6 +63,8 @@ import static com.swirlds.platform.SettingConstants.VERBOSE_STATISTICS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.VERIFY_EVENT_SIGS_DEFAULT_VALUE; +import com.swirlds.common.config.PathsConfig; +import com.swirlds.common.config.singleton.ConfigurationHolder; import com.swirlds.common.internal.SettingsCommon; import com.swirlds.common.settings.SettingsException; import com.swirlds.common.utility.CommonUtils; @@ -127,23 +123,13 @@ public class Settings { private static final Logger logger = LogManager.getLogger(Settings.class); private static final Settings INSTANCE = new Settings(); - /** path to config.txt (which might not exist) */ - private final Path configPath = getAbsolutePath(CONFIG_TXT); - /** path to settings.txt (which might not exist) */ - private final Path settingsPath = getAbsolutePath(SETTINGS_TXT); /** the directory where the settings used file will be created on startup if and only if settings.txt exists */ private final Path settingsUsedDir = getAbsolutePath(); - /** path to data/keys/ */ - private final Path keysDirPath = getAbsolutePath().resolve(DATA_STRING).resolve(KEYS_STRING); - /** path to data/apps/ */ - private final Path appsDirPath = getAbsolutePath().resolve(DATA_STRING).resolve(APPS_STRING); /////////////////////////////////////////// // settings from settings.txt file /** priority for threads that don't sync (all but SyncCaller, SyncListener,SyncServer */ private final int threadPriorityNonSync = THREAD_PRIORITY_NON_SYNC_DEFAULT_VALUE; - /** path to log4j2.xml (which might not exist) */ - private Path logPath = rethrowIO(() -> getAbsolutePath(LOG4J2_CONFIG_FILE)); /** verify event signatures (rather than just trusting they are correct)? */ private boolean verifyEventSigs = VERIFY_EVENT_SIGS_DEFAULT_VALUE; /** number of threads used to verify signatures and generate keys, in parallel */ @@ -229,7 +215,8 @@ public class Settings { */ private int freezeSecondsAfterStartup = FREEZE_SECONDS_AFTER_STARTUP_DEFAULT_VALUE; /** - * When enabled, the platform will try to load node keys from .pfx files located in {@link #keysDirPath}. If even a + * When enabled, the platform will try to load node keys from .pfx files located in + * {@link com.swirlds.common.config.PathsConfig.keysDirPath}. If even a * single key is missing, the platform will warn and exit. *

* If disabled, the platform will generate keys deterministically. @@ -360,6 +347,8 @@ public void writeSettingsUsed(final Path directory) { * this source file. The settings.txt file is only used for testing and debugging. */ public void loadSettings() { + final Path settingsPath = + ConfigurationHolder.getConfigData(PathsConfig.class).getSettingsPath(); loadSettings(settingsPath.toFile()); } @@ -378,8 +367,9 @@ public void loadSettings(final File settingsFile) { try { scanner = new Scanner(settingsFile, StandardCharsets.UTF_8.name()); } catch (final FileNotFoundException e) { // this should never happen - CommonUtils.tellUserConsole( - "The file " + Settings.getInstance().getSettingsPath() + " exists, but can't be opened. " + e); + final Path settingsPath = + ConfigurationHolder.getConfigData(PathsConfig.class).getSettingsPath(); + CommonUtils.tellUserConsole("The file " + settingsPath + " exists, but can't be opened. " + e); return; } @@ -578,34 +568,6 @@ private String[][] currSettings() { return list.toArray(new String[0][0]); } - public Path getConfigPath() { - return configPath; - } - - public Path getSettingsPath() { - return settingsPath; - } - - public Path getSettingsUsedDir() { - return settingsUsedDir; - } - - public Path getKeysDirPath() { - return keysDirPath; - } - - public Path getAppsDirPath() { - return appsDirPath; - } - - public Path getLogPath() { - return logPath; - } - - public void setLogPath(final Path logPath) { - this.logPath = logPath; - } - public boolean isVerifyEventSigs() { return verifyEventSigs; } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/ConfigMappings.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/ConfigMappings.java index ce54429892e4..21ab4cacaf69 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/ConfigMappings.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/ConfigMappings.java @@ -58,7 +58,13 @@ private ConfigMappings() {} new ConfigMapping("metrics.disableMetricsOutput", "disableMetricsOutput"), new ConfigMapping("prometheus.endpointEnabled", "prometheusEndpointEnabled"), new ConfigMapping("prometheus.endpointPortNumber", "prometheusEndpointPortNumber"), - new ConfigMapping("prometheus.endpointMaxBacklogAllowed", "prometheusEndpointMaxBacklogAllowed")); + new ConfigMapping("prometheus.endpointMaxBacklogAllowed", "prometheusEndpointMaxBacklogAllowed"), + new ConfigMapping("paths.configPath", "configPath"), + new ConfigMapping("paths.settingsPath", "settingsPath"), + new ConfigMapping("paths.settingsUsedDir", "settingsUsedDir"), + new ConfigMapping("paths.keysDirPath", "keysDirPath"), + new ConfigMapping("paths.appsDirPath", "appsDirPath"), + new ConfigMapping("paths.logPath", "logPath")); /** * Add all known aliases to the provided config source diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/CryptoSetup.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/CryptoSetup.java index 165a5196dd0e..7523534ee021 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/CryptoSetup.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/crypto/CryptoSetup.java @@ -21,6 +21,8 @@ import static com.swirlds.logging.LogMarker.EXCEPTION; import static com.swirlds.logging.LogMarker.STARTUP; +import com.swirlds.common.config.PathsConfig; +import 
com.swirlds.common.config.singleton.ConfigurationHolder; import com.swirlds.common.crypto.CryptographyException; import com.swirlds.common.crypto.config.CryptoConfig; import com.swirlds.common.system.NodeId; @@ -83,7 +85,8 @@ public static Map initNodeSecurity( .setDaemon(false) .buildFactory()); - final Path keysDirPath = Settings.getInstance().getKeysDirPath(); + final Path keysDirPath = + ConfigurationHolder.getConfigData(PathsConfig.class).getKeysDirPath(); final Map keysAndCerts; try { if (Settings.getInstance().isLoadKeysFromPfxFiles()) { diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/health/filesystem/OSFileSystemChecker.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/health/filesystem/OSFileSystemChecker.java index 2078d5e1464b..f8e8cb108295 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/health/filesystem/OSFileSystemChecker.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/health/filesystem/OSFileSystemChecker.java @@ -20,8 +20,9 @@ import static com.swirlds.platform.health.OSHealthCheckUtils.reportHeader; import com.swirlds.common.config.OSHealthCheckConfig; +import com.swirlds.common.config.PathsConfig; +import com.swirlds.common.config.singleton.ConfigurationHolder; import com.swirlds.common.utility.Units; -import com.swirlds.platform.Settings; import java.util.concurrent.TimeUnit; /** @@ -34,7 +35,8 @@ private OSFileSystemChecker() {} public static boolean performFileSystemCheck(final StringBuilder sb, final OSHealthCheckConfig osHealthConfig) { try { final OSFileSystemCheck.Report fileSystemReport = OSFileSystemCheck.execute( - Settings.getInstance().getConfigPath(), osHealthConfig.fileReadTimeoutMillis()); + ConfigurationHolder.getConfigData(PathsConfig.class).getConfigPath(), + osHealthConfig.fileReadTimeoutMillis()); return appendReport(sb, fileSystemReport, osHealthConfig.maxFileReadMillis()); } catch (final InterruptedException e) { Thread.currentThread().interrupt(); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/util/BootstrapUtils.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/util/BootstrapUtils.java index b260bbb65808..f1b413a54ee4 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/util/BootstrapUtils.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/util/BootstrapUtils.java @@ -19,6 +19,7 @@ import static com.swirlds.logging.LogMarker.STARTUP; import com.swirlds.common.config.ConfigUtils; +import com.swirlds.common.config.PathsConfig; import com.swirlds.common.config.singleton.ConfigurationHolder; import com.swirlds.common.config.sources.LegacyFileConfigSource; import com.swirlds.common.constructable.ConstructableRegistry; @@ -28,7 +29,6 @@ import com.swirlds.config.api.Configuration; import com.swirlds.config.api.ConfigurationBuilder; import com.swirlds.platform.Log4jSetup; -import com.swirlds.platform.Settings; import com.swirlds.platform.state.signed.SignedState; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -60,7 +60,8 @@ public static void startLoggingFramework(final Path log4jPath) { if (log4jPath != null && Files.exists(log4jPath)) { Log4jSetup.startLoggingFramework(log4jPath); } else { - Log4jSetup.startLoggingFramework(Settings.getInstance().getLogPath()); + Log4jSetup.startLoggingFramework( + 
ConfigurationHolder.getConfigData(PathsConfig.class).getLogPath()); } } diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java index d67c3aa4a186..dd5b77b1480d 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/SettingsTest.java @@ -16,11 +16,8 @@ package com.swirlds.platform; -import static com.swirlds.platform.SettingConstants.APPS_STRING; import static com.swirlds.platform.SettingConstants.BUFFER_SIZE_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.CALLER_SKIPS_BEFORE_SLEEP_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.CONFIG_TXT; -import static com.swirlds.platform.SettingConstants.DATA_STRING; import static com.swirlds.platform.SettingConstants.DEADLOCK_CHECK_PERIOD_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.DELAY_SHUFFLE_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.DO_UPNP_DEFAULT_VALUE; @@ -28,9 +25,7 @@ import static com.swirlds.platform.SettingConstants.GOSSIP_WITH_DIFFERENT_VERSIONS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.JVM_PAUSE_DETECTOR_SLEEP_MS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.JVM_PAUSE_REPORT_MS_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.KEYS_STRING; import static com.swirlds.platform.SettingConstants.LOAD_KEYS_FROM_PFX_FILES_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.LOG4J2_CONFIG_FILE; import static com.swirlds.platform.SettingConstants.LOG_STACK_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.MAX_ADDRESS_SIZE_ALLOWED_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.MAX_INCOMING_SYNCS_INC_DEFAULT_VALUE; @@ -39,7 +34,6 @@ import static com.swirlds.platform.SettingConstants.MAX_TRANSACTION_COUNT_PER_EVENT_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.NUM_CONNECTIONS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.NUM_CRYPTO_THREADS_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.SETTINGS_TXT; import static com.swirlds.platform.SettingConstants.SHOW_INTERNAL_STATS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.SLEEP_CALLER_SKIPS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.SLEEP_HEARTBEAT_DEFAULT_VALUE; @@ -65,7 +59,6 @@ import com.swirlds.common.config.sources.LegacyFileConfigSource; import com.swirlds.common.crypto.config.CryptoConfig; import com.swirlds.common.io.config.TemporaryFileConfig; -import com.swirlds.common.io.utility.FileUtils; import com.swirlds.config.api.Configuration; import com.swirlds.platform.config.AddressBookConfig; import com.swirlds.test.framework.TestTypeTags; @@ -200,21 +193,9 @@ public void checkLoadSettings() { public void checkGetDefaultSettings() { // given final Settings settings = Settings.getInstance(); - final Path configPath = FileUtils.getAbsolutePath(CONFIG_TXT); - final Path settingsPath = FileUtils.getAbsolutePath(SETTINGS_TXT); - final Path keysDirectoryPath = - FileUtils.getAbsolutePath().resolve(DATA_STRING).resolve(KEYS_STRING); - final Path appsDirectoryPath = - FileUtils.getAbsolutePath().resolve(DATA_STRING).resolve(APPS_STRING); - final Path logPath = FileUtils.rethrowIO(() -> FileUtils.getAbsolutePath(LOG4J2_CONFIG_FILE)); final 
Configuration configuration = new TestConfigBuilder().getOrCreateConfig(); // then - Assertions.assertEquals(configPath, settings.getConfigPath()); - Assertions.assertEquals(settingsPath, settings.getSettingsPath()); - Assertions.assertEquals(keysDirectoryPath, settings.getKeysDirPath()); - Assertions.assertEquals(appsDirectoryPath, settings.getAppsDirPath()); - Assertions.assertEquals(logPath, settings.getLogPath()); Assertions.assertEquals(VERIFY_EVENT_SIGS_DEFAULT_VALUE, settings.isVerifyEventSigs()); Assertions.assertEquals(NUM_CRYPTO_THREADS_DEFAULT_VALUE, settings.getNumCryptoThreads()); Assertions.assertEquals(SHOW_INTERNAL_STATS_DEFAULT_VALUE, settings.isShowInternalStats()); @@ -266,13 +247,6 @@ public void checkGetDefaultSettings() { public void checkGetLoadedSettings() throws IOException { // given final Settings settings = Settings.getInstance(); - final Path configPath = FileUtils.getAbsolutePath(CONFIG_TXT); - final Path settingsPath = FileUtils.getAbsolutePath(SETTINGS_TXT); - final Path keysDirectoryPath = - FileUtils.getAbsolutePath().resolve(DATA_STRING).resolve(KEYS_STRING); - final Path appsDirectoryPath = - FileUtils.getAbsolutePath().resolve(DATA_STRING).resolve(APPS_STRING); - final Path logPath = FileUtils.rethrowIO(() -> FileUtils.getAbsolutePath(LOG4J2_CONFIG_FILE)); final File settingsFile = new File(SettingsTest.class.getResource("settings4.txt").getFile()); Assertions.assertTrue(settingsFile.exists()); @@ -285,11 +259,6 @@ public void checkGetLoadedSettings() throws IOException { // then // These values shouldn't change as they are final - Assertions.assertEquals(configPath, settings.getConfigPath()); - Assertions.assertEquals(settingsPath, settings.getSettingsPath()); - Assertions.assertEquals(keysDirectoryPath, settings.getKeysDirPath()); - Assertions.assertEquals(appsDirectoryPath, settings.getAppsDirPath()); - Assertions.assertEquals(logPath, settings.getLogPath()); Assertions.assertEquals(THREAD_PRIORITY_NON_SYNC_DEFAULT_VALUE, settings.getThreadPriorityNonSync()); // These values should change From 0696370a92b7307a3604af0003f9747f7594e76b Mon Sep 17 00:00:00 2001 From: Michael Tinker Date: Tue, 20 Jun 2023 16:41:22 -0500 Subject: [PATCH 62/70] Let `HapiSpec`'s using `hasPrecheck()` pass with or without streamlined ingest (#7157) Signed-off-by: Michael Tinker --- .../services/bdd/spec/HapiSpecSetup.java | 47 ++++++++++++++----- .../bdd/spec/transactions/HapiTxnOp.java | 6 +++ .../src/main/resource/spec-default.properties | 1 + 3 files changed, 42 insertions(+), 12 deletions(-) diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiSpecSetup.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiSpecSetup.java index 3df2631e9355..ab6f00318e3d 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiSpecSetup.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/HapiSpecSetup.java @@ -29,20 +29,12 @@ import com.hedera.services.bdd.spec.props.JutilPropertySource; import com.hedera.services.bdd.spec.props.MapPropertySource; import com.hedera.services.bdd.spec.props.NodeConnectInfo; -import com.hederahashgraph.api.proto.java.AccountID; -import com.hederahashgraph.api.proto.java.ContractID; -import com.hederahashgraph.api.proto.java.Duration; -import com.hederahashgraph.api.proto.java.FileID; -import com.hederahashgraph.api.proto.java.HederaFunctionality; -import com.hederahashgraph.api.proto.java.RealmID; -import com.hederahashgraph.api.proto.java.ShardID; 
+import com.hedera.services.bdd.spec.transactions.HapiTxnOp; +import com.hederahashgraph.api.proto.java.*; import java.security.SecureRandom; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; import java.util.function.Function; +import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.lang3.StringUtils; @@ -59,6 +51,7 @@ public static HapiPropertySource getDefaultNodeProps() { return defaultNodeProps; } + private Set streamlinedIngestChecks = null; private HapiPropertySource ciPropertiesMap = null; private static HapiPropertySource DEFAULT_PROPERTY_SOURCE = null; private static final HapiPropertySource BASE_DEFAULT_PROPERTY_SOURCE = JutilPropertySource.getDefaultInstance(); @@ -620,6 +613,7 @@ public String systemUndeleteAdminName() { /** * Stream the set of HAPI operations that should be submitted to workflow port 60211/60212. * This code is needed to test each operation through the new workflow code. + * * @return set of hapi operations */ public Set workflowOperations() { @@ -631,4 +625,33 @@ public Set workflowOperations() { .map(HederaFunctionality::valueOf) .collect(toSet()); } + + /** + * Returns the set of response codes that should be always be enforced on ingest. When + * {@link HapiTxnOp#hasPrecheck(ResponseCodeEnum)} is given a response code not in + * this set, it will automatically accept {@code OK} in its place, but switch the expected + * consensus status to that response code. + * + *

+     * <p>That is, for a non-streamlined status like {@link ResponseCodeEnum#INVALID_ACCOUNT_AMOUNTS},
+     * {@code hasPrecheck(INVALID_ACCOUNT_AMOUNTS)} is equivalent to,
+     *

+     * <pre>{@code
+     *     cryptoTransfer(...)
+     *         .hasPrecheckFrom(OK, INVALID_ACCOUNT_AMOUNTS)
+     *         .hasKnownStatus(INVALID_ACCOUNT_AMOUNTS)
+     * }</pre>
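+     *
+     * <p>The set is parsed lazily from the comma-separated {@code spec.streamlinedIngestChecks}
+     * property (for example {@code spec.streamlinedIngestChecks=INVALID_SIGNATURE,PAYER_ACCOUNT_NOT_FOUND})
+     * and then cached, so the property is read at most once per spec setup.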
+ * + * @return the set of response codes that should be always be enforced on ingest + */ + public Set streamlinedIngestChecks() { + if (streamlinedIngestChecks == null) { + final var nominal = props.get("spec.streamlinedIngestChecks"); + streamlinedIngestChecks = EnumSet.copyOf( + nominal.isEmpty() + ? Collections.emptySet() + : Stream.of(nominal.split(",")) + .map(ResponseCodeEnum::valueOf) + .collect(Collectors.toSet())); + } + return streamlinedIngestChecks; + } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/HapiTxnOp.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/HapiTxnOp.java index 42c602236f89..6c0ad41b7032 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/HapiTxnOp.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/spec/transactions/HapiTxnOp.java @@ -195,6 +195,12 @@ && isWithInRetryLimit(retryCount)) { } } if (!acceptAnyPrecheck) { + final var expectedIngestStatus = getExpectedPrecheck(); + if (expectedIngestStatus != OK + && !spec.setup().streamlinedIngestChecks().contains(expectedIngestStatus)) { + expectedStatus = Optional.of(expectedIngestStatus); + permissiblePrechecks = Optional.of(EnumSet.of(OK, expectedIngestStatus)); + } if (permissiblePrechecks.isPresent()) { if (permissiblePrechecks.get().contains(actualPrecheck)) { expectedPrecheck = Optional.of(actualPrecheck); diff --git a/hedera-node/test-clients/src/main/resource/spec-default.properties b/hedera-node/test-clients/src/main/resource/spec-default.properties index 6f38038bba3a..57ccfce44383 100644 --- a/hedera-node/test-clients/src/main/resource/spec-default.properties +++ b/hedera-node/test-clients/src/main/resource/spec-default.properties @@ -105,6 +105,7 @@ num.opFinisher.threads=8 persistentEntities.dir.path= persistentEntities.updateCreatedManifests=true spec.autoScheduledTxns= +spec.streamlinedIngestChecks=ACCOUNT_DELETED,BUSY,INSUFFICIENT_PAYER_BALANCE,INSUFFICIENT_TX_FEE,INVALID_ACCOUNT_ID,INVALID_NODE_ACCOUNT,INVALID_SIGNATURE,INVALID_TRANSACTION,INVALID_TRANSACTION_BODY,INVALID_TRANSACTION_DURATION,INVALID_TRANSACTION_ID,INVALID_TRANSACTION_START,INVALID_ZERO_BYTE_IN_STRING,KEY_PREFIX_MISMATCH,MEMO_TOO_LONG,PAYER_ACCOUNT_NOT_FOUND,PLATFORM_NOT_ACTIVE,TRANSACTION_EXPIRED,TRANSACTION_HAS_UNKNOWN_FIELDS,TRANSACTION_ID_FIELD_NOT_ALLOWED,TRANSACTION_OVERSIZE status.deferredResolves.doAsync=true status.preResolve.pause.ms=0 status.wait.sleep.ms=500 From 1da2393a7be868ccfbe02db8b8bb18b30a064aa3 Mon Sep 17 00:00:00 2001 From: Nathan Klick Date: Wed, 21 Jun 2023 01:00:37 -0500 Subject: [PATCH 63/70] feat: updates the release workflow to reflect the EVM release changes (#7201) Signed-off-by: Nathan Klick --- .../node-flow-deploy-release-artifact.yaml | 23 +++---------------- 1 file changed, 3 insertions(+), 20 deletions(-) diff --git a/.github/workflows/node-flow-deploy-release-artifact.yaml b/.github/workflows/node-flow-deploy-release-artifact.yaml index c7dca84cf1dc..95ecbc0581a0 100644 --- a/.github/workflows/node-flow-deploy-release-artifact.yaml +++ b/.github/workflows/node-flow-deploy-release-artifact.yaml @@ -18,20 +18,6 @@ name: "Node: Deploy Release Artifact" on: workflow_dispatch: inputs: - # new-version: - # description: "New Version:" - # type: string - # required: false - # default: "" - # trigger-env-deploy: - # description: "Deploy to Environment:" - # type: choice - # required: true - # options: - # - none - # - integration - # - preview - # 
default: none dry-run-enabled: description: "Perform Dry Run" type: boolean @@ -68,13 +54,10 @@ jobs: if: ${{ github.event_name == 'workflow_dispatch' }} uses: ./.github/workflows/node-zxc-build-release-artifact.yaml with: - # version-policy: ${{ github.event.inputs.new-verison != '' && 'specified' || 'branch-commit' }} version-policy: branch-commit - # new-version: ${{ github.event.inputs.new-version }} - # trigger-env-deploy: ${{ github.event.inputs.trigger-env-deploy }} trigger-env-deploy: none dry-run-enabled: ${{ github.event.inputs.dry-run-enabled == 'true' }} - java-version: ${{ github.event.inputs.java-version || '17.0.3' }} + java-version: ${{ github.event.inputs.java-version || '17.0.7' }} java-distribution: ${{ github.event.inputs.java-distribution || 'temurin' }} gradle-version: ${{ github.event.inputs.gradle-version || 'wrapper' }} @@ -201,11 +184,11 @@ jobs: REQ_JSON="$(jq --compact-output --null-input \ --arg ref "${{ github.ref }}" \ --arg jdist "${{ github.event.inputs.java-distribution || 'temurin' }}" \ - --arg jver "${{ github.event.inputs.java-version || '17.0.3' }}" \ + --arg jver "${{ github.event.inputs.java-version || '17.0.7' }}" \ --arg gver "${{ github.event.inputs.gradle-version || 'wrapper' }}" \ --arg vpol "${VERSION_POLICY}" \ --arg vnum "${VERSION_NUM}" \ - '{"ref": $ref, "java": {"distribution": $jdist, "version": $jver}, "gradle": {"version": $gver}, "release": {"version": {"policy": $vpol, "number": $vnum}}}')" + '{"mode": "new", "ref": $ref, "java": {"distribution": $jdist, "version": $jver}, "gradle": {"version": $gver}, "release": {"version": {"policy": $vpol, "number": $vnum}}}')" echo "payload=${REQ_JSON}" >>"${GITHUB_OUTPUT}" printf "## Dispatch Payload\n\`\`\`json\n%s\n\`\`\`\n" "$(jq '.' <<<"${REQ_JSON}")" >>"${GITHUB_STEP_SUMMARY}" From 0c6a5897351b53a62d7fab229ae9d6c69afd71ed Mon Sep 17 00:00:00 2001 From: Michael Heinrichs Date: Wed, 21 Jun 2023 08:09:43 +0200 Subject: [PATCH 64/70] Added functionality to read original value from WritableKVState (#7171) Signed-off-by: Michael Heinrichs --- .../node/app/spi/state/WritableKVState.java | 11 +++ .../app/spi/state/WritableKVStateBase.java | 7 ++ .../spi/state/WritableKVStateBaseTest.java | 71 ++++++++++++++++++- .../handle/stack/WritableKVStateStack.java | 6 ++ 4 files changed, 94 insertions(+), 1 deletion(-) diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/WritableKVState.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/WritableKVState.java index fa5275ba8523..790ded9227e2 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/WritableKVState.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/WritableKVState.java @@ -43,6 +43,17 @@ public interface WritableKVState extends ReadableKVState { @Nullable V getForModify(@NonNull K key); + /** + * Gets the original value associated with the given key before any modifications were made to + * it. The returned value will be {@code null} if the key does not exist. + * + * @param key The key. Cannot be null, otherwise an exception is thrown. + * @return The original value, or null if there is no such key in the state + * @throws NullPointerException if the key is null. + */ + @Nullable + V getOriginalValue(@NonNull K key); + /** * Adds a new value to the store, or updates an existing value. 
It is generally preferred to use * {@link #getForModify(K)} to get a writable value, and only use this method if the key does diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/WritableKVStateBase.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/WritableKVStateBase.java index f2ac6e83c564..8d26c158dd88 100644 --- a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/WritableKVStateBase.java +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/WritableKVStateBase.java @@ -82,6 +82,13 @@ public final V get(@NonNull K key) { } } + /** {@inheritDoc} */ + @Nullable + @Override + public V getOriginalValue(@NonNull K key) { + return super.get(key); + } + /** {@inheritDoc} */ @Override @Nullable diff --git a/hedera-node/hedera-app-spi/src/test/java/com/hedera/node/app/spi/state/WritableKVStateBaseTest.java b/hedera-node/hedera-app-spi/src/test/java/com/hedera/node/app/spi/state/WritableKVStateBaseTest.java index 5662d69e0a99..b88342dd0f06 100644 --- a/hedera-node/hedera-app-spi/src/test/java/com/hedera/node/app/spi/state/WritableKVStateBaseTest.java +++ b/hedera-node/hedera-app-spi/src/test/java/com/hedera/node/app/spi/state/WritableKVStateBaseTest.java @@ -24,7 +24,12 @@ import com.hedera.node.app.spi.fixtures.state.MapWritableKVState; import edu.umd.cs.findbugs.annotations.NonNull; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; @@ -376,11 +381,17 @@ void putNew() { assertThat(state.get(C_KEY)).isEqualTo(CHERRY); assertThat(state.readKeys()).isEmpty(); + // The original value should still not exist + assertThat(state.getOriginalValue(C_KEY)).isNull(); + // Commit should cause the value to be added state.commit(); Mockito.verify(state, Mockito.times(1)).putIntoDataSource(Mockito.anyString(), Mockito.anyString()); Mockito.verify(state, Mockito.times(1)).putIntoDataSource(C_KEY, CHERRY); Mockito.verify(state, Mockito.never()).removeFromDataSource(Mockito.anyString()); + + // After a commit, the original value should have been added + assertThat(state.getOriginalValue(C_KEY)).isEqualTo(CHERRY); } /** @@ -399,11 +410,17 @@ void putExisting() { assertThat(state.modifiedKeys()).hasSize(1); assertThat(state.modifiedKeys()).contains(A_KEY); + // The original value should not have changed + assertThat(state.getOriginalValue(A_KEY)).isEqualTo(APPLE); + // Commit should cause the value to be updated state.commit(); Mockito.verify(state, Mockito.times(1)).putIntoDataSource(Mockito.anyString(), Mockito.anyString()); Mockito.verify(state, Mockito.times(1)).putIntoDataSource(A_KEY, ACAI); Mockito.verify(state, Mockito.never()).removeFromDataSource(Mockito.anyString()); + + // After a commit, the original value should have changed + assertThat(state.getOriginalValue(A_KEY)).isEqualTo(ACAI); } /** @@ -442,11 +459,17 @@ void putTwice() { assertThat(state.readKeys()).isEmpty(); assertThat(state.modifiedKeys()).contains(B_KEY); + // The original value should not have changed + assertThat(state.getOriginalValue(B_KEY)).isEqualTo(BANANA); + // Commit should cause the value to be updated to the latest value state.commit(); Mockito.verify(state, Mockito.times(1)).putIntoDataSource(anyString(), anyString()); Mockito.verify(state, 
Mockito.times(1)).putIntoDataSource(B_KEY, BLUEBERRY); Mockito.verify(state, Mockito.never()).removeFromDataSource(anyString()); + + // After a commit, the original value should have changed to the latest value + assertThat(state.getOriginalValue(B_KEY)).isEqualTo(BLUEBERRY); } /** @@ -465,11 +488,17 @@ void putAfterRemove() { assertThat(state.readKeys()).isEmpty(); assertThat(state.modifiedKeys()).contains(B_KEY); + // The original value should not have changed + assertThat(state.getOriginalValue(B_KEY)).isEqualTo(BANANA); + // Commit should cause the value to be updated to the latest value state.commit(); Mockito.verify(state, Mockito.times(1)).putIntoDataSource(anyString(), anyString()); Mockito.verify(state, Mockito.times(1)).putIntoDataSource(B_KEY, BLACKBERRY); Mockito.verify(state, Mockito.never()).removeFromDataSource(anyString()); + + // After a commit, the original value should have changed + assertThat(state.getOriginalValue(B_KEY)).isEqualTo(BLACKBERRY); } } @@ -501,12 +530,18 @@ void removeUnknown() { assertThat(state.modifiedKeys()).hasSize(1); assertThat(state.modifiedKeys()).contains(C_KEY); + // The original value should not exist + assertThat(state.getOriginalValue(C_KEY)).isNull(); + // Commit should cause the value to be removed (even though it doesn't actually exist in // the backend) state.commit(); Mockito.verify(state, Mockito.never()).putIntoDataSource(anyString(), anyString()); Mockito.verify(state, Mockito.times(1)).removeFromDataSource(anyString()); Mockito.verify(state, Mockito.times(1)).removeFromDataSource(C_KEY); + + // After a commit, the original value should still not exist + assertThat(state.getOriginalValue(C_KEY)).isNull(); } /** @@ -531,11 +566,17 @@ void removeKnown() { assertThat(state.modifiedKeys()).hasSize(1); assertThat(state.modifiedKeys()).contains(A_KEY); + // The original value should not have changed + assertThat(state.getOriginalValue(A_KEY)).isEqualTo(APPLE); + // Commit should cause the value to be removed state.commit(); Mockito.verify(state, Mockito.never()).putIntoDataSource(anyString(), anyString()); Mockito.verify(state, Mockito.times(1)).removeFromDataSource(anyString()); Mockito.verify(state, Mockito.times(1)).removeFromDataSource(A_KEY); + + // After a commit, the original value should have been removed + assertThat(state.getOriginalValue(A_KEY)).isNull(); } /** @@ -561,11 +602,17 @@ void removeTwice() { assertThat(state.modifiedKeys()).contains(B_KEY); } + // The original value should not have changed + assertThat(state.getOriginalValue(B_KEY)).isEqualTo(BANANA); + // Commit should cause the value to be removed state.commit(); Mockito.verify(state, Mockito.never()).putIntoDataSource(anyString(), anyString()); Mockito.verify(state, Mockito.times(1)).removeFromDataSource(anyString()); Mockito.verify(state, Mockito.times(1)).removeFromDataSource(B_KEY); + + // After a commit, the original value should have been removed + assertThat(state.getOriginalValue(B_KEY)).isNull(); } /** @@ -711,11 +758,17 @@ void removeAfterPut() { assertThat(state.modifiedKeys()).hasSize(1); assertThat(state.modifiedKeys()).contains(A_KEY); + // The original value should not have changed + assertThat(state.getOriginalValue(A_KEY)).isEqualTo(APPLE); + // Commit should cause the value to be removed but not "put" state.commit(); Mockito.verify(state, Mockito.never()).putIntoDataSource(anyString(), anyString()); Mockito.verify(state, Mockito.times(1)).removeFromDataSource(anyString()); Mockito.verify(state, Mockito.times(1)).removeFromDataSource(A_KEY); + + // 
After a commit, the original value should have been removed + assertThat(state.getOriginalValue(A_KEY)).isNull(); } } @@ -837,9 +890,25 @@ void reset() { assertThat(state.modifiedKeys()).contains(E_KEY); assertThat(state.modifiedKeys()).contains(F_KEY); + // The original values should not have changed + assertThat(state.getOriginalValue(A_KEY)).isEqualTo(APPLE); + assertThat(state.getOriginalValue(B_KEY)).isEqualTo(BANANA); + assertThat(state.getOriginalValue(C_KEY)).isNull(); + assertThat(state.getOriginalValue(D_KEY)).isNull(); + assertThat(state.getOriginalValue(E_KEY)).isNull(); + assertThat(state.getOriginalValue(F_KEY)).isNull(); + state.reset(); assertThat(state.readKeys()).isEmpty(); assertThat(state.modifiedKeys()).isEmpty(); + + // After a reset, the original value should not have changed + assertThat(state.getOriginalValue(A_KEY)).isEqualTo(APPLE); + assertThat(state.getOriginalValue(B_KEY)).isEqualTo(BANANA); + assertThat(state.getOriginalValue(C_KEY)).isNull(); + assertThat(state.getOriginalValue(D_KEY)).isNull(); + assertThat(state.getOriginalValue(E_KEY)).isNull(); + assertThat(state.getOriginalValue(F_KEY)).isNull(); } /** diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableKVStateStack.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableKVStateStack.java index 7069d2ba06ef..d3faed4997cc 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableKVStateStack.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableKVStateStack.java @@ -83,6 +83,12 @@ public V getForModify(@NonNull final K key) { return getCurrent().getForModify(key); } + @Nullable + @Override + public V getOriginalValue(@NonNull K key) { + return getCurrent().getOriginalValue(key); + } + @Override public void put(@NonNull final K key, @NonNull final V value) { getCurrent().put(key, value); From a696579d8fb7c185a88fe0bf7c218890f0d181d2 Mon Sep 17 00:00:00 2001 From: Michael Heinrichs Date: Wed, 21 Jun 2023 08:53:50 +0200 Subject: [PATCH 65/70] Added WrappedWritableQueueState and WritableQueueStateStack (#7123) Signed-off-by: Michael Heinrichs Co-authored-by: Hendrik Ebbers --- .../spi/state/WrappedWritableQueueState.java | 61 ++++++++++++ .../node/app/state/WrappedWritableStates.java | 6 +- .../handle/stack/WritableQueueStateStack.java | 96 +++++++++++++++++++ .../stack/WritableSingletonStateStack.java | 2 +- .../handle/stack/WritableStatesStack.java | 2 +- 5 files changed, 164 insertions(+), 3 deletions(-) create mode 100644 hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/WrappedWritableQueueState.java create mode 100644 hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableQueueStateStack.java diff --git a/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/WrappedWritableQueueState.java b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/WrappedWritableQueueState.java new file mode 100644 index 000000000000..e040781a451c --- /dev/null +++ b/hedera-node/hedera-app-spi/src/main/java/com/hedera/node/app/spi/state/WrappedWritableQueueState.java @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.spi.state; + +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Iterator; + +/** + * An implementation of {@link WritableQueueState} that delegates to another {@link WritableQueueState} as + * though it were the backend data source. Modifications to this {@link WrappedWritableKVState} are + * buffered, along with reads, allowing code to rollback by simply throwing away the wrapper. + * + * @param The type of element in the queue. + */ +public class WrappedWritableQueueState extends WritableQueueStateBase { + + private final WritableQueueState delegate; + + /** + * Create a new instance that will treat the given {@code delegate} as the backend data source. + * Note that the lifecycle of the delegate MUST be as long as, or longer than, the + * lifecycle of this instance. If the delegate is reset or decommissioned while being used as a + * delegate, bugs will occur. + * + * @param delegate The delegate. Must not be null. + */ + public WrappedWritableQueueState(@NonNull final WritableQueueState delegate) { + super(delegate.getStateKey()); + this.delegate = delegate; + } + + @Override + protected void addToDataSource(@NonNull final E element) { + delegate.add(element); + } + + @Override + protected void removeFromDataSource() { + delegate.poll(); + } + + @NonNull + @Override + protected Iterator iterateOnDataSource() { + return delegate.iterator(); + } +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/WrappedWritableStates.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/WrappedWritableStates.java index e6cc2f08b3c3..0bcf57bfa290 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/WrappedWritableStates.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/state/WrappedWritableStates.java @@ -19,6 +19,7 @@ import static java.util.Objects.requireNonNull; import com.hedera.node.app.spi.state.WrappedWritableKVState; +import com.hedera.node.app.spi.state.WrappedWritableQueueState; import com.hedera.node.app.spi.state.WrappedWritableSingletonState; import com.hedera.node.app.spi.state.WritableKVState; import com.hedera.node.app.spi.state.WritableQueueState; @@ -38,6 +39,7 @@ public class WrappedWritableStates implements WritableStates { private final WritableStates delegate; private final Map> writableKVStateMap = new HashMap<>(); private final Map> writableSingletonStateMap = new HashMap<>(); + private final Map> writableQueueStateMap = new HashMap<>(); /** * Constructs a {@link WrappedWritableStates} that wraps the given {@link WritableStates}. 
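For context, the new WrappedWritableQueueState buffers writes against its delegate and rolls back simply by being discarded. A minimal usage sketch follows; the helper name addAtomically, the String element type, and the commit() call (assumed to be inherited from WritableQueueStateBase, by analogy with the KV wrapper exercised in the tests above) are illustrative assumptions rather than code from this patch.

    // Illustrative sketch only: stage an element against a wrapped queue state and
    // apply it only if the surrounding work succeeds; discarding `wrapped` rolls back.
    static void addAtomically(final WritableQueueState<String> backing, final String element) {
        final WrappedWritableQueueState<String> wrapped = new WrappedWritableQueueState<>(backing);
        wrapped.add(element); // buffered in the wrapper; `backing` is not modified yet
        // ... further work that might fail would go here; throwing away `wrapped` undoes the add ...
        wrapped.commit(); // assumed: commit() from WritableQueueStateBase flushes buffered changes to `backing`
    }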
@@ -76,10 +78,12 @@ public WritableSingletonState getSingleton(@NonNull String stateKey) { stateKey, s -> new WrappedWritableSingletonState<>(delegate.getSingleton(stateKey))); } + @SuppressWarnings("unchecked") @Override @NonNull public WritableQueueState getQueue(@NonNull String stateKey) { - throw new UnsupportedOperationException("getQueue is not supported"); + return (WritableQueueState) writableQueueStateMap.computeIfAbsent( + stateKey, s -> new WrappedWritableQueueState<>(delegate.getQueue(stateKey))); } /** diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableQueueStateStack.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableQueueStateStack.java new file mode 100644 index 000000000000..c059646d0498 --- /dev/null +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableQueueStateStack.java @@ -0,0 +1,96 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hedera.node.app.workflows.handle.stack; + +import static java.util.Objects.requireNonNull; + +import com.hedera.node.app.spi.state.WritableQueueState; +import com.hedera.node.app.spi.state.WritableSingletonState; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Iterator; +import java.util.function.Predicate; + +/** + * An implementation of {@link WritableQueueState} that delegates to the current {@link WritableSingletonState} in a + * {@link com.hedera.node.app.spi.workflows.HandleContext.SavepointStack}. + * + *
+ * A {@link com.hedera.node.app.spi.workflows.HandleContext.SavepointStack} consists of a stack of frames, each of
+ * which contains a set of modifications in regard to the state of the underlying frame. On the top of the stack is the
+ * most recent state. This class delegates to the current {@link WritableQueueState} on top of such a stack.
+ *
+ *
All changes made to the {@link WritableQueueStateStack} are applied to the frame on top of the stack. + * Consequently, all frames added later on top of the current frame will see the changes. If the frame is removed + * however, the changes are lost. + * + * @param The type of element held in the queue. + */ +public class WritableQueueStateStack implements WritableQueueState { + + private final WritableStatesStack writableStatesStack; + private final String stateKey; + + /** + * Constructs a {@link WritableQueueStateStack} that delegates to the current {@link WritableQueueState} in + * the given {@link WritableStatesStack} for the given state key. A {@link WritableStatesStack} is an implementation + * of {@link com.hedera.node.app.spi.state.WritableStates} that delegates to the most recent version in a + * {@link com.hedera.node.app.spi.workflows.HandleContext.SavepointStack} + * + * @param writableStatesStack the {@link WritableStatesStack} + * @param stateKey the state key + * @throws NullPointerException if any of the arguments is {@code null} + */ + public WritableQueueStateStack( + @NonNull final WritableStatesStack writableStatesStack, @NonNull final String stateKey) { + this.writableStatesStack = requireNonNull(writableStatesStack, "writableStatesStack must not be null"); + this.stateKey = requireNonNull(stateKey, "stateKey must not be null"); + } + + @NonNull + private WritableQueueState getCurrent() { + return writableStatesStack.getCurrent().getQueue(stateKey); + } + + @NonNull + @Override + public String getStateKey() { + return stateKey; + } + + @Nullable + @Override + public E peek() { + return getCurrent().peek(); + } + + @NonNull + @Override + public Iterator iterator() { + return getCurrent().iterator(); + } + + @Override + public void add(@NonNull E element) { + getCurrent().add(element); + } + + @Nullable + @Override + public E removeIf(@NonNull Predicate predicate) { + return getCurrent().removeIf(predicate); + } +} diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableSingletonStateStack.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableSingletonStateStack.java index ffea63f3c6f6..636c5adf1cec 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableSingletonStateStack.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableSingletonStateStack.java @@ -65,7 +65,7 @@ private WritableSingletonState getCurrent() { @Override @NonNull public String getStateKey() { - return getCurrent().getStateKey(); + return stateKey; } @Override diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableStatesStack.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableStatesStack.java index e662e6466800..fc1911f2a676 100644 --- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableStatesStack.java +++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/handle/stack/WritableStatesStack.java @@ -77,7 +77,7 @@ public WritableSingletonState getSingleton(@NonNull final String stateKey @NonNull @Override public WritableQueueState getQueue(@NonNull String stateKey) { - throw new UnsupportedOperationException("getQueue is not supported yet"); + return new WritableQueueStateStack<>(this, stateKey); } @Override From 229732c645dc349e94500cf7ccede8495dc9d577 Mon Sep 17 00:00:00 2001 From: 
Hendrik Ebbers
Date: Wed, 21 Jun 2023 14:59:43 +0200
Subject: [PATCH 66/70] Documentation: same ordinal for config sources (#6933)

Signed-off-by: Hendrik Ebbers

---
 platform-sdk/docs/base/configuration/configuration.md | 3 ++-
 .../main/java/com/swirlds/config/api/source/ConfigSource.java | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/platform-sdk/docs/base/configuration/configuration.md b/platform-sdk/docs/base/configuration/configuration.md
index a1eedfa208f3..d7b8b1f864f1 100644
--- a/platform-sdk/docs/base/configuration/configuration.md
+++ b/platform-sdk/docs/base/configuration/configuration.md
@@ -381,7 +381,8 @@ When creating a new config source it often makes sense to overwrite the default
 method. The method returns an ordinal number that is used internally to sort all config sources. Here a config source
 with a higher ordinal number will overwrite properties of all config sources with a smaller ordinal number. The
 internal class `com.swirlds.config.impl.sources.ConfigSourceOrdinalConstants` provides constant ordinal numbers for all default
-implementations of config sources and can be used as a reference for custom ordinals.
+implementations of config sources and can be used as a reference for custom ordinals. If two config sources have the same ordinal
+number, the API does 
* * @return the ordinal */ From 17cad833f62766b79764cc1f122fa859e7ceb403 Mon Sep 17 00:00:00 2001 From: Hendrik Ebbers Date: Wed, 21 Jun 2023 15:00:29 +0200 Subject: [PATCH 67/70] some small refactoring of the Uninterruptable class (#7147) Signed-off-by: Hendrik Ebbers --- .../threading/interrupt/Uninterruptable.java | 47 ++++++++----------- 1 file changed, 20 insertions(+), 27 deletions(-) diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/interrupt/Uninterruptable.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/interrupt/Uninterruptable.java index 9ac013a3d386..5f2d7db1f9e1 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/interrupt/Uninterruptable.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/threading/interrupt/Uninterruptable.java @@ -61,20 +61,10 @@ private Uninterruptable() {} */ public static void retryIfInterrupted(@NonNull final InterruptableRunnable action) { Objects.requireNonNull(action, "action"); - boolean finished = false; - boolean interrupted = false; - while (!finished) { - try { - action.run(); - finished = true; - } catch (final InterruptedException e) { - interrupted = true; - } - } - - if (interrupted) { - Thread.currentThread().interrupt(); - } + retryIfInterrupted(() -> { + action.run(); + return null; + }); } /** @@ -143,11 +133,12 @@ public static void abortIfInterrupted(@NonNull final InterruptableRunnable actio */ public static void abortAndLogIfInterrupted( @NonNull final InterruptableRunnable action, @NonNull final String errorMessage) { - Objects.requireNonNull(action, "action"); + Objects.requireNonNull(action, "action must not be null"); + Objects.requireNonNull(errorMessage, "errorMessage must not be null"); try { action.run(); } catch (final InterruptedException e) { - logger.error(EXCEPTION.getMarker(), errorMessage); + logger.error(EXCEPTION.getMarker(), errorMessage, e); Thread.currentThread().interrupt(); } } @@ -170,14 +161,13 @@ public static void abortAndLogIfInterrupted( @NonNull final CheckedConsumer consumer, @Nullable final T object, @NonNull final String errorMessage) { - - Objects.requireNonNull(consumer, "consumer"); - Objects.requireNonNull(errorMessage, "errorMessage"); + Objects.requireNonNull(consumer, "consumer must not be null"); + Objects.requireNonNull(errorMessage, "errorMessage must not be null"); try { consumer.accept(object); } catch (final InterruptedException e) { - logger.error(EXCEPTION.getMarker(), errorMessage); + logger.error(EXCEPTION.getMarker(), errorMessage, e); Thread.currentThread().interrupt(); } } @@ -199,13 +189,15 @@ public static void abortAndLogIfInterrupted( */ public static void abortAndThrowIfInterrupted( @NonNull final InterruptableRunnable action, @NonNull final String errorMessage) { - Objects.requireNonNull(action, "action"); + Objects.requireNonNull(action, "action must not be null"); + Objects.requireNonNull(errorMessage, "errorMessage must not be null"); + try { action.run(); } catch (final InterruptedException e) { - logger.error(EXCEPTION.getMarker(), errorMessage); + logger.error(EXCEPTION.getMarker(), errorMessage, e); Thread.currentThread().interrupt(); - throw new IllegalStateException(errorMessage); + throw new IllegalStateException(errorMessage, e); } } @@ -228,15 +220,15 @@ public static void abortAndThrowIfInterrupted( @Nullable final T object, @NonNull final String errorMessage) { - Objects.requireNonNull(consumer, "consumer"); - Objects.requireNonNull(errorMessage, 
"errorMessage"); + Objects.requireNonNull(consumer, "consumer must not be null"); + Objects.requireNonNull(errorMessage, "errorMessage must not be null"); try { consumer.accept(object); } catch (final InterruptedException e) { - logger.error(EXCEPTION.getMarker(), errorMessage); + logger.error(EXCEPTION.getMarker(), errorMessage, e); Thread.currentThread().interrupt(); - throw new IllegalStateException(errorMessage); + throw new IllegalStateException(errorMessage, e); } } @@ -246,6 +238,7 @@ public static void abortAndThrowIfInterrupted( * @param duration the amount of time to sleep */ public static void tryToSleep(@NonNull final Duration duration) { + Objects.requireNonNull(duration, "duration must not be null"); abortIfInterrupted(() -> MILLISECONDS.sleep(duration.toMillis())); } } From 3749373ed968da065c82a04d59420198957a15aa Mon Sep 17 00:00:00 2001 From: Lev Povolotsky <16233475+povolev15@users.noreply.github.com> Date: Wed, 21 Jun 2023 09:49:32 -0400 Subject: [PATCH 68/70] add topic bi-directional translator (#7192) Signed-off-by: Lev Povolotsky --- .../ConsensusServiceStateTranslator.java | 124 +++++++++++ .../impl/test/ReadableTopicStoreImplTest.java | 4 +- .../impl/test/WritableTopicStoreTest.java | 4 +- .../ConsensusServiceStateTranslatorTest.java | 203 ++++++++++++++++++ ...est.java => ConsensusCreateTopicTest.java} | 2 +- ...est.java => ConsensusDeleteTopicTest.java} | 2 +- ...st.java => ConsensusGetTopicInfoTest.java} | 2 +- ... => ConsensusSubmitMessageParityTest.java} | 2 +- ...t.java => ConsensusSubmitMessageTest.java} | 2 +- ...erTestBase.java => ConsensusTestBase.java} | 30 ++- ...est.java => ConsensusUpdateTopicTest.java} | 2 +- 11 files changed, 365 insertions(+), 12 deletions(-) create mode 100644 hedera-node/hedera-consensus-service-impl/src/main/java/com/hedera/node/app/service/consensus/impl/codecs/ConsensusServiceStateTranslator.java create mode 100644 hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/codecs/ConsensusServiceStateTranslatorTest.java rename hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/{ConsensusCreateTopicHandlerTest.java => ConsensusCreateTopicTest.java} (99%) rename hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/{ConsensusDeleteTopicHandlerTest.java => ConsensusDeleteTopicTest.java} (99%) rename hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/{ConsensusGetTopicInfoHandlerTest.java => ConsensusGetTopicInfoTest.java} (99%) rename hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/{ConsensusSubmitMessageHandlerParityTest.java => ConsensusSubmitMessageParityTest.java} (98%) rename hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/{ConsensusSubmitMessageHandlerTest.java => ConsensusSubmitMessageTest.java} (99%) rename hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/{ConsensusHandlerTestBase.java => ConsensusTestBase.java} (89%) rename hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/{ConsensusUpdateTopicHandlerTest.java => ConsensusUpdateTopicTest.java} (99%) diff --git 
a/hedera-node/hedera-consensus-service-impl/src/main/java/com/hedera/node/app/service/consensus/impl/codecs/ConsensusServiceStateTranslator.java b/hedera-node/hedera-consensus-service-impl/src/main/java/com/hedera/node/app/service/consensus/impl/codecs/ConsensusServiceStateTranslator.java
new file mode 100644
index 000000000000..d7d816154836
--- /dev/null
+++ b/hedera-node/hedera-consensus-service-impl/src/main/java/com/hedera/node/app/service/consensus/impl/codecs/ConsensusServiceStateTranslator.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright (C) 2023 Hedera Hashgraph, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.hedera.node.app.service.consensus.impl.codecs;
+
+import static java.util.Objects.requireNonNull;
+
+import com.hedera.hapi.node.base.Key;
+import com.hedera.hapi.node.base.TopicID;
+import com.hedera.hapi.node.state.consensus.Topic;
+import com.hedera.node.app.service.consensus.ReadableTopicStore;
+import com.hedera.node.app.service.mono.legacy.core.jproto.JKey;
+import com.hedera.node.app.service.mono.pbj.PbjConverter;
+import com.hedera.node.app.service.mono.utils.EntityNum;
+import com.hedera.node.app.spi.state.WritableKVState;
+import com.hedera.pbj.runtime.io.buffer.Bytes;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import java.util.function.BiConsumer;
+
+/**
+ * Provides static methods to migrate the state of the consensus service from the merkle state to the pbj state and vice versa.
+ */
+public class ConsensusServiceStateTranslator {
+
+ @NonNull
+ /**
+ * Migrates the state of the consensus service from the merkle state to the pbj state.
+ */
+ public static void migrateFromMerkleToPbj(
+ com.swirlds.merkle.map.MerkleMap<
+ com.hedera.node.app.service.mono.utils.EntityNum,
+ com.hedera.node.app.service.mono.state.merkle.MerkleTopic>
+ monoTopics,
+ WritableKVState appTopics) {
+ com.hedera.node.app.service.mono.state.adapters.MerkleMapLike.from(monoTopics)
+ .forEachNode(new PutConvertedTopic(appTopics));
+ }
+
+ @NonNull
+ /**
+ * Converts a mono-service merkle topic into its pbj {@link Topic} representation. 
+ */ + public static Topic stateToPbj(@NonNull com.hedera.node.app.service.mono.state.merkle.MerkleTopic monoTopic) { + requireNonNull(monoTopic); + final var topicBuilder = new Topic.Builder(); + topicBuilder.memo(monoTopic.getMemo()); + if (monoTopic.hasAdminKey()) topicBuilder.adminKey(PbjConverter.asPbjKey(monoTopic.getAdminKey())); + if (monoTopic.hasSubmitKey()) topicBuilder.submitKey(PbjConverter.asPbjKey(monoTopic.getSubmitKey())); + topicBuilder.autoRenewPeriod(monoTopic.getAutoRenewDurationSeconds()); + topicBuilder.autoRenewAccountNumber(monoTopic.getAutoRenewAccountId().num()); + topicBuilder.expiry(monoTopic.getExpirationTimestamp().getSeconds()); + topicBuilder.runningHash(Bytes.wrap(monoTopic.getRunningHash())); + topicBuilder.sequenceNumber(monoTopic.getSequenceNumber()); + topicBuilder.deleted(monoTopic.isDeleted()); + topicBuilder.topicNumber(monoTopic.getAutoRenewAccountId().num()); + + return topicBuilder.build(); + } + + @NonNull + /** + * Migrates the state of the consensus service from the pbj state to the merkle state. + */ + public static com.hedera.node.app.service.mono.state.merkle.MerkleTopic pbjToState( + @NonNull TopicID topicID, @NonNull ReadableTopicStore readableTopicStore) { + requireNonNull(topicID); + requireNonNull(readableTopicStore); + final var optionalFile = readableTopicStore.getTopicLeaf(topicID); + return pbjToState(optionalFile.orElseThrow(() -> new IllegalArgumentException("Topic not found"))); + } + + @NonNull + /** + * Migrates the state of the consensus service from the pbj state to the merkle state. + */ + public static com.hedera.node.app.service.mono.state.merkle.MerkleTopic pbjToState(@NonNull Topic topic) { + requireNonNull(topic); + final com.hedera.node.app.service.mono.state.merkle.MerkleTopic monoTopic = + new com.hedera.node.app.service.mono.state.merkle.MerkleTopic( + topic.memo(), + (JKey) com.hedera.node.app.service.mono.pbj.PbjConverter.fromPbjKey( + topic.adminKeyOrElse(Key.DEFAULT)) + .orElse(null), + (JKey) com.hedera.node.app.service.mono.pbj.PbjConverter.fromPbjKey( + topic.submitKeyOrElse(Key.DEFAULT)) + .orElse(null), + topic.autoRenewPeriod(), + new com.hedera.node.app.service.mono.state.submerkle.EntityId( + 0, 0, topic.autoRenewAccountNumber()), + new com.hedera.node.app.service.mono.state.submerkle.RichInstant(topic.expiry(), 0)); + monoTopic.setRunningHash(PbjConverter.asBytes(topic.runningHash())); + monoTopic.setSequenceNumber(topic.sequenceNumber()); + monoTopic.setDeleted(topic.deleted()); + return monoTopic; + } + + private static class PutConvertedTopic + implements BiConsumer { + private final WritableKVState appTopics; + + public PutConvertedTopic(WritableKVState appTopics) { + this.appTopics = appTopics; + } + + @Override + public void accept(EntityNum entityNum, com.hedera.node.app.service.mono.state.merkle.MerkleTopic merkleTopic) { + final var pbjTopic = stateToPbj(merkleTopic); + appTopics.put(new TopicID(0, 0, pbjTopic.topicNumber()), pbjTopic); + } + } +} diff --git a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/ReadableTopicStoreImplTest.java b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/ReadableTopicStoreImplTest.java index d17c97fcc6e6..5d72c7fbef21 100644 --- a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/ReadableTopicStoreImplTest.java +++ 
b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/ReadableTopicStoreImplTest.java @@ -29,13 +29,13 @@ import com.hedera.hapi.node.state.consensus.Topic; import com.hedera.node.app.service.consensus.ReadableTopicStore; import com.hedera.node.app.service.consensus.impl.ReadableTopicStoreImpl; -import com.hedera.node.app.service.consensus.impl.test.handlers.ConsensusHandlerTestBase; +import com.hedera.node.app.service.consensus.impl.test.handlers.ConsensusTestBase; import com.hedera.node.app.service.mono.utils.EntityNum; import com.hedera.node.app.spi.fixtures.state.MapReadableKVState; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -class ReadableTopicStoreImplTest extends ConsensusHandlerTestBase { +class ReadableTopicStoreImplTest extends ConsensusTestBase { private ReadableTopicStore subject; @BeforeEach diff --git a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/WritableTopicStoreTest.java b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/WritableTopicStoreTest.java index 5de2b1a49e57..4cfbf1f52405 100644 --- a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/WritableTopicStoreTest.java +++ b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/WritableTopicStoreTest.java @@ -24,13 +24,13 @@ import com.hedera.hapi.node.state.consensus.Topic; import com.hedera.node.app.service.consensus.impl.WritableTopicStore; -import com.hedera.node.app.service.consensus.impl.test.handlers.ConsensusHandlerTestBase; +import com.hedera.node.app.service.consensus.impl.test.handlers.ConsensusTestBase; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class WritableTopicStoreTest extends ConsensusHandlerTestBase { +class WritableTopicStoreTest extends ConsensusTestBase { private Topic topic; @Test diff --git a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/codecs/ConsensusServiceStateTranslatorTest.java b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/codecs/ConsensusServiceStateTranslatorTest.java new file mode 100644 index 000000000000..916e4ae22c6e --- /dev/null +++ b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/codecs/ConsensusServiceStateTranslatorTest.java @@ -0,0 +1,203 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.hedera.node.app.service.consensus.impl.test.codecs; + +import static com.hedera.node.app.service.consensus.impl.ConsensusServiceImpl.TOPICS_KEY; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import com.hedera.hapi.node.base.Key; +import com.hedera.hapi.node.base.TopicID; +import com.hedera.hapi.node.state.consensus.Topic; +import com.hedera.node.app.service.consensus.impl.codecs.ConsensusServiceStateTranslator; +import com.hedera.node.app.service.consensus.impl.test.handlers.ConsensusTestBase; +import com.hedera.node.app.service.mono.state.merkle.MerkleTopic; +import com.hedera.node.app.spi.fixtures.state.MapWritableKVState; +import com.hedera.node.app.spi.state.WritableKVState; +import com.swirlds.merkle.map.MerkleMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.List; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +public class ConsensusServiceStateTranslatorTest extends ConsensusTestBase { + + @BeforeEach + void setUp() {} + + @Test + void createMerkleTopicFromTopic() { + final var existingTopic = readableStore.getTopic(topicId); + assertFalse(existingTopic.deleted()); + + final com.hedera.node.app.service.mono.state.merkle.MerkleTopic convertedTopic = + ConsensusServiceStateTranslator.pbjToState(topic); + + assertMatch(convertedTopic, getExpectedMonoTopic()); + } + + @Test + void createMerkleTopicFromTopicWithEmptyKeys() { + final var existingTopic = readableStore.getTopic(topicId); + assertFalse(existingTopic.deleted()); + + final com.hedera.node.app.service.mono.state.merkle.MerkleTopic convertedTopic = + ConsensusServiceStateTranslator.pbjToState(topicNoKeys); + + assertMatch(convertedTopic, getExpectedMonoTopicNoKeys()); + } + + @Test + void createMerkleTopicFromReadableTopicStore() { + final com.hedera.node.app.service.mono.state.merkle.MerkleTopic convertedTopic = + ConsensusServiceStateTranslator.pbjToState(topicId, readableStore); + + assertMatch(convertedTopic, getExpectedMonoTopic()); + } + + @Test + void createTopicFromMerkleTopic() { + com.hedera.node.app.service.mono.state.merkle.MerkleTopic merkleTopic = getMerkleTopic( + (com.hedera.node.app.service.mono.legacy.core.jproto.JKey) + com.hedera.node.app.service.mono.pbj.PbjConverter.fromPbjKey(topic.adminKeyOrElse(Key.DEFAULT)) + .orElse(null), + (com.hedera.node.app.service.mono.legacy.core.jproto.JKey) + com.hedera.node.app.service.mono.pbj.PbjConverter.fromPbjKey(topic.submitKeyOrElse(Key.DEFAULT)) + .orElse(null)); + + final Topic convertedTopic = ConsensusServiceStateTranslator.stateToPbj(merkleTopic); + + assertEquals(createTopic(), convertedTopic); + } + + @Test + void createTopicFromMerkleTopicEmptyKeys() { + com.hedera.node.app.service.mono.state.merkle.MerkleTopic merkleTopic = getMerkleTopic(null, null); + + final Topic convertedTopic = ConsensusServiceStateTranslator.stateToPbj(merkleTopic); + + assertEquals(createTopicEmptyKeys(), convertedTopic); + } + + @Test + void createFileFromFileIDAndHederaFs() { + com.swirlds.merkle.map.MerkleMap< + com.hedera.node.app.service.mono.utils.EntityNum, + com.hedera.node.app.service.mono.state.merkle.MerkleTopic> + monoTopics = new MerkleMap<>(); + WritableKVState appTopics = new MapWritableKVState<>(TOPICS_KEY) { + private final List keys = new 
ArrayList<>(); + + @Override + protected void putIntoDataSource(@NonNull TopicID key, @NonNull Topic value) { + keys.add(key); + super.putIntoDataSource(key, value); + } + + @Override + protected void removeFromDataSource(@NonNull TopicID key) { + keys.add(key); + super.removeFromDataSource(key); + } + }; + + com.hedera.node.app.service.mono.state.merkle.MerkleTopic merkleTopic = getMerkleTopic( + (com.hedera.node.app.service.mono.legacy.core.jproto.JKey) + com.hedera.node.app.service.mono.pbj.PbjConverter.fromPbjKey(topic.adminKeyOrElse(Key.DEFAULT)) + .orElse(null), + (com.hedera.node.app.service.mono.legacy.core.jproto.JKey) + com.hedera.node.app.service.mono.pbj.PbjConverter.fromPbjKey(topic.submitKeyOrElse(Key.DEFAULT)) + .orElse(null)); + monoTopics.put(topicEntityNum, merkleTopic); + refreshStoresWithCurrentTopicOnlyInReadable(); + ConsensusServiceStateTranslator.migrateFromMerkleToPbj(monoTopics, appTopics); + + final Topic convertedTopic = appTopics.get(topicId); + + assertEquals(createTopic(), convertedTopic); + } + + private void assertMatch(MerkleTopic expected, MerkleTopic actual) { + assertEquals(expected.getMemo(), actual.getMemo()); + assertEquals(expected.getAdminKey(), actual.getAdminKey()); + assertEquals(expected.getSubmitKey(), actual.getSubmitKey()); + assertEquals(expected.getExpirationTimestamp(), actual.getExpirationTimestamp()); + assertEquals(expected.getAutoRenewDurationSeconds(), actual.getAutoRenewDurationSeconds()); + assertEquals(expected.getAutoRenewAccountId(), actual.getAutoRenewAccountId()); + assertEquals(expected.getAutoRenewDurationSeconds(), actual.getAutoRenewDurationSeconds()); + assertEquals(expected.isDeleted(), actual.isDeleted()); + assertEquals(expected.getSequenceNumber(), actual.getSequenceNumber()); + } + + private com.hedera.node.app.service.mono.state.merkle.MerkleTopic getExpectedMonoTopic() { + com.hedera.node.app.service.mono.state.merkle.MerkleTopic merkleTopic = + new com.hedera.node.app.service.mono.state.merkle.MerkleTopic(); + merkleTopic.setMemo(topic.memo()); + merkleTopic.setExpirationTimestamp( + new com.hedera.node.app.service.mono.state.submerkle.RichInstant(topic.expiry(), 0)); + merkleTopic.setAdminKey((com.hedera.node.app.service.mono.legacy.core.jproto.JKey) + com.hedera.node.app.service.mono.pbj.PbjConverter.fromPbjKey(topic.adminKeyOrElse(Key.DEFAULT)) + .orElse(null)); + merkleTopic.setSubmitKey((com.hedera.node.app.service.mono.legacy.core.jproto.JKey) + com.hedera.node.app.service.mono.pbj.PbjConverter.fromPbjKey(topic.submitKeyOrElse(Key.DEFAULT)) + .orElse(null)); + merkleTopic.setAutoRenewDurationSeconds(topic.autoRenewPeriod()); + merkleTopic.setDeleted(topic.deleted()); + merkleTopic.setSequenceNumber(topic.sequenceNumber()); + merkleTopic.setAutoRenewAccountId( + new com.hedera.node.app.service.mono.state.submerkle.EntityId(0, 0, topic.autoRenewAccountNumber())); + return merkleTopic; + } + + private com.hedera.node.app.service.mono.state.merkle.MerkleTopic getExpectedMonoTopicNoKeys() { + com.hedera.node.app.service.mono.state.merkle.MerkleTopic merkleTopic = + new com.hedera.node.app.service.mono.state.merkle.MerkleTopic(); + merkleTopic.setMemo(topic.memo()); + merkleTopic.setExpirationTimestamp( + new com.hedera.node.app.service.mono.state.submerkle.RichInstant(topic.expiry(), 0)); + merkleTopic.setAutoRenewDurationSeconds(topic.autoRenewPeriod()); + merkleTopic.setDeleted(topic.deleted()); + merkleTopic.setSequenceNumber(topic.sequenceNumber()); + merkleTopic.setAutoRenewAccountId( + new 
com.hedera.node.app.service.mono.state.submerkle.EntityId(0, 0, topic.autoRenewAccountNumber())); + return merkleTopic; + } + + private com.hedera.node.app.service.mono.state.merkle.MerkleTopic getMerkleTopic( + com.hedera.node.app.service.mono.legacy.core.jproto.JKey adminKey, + com.hedera.node.app.service.mono.legacy.core.jproto.JKey submitKey) { + com.hedera.node.app.service.mono.state.merkle.MerkleTopic merkleTopic = + new com.hedera.node.app.service.mono.state.merkle.MerkleTopic(); + merkleTopic.setMemo(topic.memo()); + merkleTopic.setExpirationTimestamp( + new com.hedera.node.app.service.mono.state.submerkle.RichInstant(topic.expiry(), 0)); + merkleTopic.setAdminKey(adminKey); + merkleTopic.setSubmitKey(submitKey); + merkleTopic.setAutoRenewDurationSeconds(topic.autoRenewPeriod()); + merkleTopic.setDeleted(true); + merkleTopic.setSequenceNumber(topic.sequenceNumber()); + merkleTopic.setAutoRenewAccountId( + new com.hedera.node.app.service.mono.state.submerkle.EntityId(0, 0, autoRenewId.accountNum())); + merkleTopic.setRunningHash(runningHash); + + return merkleTopic; + } +} diff --git a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusCreateTopicHandlerTest.java b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusCreateTopicTest.java similarity index 99% rename from hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusCreateTopicHandlerTest.java rename to hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusCreateTopicTest.java index 1440db787bb4..d5f478267e38 100644 --- a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusCreateTopicHandlerTest.java +++ b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusCreateTopicTest.java @@ -65,7 +65,7 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class ConsensusCreateTopicHandlerTest extends ConsensusHandlerTestBase { +class ConsensusCreateTopicTest extends ConsensusTestBase { @Mock private ReadableAccountStore accountStore; diff --git a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusDeleteTopicHandlerTest.java b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusDeleteTopicTest.java similarity index 99% rename from hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusDeleteTopicHandlerTest.java rename to hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusDeleteTopicTest.java index 8ed8c9bc0df8..ae6aa87ff220 100644 --- a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusDeleteTopicHandlerTest.java +++ b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusDeleteTopicTest.java @@ -56,7 +56,7 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class ConsensusDeleteTopicHandlerTest extends ConsensusHandlerTestBase { +class 
ConsensusDeleteTopicTest extends ConsensusTestBase { @Mock private ReadableAccountStore accountStore; diff --git a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusGetTopicInfoHandlerTest.java b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusGetTopicInfoTest.java similarity index 99% rename from hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusGetTopicInfoHandlerTest.java rename to hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusGetTopicInfoTest.java index 912e83331158..99ddf6308971 100644 --- a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusGetTopicInfoHandlerTest.java +++ b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusGetTopicInfoTest.java @@ -60,7 +60,7 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class ConsensusGetTopicInfoHandlerTest extends ConsensusHandlerTestBase { +class ConsensusGetTopicInfoTest extends ConsensusTestBase { @Mock private QueryContext context; diff --git a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusSubmitMessageHandlerParityTest.java b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusSubmitMessageParityTest.java similarity index 98% rename from hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusSubmitMessageHandlerParityTest.java rename to hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusSubmitMessageParityTest.java index 0ceaf30eef7c..0c97832ceb26 100644 --- a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusSubmitMessageHandlerParityTest.java +++ b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusSubmitMessageParityTest.java @@ -40,7 +40,7 @@ import org.junit.jupiter.api.Test; import org.mockito.Mock; -class ConsensusSubmitMessageHandlerParityTest extends ConsensusHandlerTestBase { +class ConsensusSubmitMessageParityTest extends ConsensusTestBase { @Mock private ReadableAccountStore accountStore; diff --git a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusSubmitMessageHandlerTest.java b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusSubmitMessageTest.java similarity index 99% rename from hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusSubmitMessageHandlerTest.java rename to hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusSubmitMessageTest.java index 020770708c56..72d802b1cbb5 100644 --- a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusSubmitMessageHandlerTest.java +++ 
b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusSubmitMessageTest.java @@ -68,7 +68,7 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class ConsensusSubmitMessageHandlerTest extends ConsensusHandlerTestBase { +class ConsensusSubmitMessageTest extends ConsensusTestBase { @Mock private ReadableAccountStore accountStore; diff --git a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusHandlerTestBase.java b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusTestBase.java similarity index 89% rename from hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusHandlerTestBase.java rename to hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusTestBase.java index bd8dbd30f6cb..0bac0e157520 100644 --- a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusHandlerTestBase.java +++ b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusTestBase.java @@ -48,14 +48,14 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -public class ConsensusHandlerTestBase { +public class ConsensusTestBase { protected final Key key = A_COMPLEX_KEY; protected final Key anotherKey = B_COMPLEX_KEY; protected final String payerIdLiteral = "0.0.3"; protected final AccountID payerId = protoToPbj(asAccount(payerIdLiteral), AccountID.class); public static final AccountID anotherPayer = AccountID.newBuilder().accountNum(13257).build(); - protected final AccountID autoRenewId = AccountID.newBuilder().accountNum(4).build(); + protected final AccountID autoRenewId = AccountID.newBuilder().accountNum(1).build(); protected final byte[] runningHash = "runningHash".getBytes(); protected final Key adminKey = key; @@ -77,6 +77,8 @@ public class ConsensusHandlerTestBase { protected Topic topic; + protected Topic topicNoKeys; + @Mock protected ReadableStates readableStates; @@ -171,6 +173,17 @@ protected void givenValidTopic( memo, withAdminKey ? key : null, withSubmitKey ? 
key : null); + topicNoKeys = new Topic( + topicId.topicNum(), + sequenceNumber, + expirationTime, + autoRenewSecs, + autoRenewAccountNumber, + deleted, + Bytes.wrap(runningHash), + memo, + null, + null); } protected Topic createTopic() { @@ -187,4 +200,17 @@ protected Topic createTopic() { .runningHash(Bytes.wrap(runningHash)) .build(); } + + protected Topic createTopicEmptyKeys() { + return new Topic.Builder() + .topicNumber(topicId.topicNum()) + .autoRenewPeriod(autoRenewSecs) + .autoRenewAccountNumber(autoRenewId.accountNum()) + .expiry(expirationTime) + .sequenceNumber(sequenceNumber) + .memo(memo) + .deleted(true) + .runningHash(Bytes.wrap(runningHash)) + .build(); + } } diff --git a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusUpdateTopicHandlerTest.java b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusUpdateTopicTest.java similarity index 99% rename from hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusUpdateTopicHandlerTest.java rename to hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusUpdateTopicTest.java index 879e95b077e1..1c8bb4d0512c 100644 --- a/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusUpdateTopicHandlerTest.java +++ b/hedera-node/hedera-consensus-service-impl/src/test/java/com/hedera/node/app/service/consensus/impl/test/handlers/ConsensusUpdateTopicTest.java @@ -66,7 +66,7 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class ConsensusUpdateTopicHandlerTest extends ConsensusHandlerTestBase { +class ConsensusUpdateTopicTest extends ConsensusTestBase { private static final long maxLifetime = 3_000_000L; private final ConsensusUpdateTopicTransactionBody.Builder OP_BUILDER = From 8d74681d30af7522335a03f4771a5b87b2f31a44 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20Brandst=C3=A4tter?= Date: Wed, 21 Jun 2023 16:11:14 +0200 Subject: [PATCH 69/70] #7172 Move logging for false properties from settings to config (#7174) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Timo Brandstätter Co-authored-by: Hendrik Ebbers --- .../swirlds/common/config/BasicConfig.java | 1 + .../java/com/swirlds/platform/Browser.java | 4 +- .../swirlds/platform/SettingConstants.java | 144 ------------------ .../java/com/swirlds/platform/Settings.java | 46 ++---- .../config/{ => internal}/ConfigMappings.java | 6 +- .../config/internal/PlatformConfigUtils.java | 69 +++++++++ .../platform/config/ConfigMappingsTest.java | 1 + 7 files changed, 93 insertions(+), 178 deletions(-) rename platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/{ => internal}/ConfigMappings.java (96%) create mode 100644 platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/internal/PlatformConfigUtils.java diff --git a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java index 890a41e3e9b2..bcbcca272eca 100644 --- a/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java +++ b/platform-sdk/swirlds-common/src/main/java/com/swirlds/common/config/BasicConfig.java @@ -242,4 
+242,5 @@ public record BasicConfig( @ConfigProperty(value = "maxOutgoingSyncs", defaultValue = "2") int maxOutgoingSyncs, @ConfigProperty(value = "logPath", defaultValue = "log4j2.xml") Path logPath, @ConfigProperty(value = "hangingThreadDuration", defaultValue = "60s") Duration hangingThreadDuration, + @ConfigProperty(defaultValue = "data/saved") String emergencyRecoveryFileLoadDir, @ConfigProperty(value = "genesisFreezeTime", defaultValue = "0") long genesisFreezeTime) {} diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java index 6457faece36c..418ba701aee1 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Browser.java @@ -78,8 +78,9 @@ import com.swirlds.logging.payloads.NodeAddressMismatchPayload; import com.swirlds.logging.payloads.NodeStartPayload; import com.swirlds.platform.config.AddressBookConfig; -import com.swirlds.platform.config.ConfigMappings; import com.swirlds.platform.config.ThreadConfig; +import com.swirlds.platform.config.internal.ConfigMappings; +import com.swirlds.platform.config.internal.PlatformConfigUtils; import com.swirlds.platform.config.legacy.ConfigPropertiesSource; import com.swirlds.platform.config.legacy.LegacyConfigProperties; import com.swirlds.platform.config.legacy.LegacyConfigPropertiesLoader; @@ -246,6 +247,7 @@ private Browser(@NonNull final Set localNodesToStart) throws IOException } this.configuration = configurationBuilder.build(); + PlatformConfigUtils.logNotKnownConfigProperties(configuration); // Set the configuration on all SwirldMain instances. appMains.values().forEach(swirldMain -> swirldMain.setConfiguration(configuration)); diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java index ce2998eea511..6ee8c5975108 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/SettingConstants.java @@ -16,8 +16,6 @@ package com.swirlds.platform; -import java.util.Set; - /** * @deprecated will be replaced by the {@link com.swirlds.config.api.Configuration} API in near future. If you need * to use this class please try to do as less static access as possible. 
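The hunk below deletes the hard-coded REMOVED_SETTINGS list: after this patch, a property name counts as known when some registered config data record declares it (or when it appears among the ConfigMappings aliases), and any other name is reported at startup by PlatformConfigUtils.logNotKnownConfigProperties(configuration), which the Browser change above now invokes. As a rough illustration, a record of the following shape is what registers "example.enabled" as a known name; the record itself, the @ConfigData prefix annotation, and the property name are assumptions made for illustration, not code from this patch.

    // Illustrative sketch only: declaring a property in a config data record is what
    // makes it a known name for PlatformConfigUtils.logNotKnownConfigProperties().
    @ConfigData("example") // the @ConfigData prefix annotation is assumed from the swirlds config API
    public record ExampleConfig(
            @ConfigProperty(value = "enabled", defaultValue = "false") boolean enabled) {}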
@@ -70,147 +68,5 @@ public final class SettingConstants { static final int JVM_PAUSE_REPORT_MS_DEFAULT_VALUE = 1000; static final boolean GOSSIP_WITH_DIFFERENT_VERSIONS_DEFAULT_VALUE = false; - static final Set REMOVED_SETTINGS = Set.of( - "reconnect.active", - "reconnect.reconnectWindowSeconds", - "reconnect.fallenBehindThreshold", - "reconnect.asyncStreamTimeoutMilliseconds", - "reconnect.asyncOutputStreamFlushMilliseconds", - "reconnect.asyncStreamBufferSize", - "reconnect.asyncStreams", - "reconnect.maxAckDelayMilliseconds", - "reconnect.maximumReconnectFailuresBeforeShutdown", - "reconnect.minimumTimeBetweenReconnects", - "chatter.useChatter", - "chatter.attemptedChatterEventPerSecond", - "chatter.chatteringCreationThreshold", - "chatter.chatterIntakeThrottle", - "chatter.otherEventDelay", - "chatter.selfEventQueueCapacity", - "chatter.otherEventQueueCapacity", - "chatter.descriptorQueueCapacity", - "chatter.processingTimeInterval", - "chatter.heartbeatInterval", - "chatter.futureGenerationLimit", - "chatter.criticalQuorumSoftening", - "chatter.sleepAfterFailedNegotiation", - "fcHashMap.maximumGCQueueSize", - "fcHashMap.gCQueueThresholdPeriod", - "fcHashMap.archiveEnabled", - "fcHashMap.rebuildSplitFactor", - "fcHashMap.rebuildThreadCount", - "jasperDb.maxNumOfKeys", - "jasperDb.hashesRamToDiskThreshold", - "jasperDb.mediumMergeCutoffMb", - "jasperDb.smallMergeCutoffMb", - "jasperDb.mergePeriodUnit", - "jasperDb.maxNumberOfFilesInMerge", - "jasperDb.minNumberOfFilesInMerge", - "jasperDb.mergeActivatePeriod", - "jasperDb.mediumMergePeriod", - "jasperDb.fullMergePeriod", - "jasperDb.maxDataFileBytes", - "jasperDb.moveListChunkSize", - "jasperDb.maxRamUsedForMergingGb", - "jasperDb.iteratorInputBufferBytes", - "jasperDb.writerOutputBufferBytes", - "jasperDb.reconnectKeyLeakMitigationEnabled", - "jasperDb.keySetBloomFilterHashCount", - "jasperDb.keySetBloomFilterSizeInBytes", - "jasperDb.keySetHalfDiskHashMapSize", - "jasperDb.keySetHalfDiskHashMapBuffer", - "jasperDb.indexRebuildingEnforced", - "jasperDb.leafRecordCacheSize", - "virtualMap.percentHashThreads", - "virtualMap.numHashThreads", - "virtualMap.percentCleanerThreads", - "virtualMap.numCleanerThreads", - "virtualMap.maximumVirtualMapSize", - "virtualMap.virtualMapWarningThreshold", - "virtualMap.virtualMapWarningInterval", - "virtualMap.flushInterval", - "virtualMap.copyFlushThreshold", - "virtualMap.familyThrottleThreshold", - "virtualMap.preferredFlushQueueSize", - "virtualMap.flushThrottleStepSize", - "virtualMap.maximumFlushThrottlePeriod", - "state.savedStateDirectory", - "state.mainClassNameOverride", - "state.cleanSavedStateDirectory", - "state.stateSavingQueueSize", - "state.saveStatePeriod", - "state.saveReconnectStateToDisk", - "state.signedStateDisk", - "state.dumpStateOnAnyISS", - "state.dumpStateOnFatal", - "state.haltOnAnyIss", - "state.automatedSelfIssRecovery", - "state.haltOnCatastrophicIss", - "state.secondsBetweenISSDumps", - "state.secondsBetweenIssLogs", - "state.stateDeletionErrorLogFrequencySeconds", - "state.enableHashStreamLogging", - "state.debugHashDepth", - "state.maxAgeOfFutureStateSignatures", - "state.roundsToKeepForSigning", - "state.roundsToKeepAfterSigning", - "state.suspiciousSignedStateAge", - "state.stateHistoryEnabled", - "state.debugStackTracesEnabled", - "state.requireStateLoad", - "state.emergencyStateFileName", - "state.checkSignedStateFromDisk", - "signedStateFreq", - "maxEventQueueForCons", - "eventIntakeQueueThrottleSize", - "eventIntakeQueueSize", - "randomEventProbability", - 
"staleEventPreventionThreshold", - "rescueChildlessInverseProbability", - "eventStreamQueueCapacity", - "eventsLogPeriod", - "eventsLogDir", - "enableEventStreaming", - "event.maxEventQueueForCons", - "event.eventIntakeQueueThrottleSize", - "event.eventIntakeQueueSize", - "event.randomEventProbability", - "event.staleEventPreventionThreshold", - "event.rescueChildlessInverseProbability", - "event.eventStreamQueueCapacity", - "event.eventsLogPeriod", - "event.eventsLogDir", - "event.enableEventStreaming", - "halfLife", - "csvWriteFrequency", - "csvOutputFolder", - "csvFileName", - "csvAppend", - "prometheusEndpointEnabled", - "prometheusEndpointPortNumber", - "prometheusEndpointMaxBacklogAllowed", - "disableMetricsOutput", - "metrics.halfLife", - "metrics.csvWriteFrequency", - "metrics.csvOutputFolder", - "metrics.csvFileName", - "metrics.csvAppend", - "metrics.disableMetricsOutput", - "prometheus.endpointEnabled", - "prometheus.endpointPortNumber", - "prometheus.endpointMaxBacklogAllowed", - "configPath", - "settingsPath", - "settingsUsedDir", - "keysDirPath", - "appsDirPath", - "logPath", - "paths.configPath", - "paths.settingsPath", - "paths.settingsUsedDir", - "paths.keysDirPath", - "paths.appsDirPath", - "paths.logPath"); - private SettingConstants() {} } diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java index 48438e7bf1cd..ace43b0aa34f 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/Settings.java @@ -39,7 +39,6 @@ import static com.swirlds.platform.SettingConstants.MAX_TRANSACTION_COUNT_PER_EVENT_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.NUM_CONNECTIONS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.NUM_CRYPTO_THREADS_DEFAULT_VALUE; -import static com.swirlds.platform.SettingConstants.REMOVED_SETTINGS; import static com.swirlds.platform.SettingConstants.SAVED_STRING; import static com.swirlds.platform.SettingConstants.SHOW_INTERNAL_STATS_DEFAULT_VALUE; import static com.swirlds.platform.SettingConstants.SLEEP_CALLER_SKIPS_DEFAULT_VALUE; @@ -389,10 +388,7 @@ public void loadSettings(final File settingsFile) { final String[] pars = splitLine(line); if (pars.length > 0) { // ignore empty lines try { - if (!handleSetting(pars)) { - CommonUtils.tellUserConsole( - "bad name of setting in settings.txt line " + count + ": " + originalLine); - } + handleSetting(pars); } catch (final Exception e) { CommonUtils.tellUserConsole( "syntax error in settings.txt on line " + count + ": " + originalLine); @@ -442,35 +438,25 @@ private boolean handleSetting(final String[] pars) { name = split[0]; subName = split[1]; } - if (!REMOVED_SETTINGS.contains(name)) { - final String val = pars.length > 1 ? pars[1].trim() : ""; // the first parameter passed in, or "" if none - boolean good = false; // is name a valid name of a non-final static field in Settings? - final Field field = getFieldByName(Settings.class.getDeclaredFields(), name); - if (field != null && !Modifier.isFinal(field.getModifiers())) { - try { - if (subName == null) { - good = setValue(field, this, val); - } else { - final Field subField = getFieldByName(field.getType().getDeclaredFields(), subName); - if (subField != null) { - good = setValue(subField, field.get(this), val); - } + final String val = pars.length > 1 ? 
pars[1].trim() : ""; // the first parameter passed in, or "" if none + boolean good = false; // is name a valid name of a non-final static field in Settings? + final Field field = getFieldByName(Settings.class.getDeclaredFields(), name); + if (field != null && !Modifier.isFinal(field.getModifiers())) { + try { + if (subName == null) { + good = setValue(field, this, val); + } else { + final Field subField = getFieldByName(field.getType().getDeclaredFields(), subName); + if (subField != null) { + good = setValue(subField, field.get(this), val); } - } catch (final IllegalArgumentException | IllegalAccessException | SettingsException e) { - logger.error( - EXCEPTION.getMarker(), "illegal line in settings.txt: {}, {} {}", pars[0], pars[1], e); } - } - - if (!good) { - final String err = "WARNING: " + pars[0] + " is not a valid setting name."; - // this only happens if settings.txt exist, so it's internal, not users, so print it - CommonUtils.tellUserConsole(err); - logger.warn(STARTUP.getMarker(), err); - return false; + } catch (final IllegalArgumentException | IllegalAccessException | SettingsException e) { + logger.error(EXCEPTION.getMarker(), "illegal line in settings.txt: {}, {} {}", pars[0], pars[1], e); } } - return true; + + return good; } /** diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/ConfigMappings.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/internal/ConfigMappings.java similarity index 96% rename from platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/ConfigMappings.java rename to platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/internal/ConfigMappings.java index 21ab4cacaf69..cd7a9e37f142 100644 --- a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/ConfigMappings.java +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/internal/ConfigMappings.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016-2023 Hedera Hashgraph, LLC + * Copyright (C) 2023 Hedera Hashgraph, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.swirlds.platform.config; +package com.swirlds.platform.config.internal; import com.swirlds.common.config.sources.ConfigMapping; import com.swirlds.common.config.sources.MappedConfigSource; @@ -32,7 +32,7 @@ public final class ConfigMappings { private ConfigMappings() {} - private static final List MAPPINGS = List.of( + static final List MAPPINGS = List.of( new ConfigMapping("consensus.roundsNonAncient", "state.roundsNonAncient"), new ConfigMapping("consensus.roundsExpired", "state.roundsExpired"), new ConfigMapping("consensus.coinFreq", "coinFreq"), diff --git a/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/internal/PlatformConfigUtils.java b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/internal/PlatformConfigUtils.java new file mode 100644 index 000000000000..dca43b9bbcdc --- /dev/null +++ b/platform-sdk/swirlds-platform-core/src/main/java/com/swirlds/platform/config/internal/PlatformConfigUtils.java @@ -0,0 +1,69 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.swirlds.platform.config.internal; + +import static com.swirlds.logging.LogMarker.STARTUP; + +import com.swirlds.common.config.reflection.ConfigReflectionUtils; +import com.swirlds.common.config.sources.ConfigMapping; +import com.swirlds.config.api.Configuration; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Arrays; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * This class contains utility methods for the platform config. + */ +public class PlatformConfigUtils { + private static final Logger logger = LogManager.getLogger(PlatformConfigUtils.class); + + private PlatformConfigUtils() { + // Utility class + } + + /** + * Logs all configuration properties that are not known by any configuration data type. + * + * @param configuration the configuration to check + */ + public static void logNotKnownConfigProperties(@NonNull final Configuration configuration) { + Objects.requireNonNull(configuration, "configuration must not be null"); + + final Set configNames = configuration.getConfigDataTypes().stream() + .flatMap(configDataType -> { + final String propertyNamePrefix = + ConfigReflectionUtils.getNamePrefixForConfigDataRecord(configDataType); + return Arrays.stream(configDataType.getRecordComponents()) + .map(component -> ConfigReflectionUtils.getPropertyNameForConfigDataProperty( + propertyNamePrefix, component)); + }) + .collect(Collectors.toSet()); + ConfigMappings.MAPPINGS.stream().map(ConfigMapping::originalName).forEach(configNames::add); + configuration + .getPropertyNames() + .filter(name -> !configNames.contains(name)) + .forEach(name -> { + final String message = + "Configuration property '%s' is not used by any configuration data type".formatted(name); + logger.warn(STARTUP.getMarker(), message); + }); + } +} diff --git a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/config/ConfigMappingsTest.java b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/config/ConfigMappingsTest.java index 49351b641dd2..c298c0eeb2b6 100644 --- a/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/config/ConfigMappingsTest.java +++ b/platform-sdk/swirlds-platform-core/src/test/java/com/swirlds/platform/config/ConfigMappingsTest.java @@ -22,6 +22,7 @@ import com.swirlds.common.config.sources.SimpleConfigSource; import com.swirlds.config.api.Configuration; import com.swirlds.config.api.ConfigurationBuilder; +import com.swirlds.platform.config.internal.ConfigMappings; import org.junit.jupiter.api.Test; class ConfigMappingsTest { From 5f564ae486ec1c4b8fdd571ed41c1f6a56a60980 Mon Sep 17 00:00:00 2001 From: Jendrik Johannes Date: Wed, 21 Jun 2023 16:12:49 +0200 Subject: [PATCH 70/70] Introduce Platform (BOM) for version management (#6965) Signed-off-by: Jendrik Johannes Co-authored-by: Nathan Klick --- .github/CODEOWNERS | 2 +- {buildSrc => build-logic}/build.gradle.kts | 2 +- .../RepairDashedCommentsFormatterStep.kt | 0 
.../kotlin/StripOldLicenseFormatterStep.kt | 0 .../src/main/kotlin/Utils.kt | 0 ...era.hashgraph.aggregate-reports.gradle.kts | 0 ...hashgraph.benchmark-conventions.gradle.kts | 0 ...om.hedera.hashgraph.conventions.gradle.kts | 26 ++++ ...a.hashgraph.dependency-analysis.gradle.kts | 0 .../com.hedera.hashgraph.hapi.gradle.kts | 4 +- ...m.hedera.hashgraph.jpms-modules.gradle.kts | 3 +- ....hedera.hashgraph.maven-publish.gradle.kts | 8 + .../com.hedera.hashgraph.platform.gradle.kts | 22 +++ ...com.hedera.hashgraph.shadow-jar.gradle.kts | 0 ....hashgraph.spotless-conventions.gradle.kts | 0 ...graph.spotless-java-conventions.gradle.kts | 0 ...aph.spotless-kotlin-conventions.gradle.kts | 0 .../rules/IoGrpcDependencyMetadataRule.kt | 0 .../gradlebuild/rules/IoGrpcMetadataRule.kt | 0 .../rules/IoNettyNativeEpollMetadataRule.kt | 0 .../rules/JavaxAnnotationMetadataRule.kt | 0 .../src/main/java/module-info.java | 2 +- hedera-node/docker/Dockerfile | 6 +- hedera-node/docs/design/modules.md | 2 +- hedera-node/docs/gradle-quickstart.md | 6 +- hedera-node/hapi/build.gradle.kts | 1 + .../hapi/src/main/java/module-info.java | 2 +- .../src/testFixtures/java/module-info.java | 2 +- hedera-node/hedera-app-spi/build.gradle.kts | 1 + hedera-node/hedera-app/build.gradle.kts | 25 +-- .../hedera-app/src/main/java/module-info.java | 2 +- .../src/testFixtures/java/module-info.java | 2 +- hedera-node/hedera-config/build.gradle.kts | 1 + .../build.gradle.kts | 12 +- .../src/main/java/module-info.java | 1 + hedera-node/hedera-evm-impl/build.gradle.kts | 8 - .../src/main/java/module-info.java | 1 - .../hedera-file-service-impl/build.gradle.kts | 8 +- .../src/main/java/module-info.java | 9 +- .../hedera-mono-service/build.gradle.kts | 9 +- .../src/main/java/module-info.java | 9 +- .../src/testFixtures/java/module-info.java | 5 +- .../build.gradle.kts | 11 +- .../src/main/java/module-info.java | 2 + .../src/test/java/module-info.java | 1 - .../src/main/java/module-info.java | 1 + .../build.gradle.kts | 9 +- .../src/main/java/module-info.java | 2 + .../build.gradle.kts | 11 +- .../src/main/java/module-info.java | 4 +- .../src/main/java/module-info.java | 4 +- .../hedera-util-service-impl/build.gradle.kts | 4 +- .../src/main/java/module-info.java | 2 + hedera-node/test-clients/build.gradle.kts | 21 +-- hedera-platform/build.gradle.kts | 114 ++++++++++++++ hedera-platform/settings.gradle.kts | 20 +++ settings.gradle.kts | 143 ++++-------------- 57 files changed, 323 insertions(+), 207 deletions(-) rename {buildSrc => build-logic}/build.gradle.kts (96%) rename {buildSrc => build-logic}/src/main/kotlin/RepairDashedCommentsFormatterStep.kt (100%) rename {buildSrc => build-logic}/src/main/kotlin/StripOldLicenseFormatterStep.kt (100%) rename {buildSrc => build-logic}/src/main/kotlin/Utils.kt (100%) rename {buildSrc => build-logic}/src/main/kotlin/com.hedera.hashgraph.aggregate-reports.gradle.kts (100%) rename {buildSrc => build-logic}/src/main/kotlin/com.hedera.hashgraph.benchmark-conventions.gradle.kts (100%) rename {buildSrc => build-logic}/src/main/kotlin/com.hedera.hashgraph.conventions.gradle.kts (90%) rename {buildSrc => build-logic}/src/main/kotlin/com.hedera.hashgraph.dependency-analysis.gradle.kts (100%) rename {buildSrc => build-logic}/src/main/kotlin/com.hedera.hashgraph.hapi.gradle.kts (95%) rename {buildSrc => build-logic}/src/main/kotlin/com.hedera.hashgraph.jpms-modules.gradle.kts (99%) rename {buildSrc => build-logic}/src/main/kotlin/com.hedera.hashgraph.maven-publish.gradle.kts (93%) create mode 100644 
build-logic/src/main/kotlin/com.hedera.hashgraph.platform.gradle.kts rename {buildSrc => build-logic}/src/main/kotlin/com.hedera.hashgraph.shadow-jar.gradle.kts (100%) rename {buildSrc => build-logic}/src/main/kotlin/com.hedera.hashgraph.spotless-conventions.gradle.kts (100%) rename {buildSrc => build-logic}/src/main/kotlin/com.hedera.hashgraph.spotless-java-conventions.gradle.kts (100%) rename {buildSrc => build-logic}/src/main/kotlin/com.hedera.hashgraph.spotless-kotlin-conventions.gradle.kts (100%) rename {buildSrc => build-logic}/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/IoGrpcDependencyMetadataRule.kt (100%) rename {buildSrc => build-logic}/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/IoGrpcMetadataRule.kt (100%) rename {buildSrc => build-logic}/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/IoNettyNativeEpollMetadataRule.kt (100%) rename {buildSrc => build-logic}/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/JavaxAnnotationMetadataRule.kt (100%) create mode 100644 hedera-platform/build.gradle.kts create mode 100644 hedera-platform/settings.gradle.kts diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 138047cf2caa..e0cfe30ae37d 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -77,7 +77,7 @@ /gradle/ @hashgraph/release-engineering @hashgraph/release-engineering-managers gradlew @hashgraph/release-engineering @hashgraph/release-engineering-managers gradlew.bat @hashgraph/release-engineering @hashgraph/release-engineering-managers -**/buildSrc/ @hashgraph/release-engineering @hashgraph/release-engineering-managers +**/build-logic/ @hashgraph/release-engineering @hashgraph/release-engineering-managers **/gradle.* @hashgraph/release-engineering @hashgraph/release-engineering-managers **/*.gradle.* @hashgraph/release-engineering @hashgraph/release-engineering-managers diff --git a/buildSrc/build.gradle.kts b/build-logic/build.gradle.kts similarity index 96% rename from buildSrc/build.gradle.kts rename to build-logic/build.gradle.kts index 952943708f78..80e2f59f36ba 100644 --- a/buildSrc/build.gradle.kts +++ b/build-logic/build.gradle.kts @@ -35,6 +35,6 @@ dependencies { implementation("net.swiftzer.semver:semver:1.1.2") implementation("org.gradlex:extra-java-module-info:1.4") implementation("org.gradlex:java-ecosystem-capabilities:1.1") - implementation("org.gradlex:java-module-dependencies:1.3") + implementation("org.gradlex:java-module-dependencies:1.3.1") implementation("org.sonarsource.scanner.gradle:sonarqube-gradle-plugin:3.5.0.2730") } diff --git a/buildSrc/src/main/kotlin/RepairDashedCommentsFormatterStep.kt b/build-logic/src/main/kotlin/RepairDashedCommentsFormatterStep.kt similarity index 100% rename from buildSrc/src/main/kotlin/RepairDashedCommentsFormatterStep.kt rename to build-logic/src/main/kotlin/RepairDashedCommentsFormatterStep.kt diff --git a/buildSrc/src/main/kotlin/StripOldLicenseFormatterStep.kt b/build-logic/src/main/kotlin/StripOldLicenseFormatterStep.kt similarity index 100% rename from buildSrc/src/main/kotlin/StripOldLicenseFormatterStep.kt rename to build-logic/src/main/kotlin/StripOldLicenseFormatterStep.kt diff --git a/buildSrc/src/main/kotlin/Utils.kt b/build-logic/src/main/kotlin/Utils.kt similarity index 100% rename from buildSrc/src/main/kotlin/Utils.kt rename to build-logic/src/main/kotlin/Utils.kt diff --git a/buildSrc/src/main/kotlin/com.hedera.hashgraph.aggregate-reports.gradle.kts b/build-logic/src/main/kotlin/com.hedera.hashgraph.aggregate-reports.gradle.kts similarity index 100% rename from 
buildSrc/src/main/kotlin/com.hedera.hashgraph.aggregate-reports.gradle.kts rename to build-logic/src/main/kotlin/com.hedera.hashgraph.aggregate-reports.gradle.kts diff --git a/buildSrc/src/main/kotlin/com.hedera.hashgraph.benchmark-conventions.gradle.kts b/build-logic/src/main/kotlin/com.hedera.hashgraph.benchmark-conventions.gradle.kts similarity index 100% rename from buildSrc/src/main/kotlin/com.hedera.hashgraph.benchmark-conventions.gradle.kts rename to build-logic/src/main/kotlin/com.hedera.hashgraph.benchmark-conventions.gradle.kts diff --git a/buildSrc/src/main/kotlin/com.hedera.hashgraph.conventions.gradle.kts b/build-logic/src/main/kotlin/com.hedera.hashgraph.conventions.gradle.kts similarity index 90% rename from buildSrc/src/main/kotlin/com.hedera.hashgraph.conventions.gradle.kts rename to build-logic/src/main/kotlin/com.hedera.hashgraph.conventions.gradle.kts index 9e1c6cca1290..4fe6aa101eb3 100644 --- a/buildSrc/src/main/kotlin/com.hedera.hashgraph.conventions.gradle.kts +++ b/build-logic/src/main/kotlin/com.hedera.hashgraph.conventions.gradle.kts @@ -74,6 +74,32 @@ repositories { } } +val internal: Configuration = configurations.create("internal") { + isCanBeConsumed = false + isCanBeResolved = false +} + +dependencies { + "internal"(platform("com.hedera.hashgraph:hedera-platform")) +} +javaModuleDependencies { + versionsFromConsistentResolution(":app") +} +configurations.getByName("mainRuntimeClasspath") { + extendsFrom(internal) +} +sourceSets.all { + configurations.getByName(annotationProcessorConfigurationName) { + extendsFrom(internal) + } + configurations.getByName(compileClasspathConfigurationName) { + extendsFrom(internal) + } + configurations.getByName(runtimeClasspathConfigurationName) { + extendsFrom(internal) + } +} + // Make sure we use UTF-8 encoding when compiling tasks.withType().configureEach { options.encoding = "UTF-8" diff --git a/buildSrc/src/main/kotlin/com.hedera.hashgraph.dependency-analysis.gradle.kts b/build-logic/src/main/kotlin/com.hedera.hashgraph.dependency-analysis.gradle.kts similarity index 100% rename from buildSrc/src/main/kotlin/com.hedera.hashgraph.dependency-analysis.gradle.kts rename to build-logic/src/main/kotlin/com.hedera.hashgraph.dependency-analysis.gradle.kts diff --git a/buildSrc/src/main/kotlin/com.hedera.hashgraph.hapi.gradle.kts b/build-logic/src/main/kotlin/com.hedera.hashgraph.hapi.gradle.kts similarity index 95% rename from buildSrc/src/main/kotlin/com.hedera.hashgraph.hapi.gradle.kts rename to build-logic/src/main/kotlin/com.hedera.hashgraph.hapi.gradle.kts index 3dce74462ac0..b9cf9b556599 100644 --- a/buildSrc/src/main/kotlin/com.hedera.hashgraph.hapi.gradle.kts +++ b/build-logic/src/main/kotlin/com.hedera.hashgraph.hapi.gradle.kts @@ -25,12 +25,12 @@ plugins { protobuf { val libs = the().named("libs") protoc { - artifact = "com.google.protobuf:protoc:" + libs.findVersion("com.google.protobuf").get() + artifact = "com.google.protobuf:protoc:" + libs.findVersion("google-proto").get() } plugins { // Add GRPC plugin as we need to generate GRPC services id("grpc") { - artifact = "io.grpc:protoc-gen-grpc-java:" + libs.findVersion("grpc.protobuf").get() + artifact = "io.grpc:protoc-gen-grpc-java:" + libs.findVersion("grpc-proto").get() } } generateProtoTasks { diff --git a/buildSrc/src/main/kotlin/com.hedera.hashgraph.jpms-modules.gradle.kts b/build-logic/src/main/kotlin/com.hedera.hashgraph.jpms-modules.gradle.kts similarity index 99% rename from buildSrc/src/main/kotlin/com.hedera.hashgraph.jpms-modules.gradle.kts rename to 
build-logic/src/main/kotlin/com.hedera.hashgraph.jpms-modules.gradle.kts index 48e616210e32..72558273bc41 100644 --- a/buildSrc/src/main/kotlin/com.hedera.hashgraph.jpms-modules.gradle.kts +++ b/build-logic/src/main/kotlin/com.hedera.hashgraph.jpms-modules.gradle.kts @@ -21,14 +21,13 @@ import com.hedera.hashgraph.gradlebuild.rules.IoNettyNativeEpollMetadataRule import com.hedera.hashgraph.gradlebuild.rules.JavaxAnnotationMetadataRule plugins { - id("java") id("org.gradlex.java-ecosystem-capabilities") id("org.gradlex.extra-java-module-info") id("org.gradlex.java-module-dependencies") } javaModuleDependencies { - versionsFromConsistentResolution(":hedera-node:node-app") + warnForMissingVersions.set(false) // do not expect versions in catalog moduleNameToGA.put("com.hedera.hashgraph.protobuf.java.api", "com.hedera.hashgraph:hedera-protobuf-java-api") moduleNameToGA.put("com.hedera.pbj.runtime", "com.hedera.pbj:pbj-runtime") diff --git a/buildSrc/src/main/kotlin/com.hedera.hashgraph.maven-publish.gradle.kts b/build-logic/src/main/kotlin/com.hedera.hashgraph.maven-publish.gradle.kts similarity index 93% rename from buildSrc/src/main/kotlin/com.hedera.hashgraph.maven-publish.gradle.kts rename to build-logic/src/main/kotlin/com.hedera.hashgraph.maven-publish.gradle.kts index ee439544b3c2..51f22ac78263 100644 --- a/buildSrc/src/main/kotlin/com.hedera.hashgraph.maven-publish.gradle.kts +++ b/build-logic/src/main/kotlin/com.hedera.hashgraph.maven-publish.gradle.kts @@ -25,6 +25,14 @@ publishing { publications { create("maven") { from(components.getByName("java")) + versionMapping { + usage("java-api") { + fromResolutionResult() + } + usage("java-runtime") { + fromResolutionResult() + } + } pom { packaging = findProperty("maven.project.packaging")?.toString() ?: "jar" diff --git a/build-logic/src/main/kotlin/com.hedera.hashgraph.platform.gradle.kts b/build-logic/src/main/kotlin/com.hedera.hashgraph.platform.gradle.kts new file mode 100644 index 000000000000..b40a4a8e7253 --- /dev/null +++ b/build-logic/src/main/kotlin/com.hedera.hashgraph.platform.gradle.kts @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +plugins { + `java-platform` + id("com.hedera.hashgraph.jpms-modules") +} + +group = "com.hedera.hashgraph" diff --git a/buildSrc/src/main/kotlin/com.hedera.hashgraph.shadow-jar.gradle.kts b/build-logic/src/main/kotlin/com.hedera.hashgraph.shadow-jar.gradle.kts similarity index 100% rename from buildSrc/src/main/kotlin/com.hedera.hashgraph.shadow-jar.gradle.kts rename to build-logic/src/main/kotlin/com.hedera.hashgraph.shadow-jar.gradle.kts diff --git a/buildSrc/src/main/kotlin/com.hedera.hashgraph.spotless-conventions.gradle.kts b/build-logic/src/main/kotlin/com.hedera.hashgraph.spotless-conventions.gradle.kts similarity index 100% rename from buildSrc/src/main/kotlin/com.hedera.hashgraph.spotless-conventions.gradle.kts rename to build-logic/src/main/kotlin/com.hedera.hashgraph.spotless-conventions.gradle.kts diff --git a/buildSrc/src/main/kotlin/com.hedera.hashgraph.spotless-java-conventions.gradle.kts b/build-logic/src/main/kotlin/com.hedera.hashgraph.spotless-java-conventions.gradle.kts similarity index 100% rename from buildSrc/src/main/kotlin/com.hedera.hashgraph.spotless-java-conventions.gradle.kts rename to build-logic/src/main/kotlin/com.hedera.hashgraph.spotless-java-conventions.gradle.kts diff --git a/buildSrc/src/main/kotlin/com.hedera.hashgraph.spotless-kotlin-conventions.gradle.kts b/build-logic/src/main/kotlin/com.hedera.hashgraph.spotless-kotlin-conventions.gradle.kts similarity index 100% rename from buildSrc/src/main/kotlin/com.hedera.hashgraph.spotless-kotlin-conventions.gradle.kts rename to build-logic/src/main/kotlin/com.hedera.hashgraph.spotless-kotlin-conventions.gradle.kts diff --git a/buildSrc/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/IoGrpcDependencyMetadataRule.kt b/build-logic/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/IoGrpcDependencyMetadataRule.kt similarity index 100% rename from buildSrc/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/IoGrpcDependencyMetadataRule.kt rename to build-logic/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/IoGrpcDependencyMetadataRule.kt diff --git a/buildSrc/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/IoGrpcMetadataRule.kt b/build-logic/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/IoGrpcMetadataRule.kt similarity index 100% rename from buildSrc/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/IoGrpcMetadataRule.kt rename to build-logic/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/IoGrpcMetadataRule.kt diff --git a/buildSrc/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/IoNettyNativeEpollMetadataRule.kt b/build-logic/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/IoNettyNativeEpollMetadataRule.kt similarity index 100% rename from buildSrc/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/IoNettyNativeEpollMetadataRule.kt rename to build-logic/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/IoNettyNativeEpollMetadataRule.kt diff --git a/buildSrc/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/JavaxAnnotationMetadataRule.kt b/build-logic/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/JavaxAnnotationMetadataRule.kt similarity index 100% rename from buildSrc/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/JavaxAnnotationMetadataRule.kt rename to build-logic/src/main/kotlin/com/hedera/hashgraph/gradlebuild/rules/JavaxAnnotationMetadataRule.kt diff --git a/hedera-node/cli-clients/src/main/java/module-info.java b/hedera-node/cli-clients/src/main/java/module-info.java index 8b1225c45c35..61729f33e8b1 
100644 --- a/hedera-node/cli-clients/src/main/java/module-info.java +++ b/hedera-node/cli-clients/src/main/java/module-info.java @@ -1,4 +1,4 @@ -module com.hedera.services.cli { +module com.hedera.node.services.cli { exports com.hedera.services.cli; exports com.hedera.services.cli.sign; diff --git a/hedera-node/docker/Dockerfile b/hedera-node/docker/Dockerfile index 448683dacf70..4be5b99ae5f7 100644 --- a/hedera-node/docker/Dockerfile +++ b/hedera-node/docker/Dockerfile @@ -42,8 +42,10 @@ RUN mkdir /opt/hedera/services/hedera-node COPY hedera-node /opt/hedera/services/hedera-node RUN mkdir /opt/hedera/services/test-clients COPY hedera-node/test-clients /opt/hedera/services/test-clients -RUN mkdir /opt/hedera/services/buildSrc -COPY buildSrc /opt/hedera/services/buildSrc +RUN mkdir /opt/hedera/services/build-logic +COPY build-logic /opt/hedera/services/build-logic +RUN mkdir /opt/hedera/services/hedera-platform +COPY hedera-platform /opt/hedera/services/hedera-platform RUN mkdir /opt/hedera/services/gradle COPY gradle /opt/hedera/services/gradle COPY gradlew /opt/hedera/services/gradlew diff --git a/hedera-node/docs/design/modules.md b/hedera-node/docs/design/modules.md index fd7ef4c47b9b..7efa6e7c3ee8 100644 --- a/hedera-node/docs/design/modules.md +++ b/hedera-node/docs/design/modules.md @@ -12,7 +12,7 @@ Java modules. Each module needs a `build.gradle.kts` file that describes the module. General best practices for all our (Java) modules are defined in custom plugins that can be found -under `buildSrc/src/main/kotlin`. For a Java module the `com.hedera.hashgraph.javaConventions` plugin should be used. +under `build-logic/src/main/kotlin`. For a Java module the `com.hedera.hashgraph.javaConventions` plugin should be used. Next to this each module should have a description. Since nothing else is needed for a minimal module the most simple `build.gradle.kts` looks like this: diff --git a/hedera-node/docs/gradle-quickstart.md b/hedera-node/docs/gradle-quickstart.md index 43691075d906..c01028cf2b57 100644 --- a/hedera-node/docs/gradle-quickstart.md +++ b/hedera-node/docs/gradle-quickstart.md @@ -27,12 +27,12 @@ single master-list of all libraries that have been approved for the project, inc Our Gradle build has a single version number for all projects. It is defined in gradle.properties. Changing this version number will automatically apply to every subproject. -## buildSrc +## build-logic Gradle has plugins. One type of plugin is the "convention plugin". A convention plugin is a plugin that applies a -certain set of defaults to all builds that include that convention. We define one such `hedera-convention` in buildSrc. +certain set of defaults to all builds that include that convention. We define one such `hedera-convention` in build-logic. It is then used by each of the subprojects to reduce the amount of boilerplate. We can create additional conventions in -the future if need be. buildSrc is a special directory in Gradle for hosting custom project plugins. +the future if need be. Unlike `buildSrc`, 'build-logic' is not a special directory; it is an included build, pulled in via `includeBuild("build-logic")` in the settings files, that hosts our custom project plugins.
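For orientation, a minimal subproject build file under this layout might look like the sketch below. This is not part of the patch: the description and the single test dependency are placeholders, and the shape simply follows the module build files changed further down in this diff (e.g. `hedera-evm-impl/build.gradle.kts` and `hedera-config/build.gradle.kts`).

```kotlin
plugins { id("com.hedera.hashgraph.conventions") }

description = "Hedera Example Module"

dependencies {
    javaModuleDependencies {
        // No versions are declared here; they resolve through the new hedera-platform BOM
        // via the "internal" configuration that the conventions plugin attaches to each source set.
        testImplementation(gav("org.junit.jupiter.api"))
    }
}
```

Because the `com.hedera.hashgraph.conventions` plugin wires the `internal` platform configuration into every source set, the per-module file stays free of version numbers; those come from the `hedera-platform` platform project introduced by this patch.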
## Sub Projects diff --git a/hedera-node/hapi/build.gradle.kts b/hedera-node/hapi/build.gradle.kts index 60a3aedc028d..bdce8ecba8d5 100644 --- a/hedera-node/hapi/build.gradle.kts +++ b/hedera-node/hapi/build.gradle.kts @@ -24,6 +24,7 @@ description = "Hedera API" dependencies { javaModuleDependencies { + testImplementation(project(":hapi")) // we depend on the protoc compiled hapi during test as we test our pbj generated code // against it to make sure it is compatible testImplementation(gav("com.google.protobuf.util")) diff --git a/hedera-node/hapi/src/main/java/module-info.java b/hedera-node/hapi/src/main/java/module-info.java index 9e7485435117..2195f9d435d5 100644 --- a/hedera-node/hapi/src/main/java/module-info.java +++ b/hedera-node/hapi/src/main/java/module-info.java @@ -44,9 +44,9 @@ exports com.hederahashgraph.api.proto.java; exports com.hederahashgraph.service.proto.java; - requires transitive com.hedera.pbj.runtime; requires transitive com.google.common; requires transitive com.google.protobuf; + requires transitive com.hedera.pbj.runtime; requires transitive grpc.stub; requires transitive io.grpc; requires grpc.protobuf; diff --git a/hedera-node/hapi/src/testFixtures/java/module-info.java b/hedera-node/hapi/src/testFixtures/java/module-info.java index c18ea6877fad..00cf480dde3a 100644 --- a/hedera-node/hapi/src/testFixtures/java/module-info.java +++ b/hedera-node/hapi/src/testFixtures/java/module-info.java @@ -1,7 +1,7 @@ module com.hedera.node.hapi.test.fixtures { exports com.hedera.node.hapi.fixtures; + requires transitive com.hedera.node.hapi; requires transitive com.hedera.pbj.runtime; - requires com.hedera.node.hapi; requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-app-spi/build.gradle.kts b/hedera-node/hedera-app-spi/build.gradle.kts index a01339241138..2bb6c06c5ae3 100644 --- a/hedera-node/hedera-app-spi/build.gradle.kts +++ b/hedera-node/hedera-app-spi/build.gradle.kts @@ -23,6 +23,7 @@ description = "Hedera Application - SPI" dependencies { javaModuleDependencies { + testImplementation(project(":app-spi")) testImplementation(gav("org.apache.commons.lang3")) testImplementation(gav("org.assertj.core")) testImplementation(gav("org.junit.jupiter.api")) diff --git a/hedera-node/hedera-app/build.gradle.kts b/hedera-node/hedera-app/build.gradle.kts index 5c053c7fef81..d2d7e1242129 100644 --- a/hedera-node/hedera-app/build.gradle.kts +++ b/hedera-node/hedera-app/build.gradle.kts @@ -26,9 +26,10 @@ dependencies { javaModuleDependencies { annotationProcessor(gav("dagger.compiler")) - testImplementation(testFixtures(project(":hedera-node:node-config"))) - testImplementation(testFixtures(project(":hedera-node:node-app-service-mono"))) - testImplementation(testFixtures(project(":hedera-node:node-app-spi"))) + testImplementation(project(":app")) + testImplementation(testFixtures(project(":config"))) + testImplementation(testFixtures(project(":app-service-mono"))) + testImplementation(testFixtures(project(":app-spi"))) testImplementation(gav("com.swirlds.base")) testImplementation(gav("io.github.classgraph")) testImplementation(gav("org.assertj.core")) @@ -41,10 +42,11 @@ dependencies { testImplementation(gav("uk.org.webcompere.systemstubs.core")) testCompileOnly(gav("com.github.spotbugs.annotations")) - itestImplementation(project(":hedera-node:node-app")) - itestImplementation(project(":hedera-node:node-app-spi")) - itestImplementation(project(":hedera-node:node-hapi")) - itestImplementation(testFixtures(project(":hedera-node:node-app-spi"))) + 
itestImplementation(project(":app")) + itestImplementation(project(":app-spi")) + itestImplementation(project(":config")) + itestImplementation(project(":hapi")) + itestImplementation(testFixtures(project(":app-spi"))) itestImplementation(gav("com.github.spotbugs.annotations")) itestImplementation(gav("com.hedera.pbj.runtime")) itestImplementation(gav("com.swirlds.common")) @@ -52,15 +54,16 @@ dependencies { itestImplementation(gav("io.grpc")) itestImplementation(gav("io.helidon.grpc.client")) itestImplementation(gav("io.helidon.grpc.server")) + itestImplementation(gav("org.apache.logging.log4j")) itestImplementation(gav("org.assertj.core")) itestImplementation(gav("org.bouncycastle.provider")) itestImplementation(gav("org.junit.jupiter.api")) itestImplementation(gav("org.junit.jupiter.params")) - jmhImplementation(project(":hedera-node:node-app")) - jmhImplementation(project(":hedera-node:node-app-service-mono")) - jmhImplementation(project(":hedera-node:node-hapi")) - jmhImplementation(testFixtures(project(":hedera-node:node-app-spi"))) + jmhImplementation(project(":app")) + jmhImplementation(project(":app-service-mono")) + jmhImplementation(project(":hapi")) + jmhImplementation(testFixtures(project(":app-spi"))) jmhImplementation(gav("com.hedera.pbj.runtime")) jmhImplementation(gav("com.swirlds.common")) jmhImplementation(gav("jmh.core")) diff --git a/hedera-node/hedera-app/src/main/java/module-info.java b/hedera-node/hedera-app/src/main/java/module-info.java index ba80b3131c6c..23d196bacf0f 100644 --- a/hedera-node/hedera-app/src/main/java/module-info.java +++ b/hedera-node/hedera-app/src/main/java/module-info.java @@ -35,9 +35,9 @@ requires com.swirlds.platform; requires grpc.stub; requires io.grpc; + requires io.helidon.common.configurable; requires io.helidon.grpc.core; requires io.helidon.grpc.server; - requires org.apache.commons.codec; // Temporary until AdaptedMonoProcessLogic is removed requires org.apache.commons.lang3; requires org.apache.logging.log4j; requires org.hyperledger.besu.datatypes; diff --git a/hedera-node/hedera-app/src/testFixtures/java/module-info.java b/hedera-node/hedera-app/src/testFixtures/java/module-info.java index 920e4ea0a134..984a526961da 100644 --- a/hedera-node/hedera-app/src/testFixtures/java/module-info.java +++ b/hedera-node/hedera-app/src/testFixtures/java/module-info.java @@ -2,8 +2,8 @@ exports com.hedera.node.app.fixtures.state; requires transitive com.hedera.node.app.spi; + requires transitive com.hedera.node.app; requires com.hedera.node.app.spi.test.fixtures; - requires com.hedera.node.app; requires com.swirlds.config; requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-config/build.gradle.kts b/hedera-node/hedera-config/build.gradle.kts index c1d0745a39a2..93146c19f28a 100644 --- a/hedera-node/hedera-config/build.gradle.kts +++ b/hedera-node/hedera-config/build.gradle.kts @@ -23,6 +23,7 @@ description = "Hedera Configuration" dependencies { javaModuleDependencies { + testImplementation(project(":config")) testImplementation(gav("org.assertj.core")) testImplementation(gav("org.hyperledger.besu.datatypes")) testImplementation(gav("org.junit.jupiter.api")) diff --git a/hedera-node/hedera-consensus-service-impl/build.gradle.kts b/hedera-node/hedera-consensus-service-impl/build.gradle.kts index b479ce839ff9..e659c80e52f8 100644 --- a/hedera-node/hedera-consensus-service-impl/build.gradle.kts +++ b/hedera-node/hedera-consensus-service-impl/build.gradle.kts @@ -22,12 +22,12 @@ dependencies { javaModuleDependencies { 
annotationProcessor(gav("dagger.compiler")) - testImplementation(testFixtures(project(":hedera-node:node-app-service-mono"))) - testImplementation(testFixtures(project(":hedera-node:node-app-spi"))) - testImplementation(testFixtures(project(":hedera-node:node-config"))) - testImplementation(project(":hedera-node:node-app")) - testImplementation(project(":hedera-node:node-app-service-consensus-impl")) - testImplementation(project(":hedera-node:node-app-service-token")) + testImplementation(testFixtures(project(":app-service-mono"))) + testImplementation(testFixtures(project(":app-spi"))) + testImplementation(testFixtures(project(":config"))) + testImplementation(project(":app")) + testImplementation(project(":app-service-consensus-impl")) + testImplementation(project(":app-service-token")) testImplementation(gav("com.google.protobuf")) testImplementation(gav("com.swirlds.common")) testImplementation(gav("org.assertj.core")) diff --git a/hedera-node/hedera-consensus-service/src/main/java/module-info.java b/hedera-node/hedera-consensus-service/src/main/java/module-info.java index 4e335e48e3bc..8943a834d561 100644 --- a/hedera-node/hedera-consensus-service/src/main/java/module-info.java +++ b/hedera-node/hedera-consensus-service/src/main/java/module-info.java @@ -5,5 +5,6 @@ requires transitive com.hedera.node.app.spi; requires transitive com.hedera.node.hapi; + requires transitive com.hedera.pbj.runtime; requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-evm-impl/build.gradle.kts b/hedera-node/hedera-evm-impl/build.gradle.kts index 8d962bca1db3..50d7abb222cd 100644 --- a/hedera-node/hedera-evm-impl/build.gradle.kts +++ b/hedera-node/hedera-evm-impl/build.gradle.kts @@ -19,11 +19,3 @@ plugins { id("com.hedera.hashgraph.conventions") } group = "com.hedera.evm" description = "Hedera EVM - Implementation" - -// TODO module-info.java in 'test' -// https://github.com/autonomousapps/dependency-analysis-android-gradle-plugin/issues/900 -dependencyAnalysis.issues { - onUnusedDependencies { - exclude(javaModuleDependencies.ga("com.github.spotbugs.annotations").get()) - } -} diff --git a/hedera-node/hedera-evm-impl/src/main/java/module-info.java b/hedera-node/hedera-evm-impl/src/main/java/module-info.java index bb3d6ae531d2..c3b709ef70b6 100644 --- a/hedera-node/hedera-evm-impl/src/main/java/module-info.java +++ b/hedera-node/hedera-evm-impl/src/main/java/module-info.java @@ -1,4 +1,3 @@ module com.hedera.node.app.service.evm.impl { requires transitive com.hedera.node.app.service.evm; - requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-file-service-impl/build.gradle.kts b/hedera-node/hedera-file-service-impl/build.gradle.kts index b4450b10d41b..4e45d4c1ac8f 100644 --- a/hedera-node/hedera-file-service-impl/build.gradle.kts +++ b/hedera-node/hedera-file-service-impl/build.gradle.kts @@ -22,10 +22,10 @@ dependencies { javaModuleDependencies { annotationProcessor(gav("dagger.compiler")) - testImplementation(project(":hedera-node:node-app-service-token")) - testImplementation(testFixtures(project(":hedera-node:node-app-service-mono"))) - testImplementation(testFixtures(project(":hedera-node:node-config"))) - testImplementation(testFixtures(project(":hedera-node:node-app-spi"))) + testImplementation(project(":app-service-token")) + testImplementation(testFixtures(project(":app-service-mono"))) + testImplementation(testFixtures(project(":config"))) + testImplementation(testFixtures(project(":app-spi"))) 
testImplementation(gav("org.assertj.core")) testImplementation(gav("org.junit.jupiter.api")) testImplementation(gav("org.mockito")) diff --git a/hedera-node/hedera-file-service-impl/src/main/java/module-info.java b/hedera-node/hedera-file-service-impl/src/main/java/module-info.java index 28898406effc..61e7a94a68bd 100644 --- a/hedera-node/hedera-file-service-impl/src/main/java/module-info.java +++ b/hedera-node/hedera-file-service-impl/src/main/java/module-info.java @@ -1,6 +1,10 @@ import com.hedera.node.app.service.file.impl.FileServiceImpl; module com.hedera.node.app.service.file.impl { + requires com.fasterxml.jackson.databind; + requires com.swirlds.config; + requires org.apache.commons.lang3; + requires org.apache.logging.log4j; requires transitive com.hedera.node.app.service.file; requires transitive com.hedera.node.app.service.mono; requires transitive com.hedera.node.app.spi; @@ -9,11 +13,6 @@ requires transitive com.hedera.pbj.runtime; requires transitive dagger; requires transitive javax.inject; - requires com.fasterxml.jackson.databind; - requires com.swirlds.config; - requires org.apache.commons.lang3; - requires org.apache.logging.log4j; - requires org.apache.commons.codec; requires static com.github.spotbugs.annotations; provides com.hedera.node.app.service.file.FileService with diff --git a/hedera-node/hedera-mono-service/build.gradle.kts b/hedera-node/hedera-mono-service/build.gradle.kts index 8640f9535cdf..34d4a9358ab1 100644 --- a/hedera-node/hedera-mono-service/build.gradle.kts +++ b/hedera-node/hedera-mono-service/build.gradle.kts @@ -26,6 +26,7 @@ dependencies { javaModuleDependencies { annotationProcessor(gav("dagger.compiler")) + testImplementation(project(":app-service-mono")) testImplementation(gav("awaitility")) testImplementation(gav("com.swirlds.config")) testImplementation(gav("io.github.classgraph")) @@ -42,12 +43,12 @@ dependencies { testImplementation(gav("org.mockito.junit.jupiter")) testCompileOnly(gav("com.github.spotbugs.annotations")) - jmhImplementation(project(":hedera-node:node-app-hapi-utils")) - jmhImplementation(project(":hedera-node:node-app-spi")) + jmhImplementation(project(":app-hapi-utils")) + jmhImplementation(project(":app-spi")) jmhImplementation(gav("com.github.spotbugs.annotations")) jmhImplementation(gav("com.google.common")) jmhImplementation(gav("com.google.protobuf")) - jmhImplementation(project(":hedera-node:node-hapi")) + jmhImplementation(project(":hapi")) jmhImplementation(gav("com.swirlds.common")) jmhImplementation(gav("com.swirlds.fcqueue")) jmhImplementation(gav("com.swirlds.jasperdb")) @@ -65,7 +66,7 @@ dependencies { } } -val apt = configurations.create("apt") +val apt = configurations.create("apt") { extendsFrom(configurations.internal.get()) } dependencies { @Suppress("UnstableApiUsage") apt(javaModuleDependencies.gav("dagger.compiler")) } diff --git a/hedera-node/hedera-mono-service/src/main/java/module-info.java b/hedera-node/hedera-mono-service/src/main/java/module-info.java index c116353fc00d..215999f4c146 100644 --- a/hedera-node/hedera-mono-service/src/main/java/module-info.java +++ b/hedera-node/hedera-mono-service/src/main/java/module-info.java @@ -47,7 +47,7 @@ com.hedera.node.app.service.networkadmin.impl, com.hedera.node.app.service.consensus.impl.test, com.hedera.node.app.service.networkadmin.impl.test, - com.hedera.services.cli, + com.hedera.node.services.cli, com.hedera.node.app.service.file.impl, com.hedera.node.app.service.file.impl.test, com.hedera.node.app.service.network.admin.impl; @@ -265,10 +265,10 
@@ requires transitive com.hedera.node.app.service.evm; requires transitive com.hedera.node.app.spi; requires transitive com.hedera.node.hapi; - requires transitive com.hedera.pbj.runtime; requires transitive com.fasterxml.jackson.databind; requires transitive com.google.common; requires transitive com.google.protobuf; + requires transitive com.hedera.pbj.runtime; requires transitive com.swirlds.common; requires transitive com.swirlds.fchashmap; requires transitive com.swirlds.fcqueue; @@ -279,8 +279,8 @@ requires transitive grpc.stub; requires transitive headlong; requires transitive io.grpc; + requires transitive io.helidon.grpc.server; requires transitive javax.inject; - requires transitive org.apache.commons.codec; requires transitive org.apache.commons.lang3; requires transitive org.apache.logging.log4j; requires transitive org.eclipse.collections.api; @@ -293,8 +293,9 @@ requires com.swirlds.logging; requires com.swirlds.platform; requires io.helidon.common.configurable; + requires io.helidon.config; requires io.helidon.grpc.core; - requires io.helidon.grpc.server; + requires org.apache.commons.codec; requires org.apache.commons.collections4; requires org.apache.commons.io; requires org.bouncycastle.provider; diff --git a/hedera-node/hedera-mono-service/src/testFixtures/java/module-info.java b/hedera-node/hedera-mono-service/src/testFixtures/java/module-info.java index a96df33f19ea..0193fd83a70e 100644 --- a/hedera-node/hedera-mono-service/src/testFixtures/java/module-info.java +++ b/hedera-node/hedera-mono-service/src/testFixtures/java/module-info.java @@ -6,6 +6,7 @@ exports com.hedera.test.factories.scenarios; requires transitive com.hedera.node.app.hapi.utils; + requires transitive com.hedera.node.app.service.mono; requires transitive com.hedera.node.app.service.token; requires transitive com.hedera.node.app.spi; requires transitive com.hedera.node.hapi; @@ -13,12 +14,10 @@ requires transitive com.swirlds.common; requires transitive com.swirlds.merkle; requires transitive com.swirlds.virtualmap; - requires transitive org.apache.commons.codec; requires transitive org.bouncycastle.provider; requires com.hedera.node.app.service.evm; - requires com.hedera.node.app.service.mono; - requires com.hedera.pbj.runtime; requires com.google.common; + requires com.hedera.pbj.runtime; requires net.i2p.crypto.eddsa; requires org.junit.jupiter.api; requires org.mockito; diff --git a/hedera-node/hedera-network-admin-service-impl/build.gradle.kts b/hedera-node/hedera-network-admin-service-impl/build.gradle.kts index 0cbb9c92488b..f1a382e6155e 100644 --- a/hedera-node/hedera-network-admin-service-impl/build.gradle.kts +++ b/hedera-node/hedera-network-admin-service-impl/build.gradle.kts @@ -22,11 +22,12 @@ dependencies { javaModuleDependencies { annotationProcessor(gav("dagger.compiler")) - testImplementation(project(":hedera-node:node-app-service-token-impl")) - testImplementation(project(":hedera-node:node-app")) - testImplementation(testFixtures(project(":hedera-node:node-config"))) - testImplementation(testFixtures(project(":hedera-node:node-app"))) - testImplementation(testFixtures(project(":hedera-node:node-app-spi"))) + testImplementation(project(":app")) + testImplementation(project(":app-service-network-admin-impl")) + testImplementation(project(":app-service-token-impl")) + testImplementation(testFixtures(project(":config"))) + testImplementation(testFixtures(project(":app"))) + testImplementation(testFixtures(project(":app-spi"))) testImplementation(gav("com.swirlds.fcqueue")) 
testImplementation(gav("org.assertj.core")) testImplementation(gav("org.junit.jupiter.api")) diff --git a/hedera-node/hedera-network-admin-service/src/main/java/module-info.java b/hedera-node/hedera-network-admin-service/src/main/java/module-info.java index 68d11d332a84..7d8c9e4ae3d4 100644 --- a/hedera-node/hedera-network-admin-service/src/main/java/module-info.java +++ b/hedera-node/hedera-network-admin-service/src/main/java/module-info.java @@ -5,6 +5,8 @@ uses com.hedera.node.app.service.networkadmin.NetworkService; requires transitive com.hedera.node.app.spi; + requires transitive com.hedera.node.hapi; + requires transitive com.hedera.pbj.runtime; requires transitive com.swirlds.common; requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-schedule-service-impl/src/test/java/module-info.java b/hedera-node/hedera-schedule-service-impl/src/test/java/module-info.java index 8af7168250da..d55cd8324eb3 100644 --- a/hedera-node/hedera-schedule-service-impl/src/test/java/module-info.java +++ b/hedera-node/hedera-schedule-service-impl/src/test/java/module-info.java @@ -4,7 +4,6 @@ requires com.hedera.node.app.service.token; requires com.hedera.node.app.spi.test.fixtures; requires com.swirlds.common; - requires org.apache.commons.codec; requires org.junit.jupiter.api; requires org.mockito.junit.jupiter; requires org.mockito; diff --git a/hedera-node/hedera-schedule-service/src/main/java/module-info.java b/hedera-node/hedera-schedule-service/src/main/java/module-info.java index d989fc3d0d9f..7ef78b1f2e1b 100644 --- a/hedera-node/hedera-schedule-service/src/main/java/module-info.java +++ b/hedera-node/hedera-schedule-service/src/main/java/module-info.java @@ -5,5 +5,6 @@ requires transitive com.hedera.node.app.spi; requires transitive com.hedera.node.hapi; + requires transitive com.hedera.pbj.runtime; requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-smart-contract-service-impl/build.gradle.kts b/hedera-node/hedera-smart-contract-service-impl/build.gradle.kts index 0b84f573b65d..6752628d3522 100644 --- a/hedera-node/hedera-smart-contract-service-impl/build.gradle.kts +++ b/hedera-node/hedera-smart-contract-service-impl/build.gradle.kts @@ -21,8 +21,8 @@ description = "Default Hedera Smart Contract Service Implementation" dependencies { javaModuleDependencies { annotationProcessor(gav("dagger.compiler")) - testImplementation(testFixtures(project(":hedera-node:node-app-spi"))) - testImplementation(testFixtures(project(":hedera-node:node-config"))) + testImplementation(testFixtures(project(":app-spi"))) + testImplementation(testFixtures(project(":config"))) testRuntimeOnly(gav("org.mockito.inline")) } @@ -32,8 +32,9 @@ dependencies { // https://github.com/autonomousapps/dependency-analysis-android-gradle-plugin/issues/900 dependencyAnalysis.issues { onUnusedDependencies { - exclude(":hedera-node:node-app-service-mono") - exclude(":hedera-node:node-app-service-token") + exclude(":config") + exclude(":app-service-mono") + exclude(":app-service-token") exclude("org.apache.tuweni:tuweni-units") } } diff --git a/hedera-node/hedera-smart-contract-service/src/main/java/module-info.java b/hedera-node/hedera-smart-contract-service/src/main/java/module-info.java index 9049e895a033..4d01febf37c9 100644 --- a/hedera-node/hedera-smart-contract-service/src/main/java/module-info.java +++ b/hedera-node/hedera-smart-contract-service/src/main/java/module-info.java @@ -3,6 +3,8 @@ uses com.hedera.node.app.service.contract.ContractService; + requires 
com.hedera.node.hapi; requires transitive com.hedera.node.app.spi; + requires transitive com.hedera.pbj.runtime; requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-token-service-impl/build.gradle.kts b/hedera-node/hedera-token-service-impl/build.gradle.kts index 4e8869c29738..2f71e667cb12 100644 --- a/hedera-node/hedera-token-service-impl/build.gradle.kts +++ b/hedera-node/hedera-token-service-impl/build.gradle.kts @@ -22,12 +22,11 @@ dependencies { javaModuleDependencies { annotationProcessor(gav("dagger.compiler")) - testImplementation(project(":hedera-node:node-app-service-token-impl")) - testImplementation(project(":hedera-node:node-app")) - testImplementation(testFixtures(project(":hedera-node:node-app-service-mono"))) - testImplementation(testFixtures(project(":hedera-node:node-app-spi"))) - testImplementation(testFixtures(project(":hedera-node:node-config"))) - testImplementation(gav("com.swirlds.common")) + testImplementation(project(":app-service-token-impl")) + testImplementation(project(":app")) + testImplementation(testFixtures(project(":app-service-mono"))) + testImplementation(testFixtures(project(":app-spi"))) + testImplementation(testFixtures(project(":config"))) testImplementation(gav("org.assertj.core")) testImplementation(gav("org.hamcrest")) testImplementation(gav("org.junit.jupiter.api")) diff --git a/hedera-node/hedera-token-service-impl/src/main/java/module-info.java b/hedera-node/hedera-token-service-impl/src/main/java/module-info.java index 3f5678bae74d..d8a8cee27652 100644 --- a/hedera-node/hedera-token-service-impl/src/main/java/module-info.java +++ b/hedera-node/hedera-token-service-impl/src/main/java/module-info.java @@ -7,14 +7,16 @@ requires transitive com.hedera.pbj.runtime; requires transitive dagger; requires transitive javax.inject; + requires com.hedera.node.app.hapi.utils; requires com.hedera.node.app.service.evm; requires com.google.common; requires com.google.protobuf; + requires com.swirlds.common; requires com.swirlds.config; requires com.swirlds.jasperdb; requires org.apache.commons.lang3; + requires org.apache.logging.log4j; requires org.slf4j; - requires tuweni.bytes; requires static com.github.spotbugs.annotations; provides com.hedera.node.app.service.token.TokenService with diff --git a/hedera-node/hedera-token-service/src/main/java/module-info.java b/hedera-node/hedera-token-service/src/main/java/module-info.java index 1c38f35a0bf6..86bcdc2630bf 100644 --- a/hedera-node/hedera-token-service/src/main/java/module-info.java +++ b/hedera-node/hedera-token-service/src/main/java/module-info.java @@ -5,6 +5,6 @@ requires transitive com.hedera.node.app.spi; requires transitive com.hedera.node.hapi; - requires com.hedera.pbj.runtime; - requires static com.github.spotbugs.annotations; + requires transitive com.hedera.pbj.runtime; + requires com.github.spotbugs.annotations; } diff --git a/hedera-node/hedera-util-service-impl/build.gradle.kts b/hedera-node/hedera-util-service-impl/build.gradle.kts index 97918246ae0f..5f44eeae8144 100644 --- a/hedera-node/hedera-util-service-impl/build.gradle.kts +++ b/hedera-node/hedera-util-service-impl/build.gradle.kts @@ -22,8 +22,8 @@ dependencies { javaModuleDependencies { annotationProcessor(gav("dagger.compiler")) - testImplementation(testFixtures(project(":hedera-node:node-app-spi"))) - testImplementation(testFixtures(project(":hedera-node:node-config"))) + testImplementation(testFixtures(project(":app-spi"))) + testImplementation(testFixtures(project(":config"))) 
testImplementation(gav("org.assertj.core")) testImplementation(gav("org.junit.jupiter.api")) testImplementation(gav("org.mockito")) diff --git a/hedera-node/hedera-util-service/src/main/java/module-info.java b/hedera-node/hedera-util-service/src/main/java/module-info.java index 48a43b4a200a..48d2cd50dcad 100644 --- a/hedera-node/hedera-util-service/src/main/java/module-info.java +++ b/hedera-node/hedera-util-service/src/main/java/module-info.java @@ -3,6 +3,8 @@ uses com.hedera.node.app.service.util.UtilService; + requires com.hedera.node.hapi; requires transitive com.hedera.node.app.spi; + requires transitive com.hedera.pbj.runtime; requires static com.github.spotbugs.annotations; } diff --git a/hedera-node/test-clients/build.gradle.kts b/hedera-node/test-clients/build.gradle.kts index 3cdd4f5be624..aabf770f9df6 100644 --- a/hedera-node/test-clients/build.gradle.kts +++ b/hedera-node/test-clients/build.gradle.kts @@ -33,7 +33,7 @@ tasks.test { exclude("**/*") } -configurations { evaluationDependsOn(":hedera-node:node-app-hapi-fees") } +configurations { evaluationDependsOn(":app-hapi-fees") } sourceSets { // Needed because "resource" directory is misnamed. See @@ -43,12 +43,12 @@ sourceSets { dependencies { javaModuleDependencies { - api(project(":hedera-node:node-app-hapi-fees")) - api(project(":hedera-node:node-app-hapi-utils")) + api(project(":app-hapi-fees")) + api(project(":app-hapi-utils")) api(gav("com.fasterxml.jackson.annotation")) api(gav("com.google.common")) api(gav("com.google.protobuf")) - api(project(":hedera-node:node-hapi")) + api(project(":hapi")) api(gav("com.swirlds.common")) api(gav("headlong")) api(gav("info.picocli")) @@ -62,7 +62,7 @@ dependencies { api(gav("org.yaml.snakeyaml")) api(gav("tuweni.bytes")) - implementation(project(":hedera-node:node-app-service-evm")) + implementation(project(":app-service-evm")) implementation(gav("com.fasterxml.jackson.core")) implementation(gav("com.fasterxml.jackson.databind")) implementation(gav("com.github.docker.java.api")) @@ -81,10 +81,11 @@ dependencies { implementation(gav("tuweni.units")) itestImplementation(project(path)) + itestImplementation(project(":hapi")) + itestImplementation(gav("org.apache.commons.lang3")) + itestImplementation(gav("org.junit.jupiter.api")) itestImplementation(gav("org.testcontainers")) itestImplementation(gav("org.testcontainers.junit.jupiter")) - itestImplementation(project(":hedera-node:node-hapi")) - itestImplementation(gav("org.junit.jupiter.api")) eetImplementation(project(path)) eetImplementation(gav("org.junit.jupiter.api")) @@ -105,7 +106,7 @@ tasks.eet { } tasks.shadowJar { - dependsOn(project(":hedera-node:node-app-hapi-fees").tasks.jar) + dependsOn(project(":app-hapi-fees").tasks.jar) mergeServiceFiles() @@ -125,7 +126,7 @@ tasks.shadowJar { val yahCliJar = tasks.register("yahCliJar") { - dependsOn(project(":hedera-node:node-app-hapi-fees").tasks.jar) + dependsOn(project(":app-hapi-fees").tasks.jar) group = "shadow" from(sourceSets.main.get().output) @@ -150,7 +151,7 @@ val yahCliJar = val validationJar = tasks.register("validationJar") { - dependsOn(project(":hedera-node:node-app-hapi-fees").tasks.jar) + dependsOn(project(":app-hapi-fees").tasks.jar) group = "shadow" from(sourceSets.main.get().output) diff --git a/hedera-platform/build.gradle.kts b/hedera-platform/build.gradle.kts new file mode 100644 index 000000000000..d03f5143f626 --- /dev/null +++ b/hedera-platform/build.gradle.kts @@ -0,0 +1,114 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the 
Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id("com.hedera.hashgraph.platform") +} + +val besuNativeVersion = "0.6.1" +val besuVersion = "23.1.2" +val bouncycastleVersion = "1.70" +val daggerVersion = "2.42" +val eclipseCollectionsVersion = "10.4.0" +val grpcVersion = "1.45.1" +val helidonVersion = "3.0.2" +val jacksonVersion = "2.13.3" +val log4jVersion = "2.17.1" +val mockitoVersion = "4.6.1" +val nettyVersion = "4.1.77.Final" +val protobufVersion = "3.19.4" +val swirldsVersion = "0.39.0-alpha.3" +val systemStubsVersion = "2.0.2" +val testContainersVersion = "1.17.2" +val tuweniVersion = "2.2.0" + +dependencies.constraints { + javaModuleDependencies { + api(gav("awaitility", "4.2.0")) + api(gav("com.fasterxml.jackson.core", jacksonVersion)) + api(gav("com.fasterxml.jackson.databind", jacksonVersion)) + api(gav("com.github.benmanes.caffeine", "3.0.6")) + api(gav("com.github.docker.java.api", "3.2.13")) + api(gav("com.github.spotbugs.annotations", "4.7.3")) + api(gav("com.google.common", "31.1-jre")) + api(gav("com.google.protobuf", protobufVersion)) + api(gav("com.google.protobuf.util", protobufVersion)) + api(gav("com.hedera.hashgraph.protobuf.java.api", "0.40.0-blocks-state-SNAPSHOT")) // TODO removed through other PR + api(gav("com.hedera.pbj.runtime", "0.7.0")) + api(gav("com.sun.jna", "5.12.1")) + api(gav("com.swirlds.base", swirldsVersion)) + api(gav("com.swirlds.cli", swirldsVersion)) + api(gav("com.swirlds.common", swirldsVersion)) + api(gav("com.swirlds.config", swirldsVersion)) + api(gav("com.swirlds.fchashmap", swirldsVersion)) + api(gav("com.swirlds.fcqueue", swirldsVersion)) + api(gav("com.swirlds.jasperdb", swirldsVersion)) + api(gav("com.swirlds.logging", swirldsVersion)) + api(gav("com.swirlds.merkle", swirldsVersion)) + api(gav("com.swirlds.platform", swirldsVersion)) + api(gav("com.swirlds.test.framework", swirldsVersion)) + api(gav("com.swirlds.virtualmap", swirldsVersion)) + api(gav("dagger", daggerVersion)) + api(gav("dagger.compiler", daggerVersion)) + api(gav("grpc.netty", grpcVersion)) + api(gav("grpc.stub", grpcVersion)) + api(gav("headlong", "6.1.1")) + api(gav("info.picocli", "4.6.3")) + api(gav("io.github.classgraph", "4.8.65")) + api(gav("io.grpc", helidonVersion)) + api(gav("io.helidon.grpc.client", helidonVersion)) + api(gav("io.helidon.grpc.core", helidonVersion)) + api(gav("io.helidon.grpc.server", helidonVersion)) + api(gav("io.netty.handler", nettyVersion)) + api(gav("io.netty.transport", nettyVersion)) + api(gav("io.netty.transport.classes.epoll", nettyVersion)) + api(gav("io.netty.transport.epoll", nettyVersion)) + api(gav("javax.inject", "1")) + api(gav("net.i2p.crypto.eddsa", "0.3.0")) + api(gav("org.antlr.antlr4.runtime", "4.11.1")) + api(gav("org.apache.commons.codec", "1.15")) + api(gav("org.apache.commons.collections4", "4.4")) + api(gav("org.apache.commons.io", "2.11.0")) + api(gav("org.apache.commons.lang3", "3.12.0")) + api(gav("org.apache.logging.log4j", log4jVersion)) + api(gav("org.apache.logging.log4j.core", log4jVersion)) + 
api(gav("org.assertj.core", "3.23.1")) + api(gav("org.bouncycastle.pkix", bouncycastleVersion)) + api(gav("org.bouncycastle.provider", bouncycastleVersion)) + api(gav("org.eclipse.collections.api", eclipseCollectionsVersion)) + api(gav("org.eclipse.collections.impl", eclipseCollectionsVersion)) + api(gav("org.hamcrest", "2.2")) + api(gav("org.hyperledger.besu.crypto", besuVersion)) + api(gav("org.hyperledger.besu.datatypes", besuVersion)) + api(gav("org.hyperledger.besu.evm", besuVersion)) + api(gav("org.hyperledger.besu.secp256k1", besuNativeVersion)) + api(gav("org.json", "20210307")) + api(gav("org.junit.jupiter.api", "5.9.0")) + api(gav("org.junitpioneer", "2.0.1")) + api(gav("org.mockito", mockitoVersion)) + api(gav("org.mockito.inline", mockitoVersion)) + api(gav("org.mockito.junit.jupiter", mockitoVersion)) + api(gav("org.opentest4j", "1.2.0")) + api(gav("org.slf4j", "2.0.3")) + api(gav("org.testcontainers", testContainersVersion)) + api(gav("org.testcontainers.junit.jupiter", testContainersVersion)) + api(gav("org.yaml.snakeyaml", "1.33")) + api(gav("tuweni.bytes", tuweniVersion)) + api(gav("tuweni.units", tuweniVersion)) + api(gav("uk.org.webcompere.systemstubs.core", systemStubsVersion)) + api(gav("uk.org.webcompere.systemstubs.jupiter", systemStubsVersion)) + } +} diff --git a/hedera-platform/settings.gradle.kts b/hedera-platform/settings.gradle.kts new file mode 100644 index 000000000000..deba071f5ffe --- /dev/null +++ b/hedera-platform/settings.gradle.kts @@ -0,0 +1,20 @@ +/* + * Copyright (C) 2023 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +pluginManagement { + @Suppress("UnstableApiUsage") + includeBuild("../build-logic") +} \ No newline at end of file diff --git a/settings.gradle.kts b/settings.gradle.kts index 23184035139a..0a58cc755bb8 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -23,6 +23,7 @@ pluginManagement { mavenCentral() maven { url = uri("https://oss.sonatype.org/content/repositories/snapshots") } } + @Suppress("UnstableApiUsage") includeBuild("build-logic") } plugins { @@ -32,63 +33,63 @@ plugins { id("me.champeau.includegit").version("0.1.6") } -includeBuild(".") // https://github.com/gradlex-org/java-module-dependencies/issues/26 +includeBuild("hedera-platform") include(":hedera-node") -include(":node-app-service-network-admin", "hedera-network-admin-service") +include(":app", "hedera-app") -include(":node-app-service-network-admin-impl", "hedera-network-admin-service-impl") +include(":app-hapi-fees", "hapi-fees") -include(":node-app-service-consensus", "hedera-consensus-service") +include(":app-hapi-utils", "hapi-utils") -include(":node-app-service-consensus-impl", "hedera-consensus-service-impl") +include(":app-service-consensus", "hedera-consensus-service") -include(":node-app-service-file", "hedera-file-service") +include(":app-service-consensus-impl", "hedera-consensus-service-impl") -include(":node-app-service-file-impl", "hedera-file-service-impl") +include(":app-service-contract", "hedera-smart-contract-service") -include(":node-app-service-schedule", "hedera-schedule-service") +include(":app-service-contract-impl", "hedera-smart-contract-service-impl") -include(":node-app-service-schedule-impl", "hedera-schedule-service-impl") +include(":app-service-evm", "hedera-evm") -include(":node-app-service-contract", "hedera-smart-contract-service") +include(":app-service-evm-impl", "hedera-evm-impl") -include(":node-app-service-contract-impl", "hedera-smart-contract-service-impl") +include(":app-service-file", "hedera-file-service") -include(":node-app-service-token", "hedera-token-service") +include(":app-service-file-impl", "hedera-file-service-impl") -include(":node-app-service-token-impl", "hedera-token-service-impl") +include(":app-service-mono", "hedera-mono-service") -include(":node-app-service-util", "hedera-util-service") +include(":app-service-network-admin", "hedera-network-admin-service") -include(":node-app-service-util-impl", "hedera-util-service-impl") +include(":app-service-network-admin-impl", "hedera-network-admin-service-impl") -include(":node-app-hapi-utils", "hapi-utils") +include(":app-service-schedule", "hedera-schedule-service") -include(":node-app-hapi-fees", "hapi-fees") +include(":app-service-schedule-impl", "hedera-schedule-service-impl") -include(":node-hapi", "hapi") +include(":app-service-token", "hedera-token-service") -include(":node-config", "hedera-config") +include(":app-service-token-impl", "hedera-token-service-impl") -include(":node-app", "hedera-app") +include(":app-service-util", "hedera-util-service") -include(":node-app-spi", "hedera-app-spi") +include(":app-service-util-impl", "hedera-util-service-impl") -include(":node-app-service-evm", "hedera-evm") +include(":app-spi", "hedera-app-spi") -include(":node-app-service-evm-impl", "hedera-evm-impl") +include(":config", "hedera-config") -include(":node-app-service-mono", "hedera-mono-service") +include(":hapi", "hapi") include(":services-cli", "cli-clients") -include(":hedera-node:test-clients") +include(":test-clients", "test-clients") fun include(name: String, path: String) { - 
include(":hedera-node$name") - project(":hedera-node$name").projectDir = File(rootDir, "hedera-node/$path") + include(name) + project(name).projectDir = File(rootDir, "hedera-node/$path") } // Enable Gradle Build Scan @@ -126,92 +127,8 @@ dependencyResolutionManagement { // runtime distribution. These libs can be depended on during compilation, or bundled as // part of runtime. create("libs") { - val besuNativeVersion = "0.6.1" - val besuVersion = "23.1.2" - val bouncycastleVersion = "1.70" - val daggerVersion = "2.42" - val eclipseCollectionsVersion = "10.4.0" - val helidonVersion = "3.0.2" - val ioGrpcVersion = "1.45.1" - val jacksonVersion = "2.13.3" - val log4jVersion = "2.17.1" - val mockitoVersion = "4.6.1" - val swirldsVersion = "0.39.0-alpha.3" - val systemStubsVersion = "2.0.2" - val testContainersVersion = "1.17.2" - val tuweniVersion = "2.2.0" - - version("awaitility", "4.2.0") - version("com.fasterxml.jackson.core", jacksonVersion) - version("com.fasterxml.jackson.databind", jacksonVersion) - version("com.github.benmanes.caffeine", "3.0.6") - version("com.github.docker.java.api", "3.2.13") - version("com.github.spotbugs.annotations", "4.7.3") - version("com.google.common", "31.1-jre") - version("com.google.protobuf", "3.19.4") - version("com.google.protobuf.util", "3.19.2") - version("com.hedera.pbj.runtime", "0.7.0") - version("com.sun.jna", "5.12.1") - version("com.swirlds.base", swirldsVersion) - version("com.swirlds.cli", swirldsVersion) - version("com.swirlds.common", swirldsVersion) - version("com.swirlds.config", swirldsVersion) - version("com.swirlds.fchashmap", swirldsVersion) - version("com.swirlds.fcqueue", swirldsVersion) - version("com.swirlds.jasperdb", swirldsVersion) - version("com.swirlds.logging", swirldsVersion) - version("com.swirlds.merkle", swirldsVersion) - version("com.swirlds.platform", swirldsVersion) - version("com.swirlds.test.framework", swirldsVersion) - version("com.swirlds.virtualmap", swirldsVersion) - version("dagger", daggerVersion) - version("dagger.compiler", daggerVersion) - version("grpc.protobuf", ioGrpcVersion) - version("grpc.stub", ioGrpcVersion) - version("headlong", "6.1.1") - version("info.picocli", "4.6.3") - version("io.github.classgraph", "4.8.65") - version("io.grpc", helidonVersion) - version("io.helidon.grpc.client", helidonVersion) - version("io.helidon.grpc.core", helidonVersion) - version("io.helidon.grpc.server", helidonVersion) - version("io_helidon_common_configurable", helidonVersion) - version("java.annotation", "3.0.2") - version("javax.inject", "1") - version("net.i2p.crypto.eddsa", "0.3.0") - version("org.antlr.antlr4.runtime", "4.11.1") - version("org.apache.commons.codec", "1.15") - version("org.apache.commons.collections4", "4.4") - version("org.apache.commons.io", "2.11.0") - version("org.apache.commons.lang3", "3.12.0") - version("org.apache.logging.log4j", log4jVersion) - version("org.apache.logging.log4j.core", log4jVersion) - version("org.apache.logging.log4j.jul", log4jVersion) - version("org.assertj.core", "3.23.1") - version("org.bouncycastle.pkix", bouncycastleVersion) - version("org.bouncycastle.provider", bouncycastleVersion) - version("org.eclipse.collections.api", eclipseCollectionsVersion) - version("org.eclipse.collections.impl", eclipseCollectionsVersion) - version("org.hamcrest", "2.2") - version("org.hyperledger.besu.crypto", besuVersion) - version("org.hyperledger.besu.datatypes", besuVersion) - version("org.hyperledger.besu.evm", besuVersion) - version("org.hyperledger.besu.secp256k1", 
besuNativeVersion) - version("org.json", "20210307") - version("org.junit.jupiter.api", "5.9.0") - version("org.junitpioneer", "2.0.1") - version("org.mockito", mockitoVersion) - version("org.mockito.inline", mockitoVersion) - version("org.mockito.junit.jupiter", mockitoVersion) - version("org.opentest4j", "1.2.0") - version("org.slf4j", "2.0.3") - version("org.testcontainers", testContainersVersion) - version("org.testcontainers.junit.jupiter", testContainersVersion) - version("tuweni.bytes", tuweniVersion) - version("tuweni.units", tuweniVersion) - version("uk.org.webcompere.systemstubs.core", systemStubsVersion) - version("uk.org.webcompere.systemstubs.jupiter", systemStubsVersion) - + version("google-proto", "3.19.4") + version("grpc-proto", "1.45.1") version("hapi-proto", hapiProtoVersion) plugin("pbj", "com.hedera.pbj.pbj-compiler").version("0.6.1")