Merge remote-tracking branch 'upstream/main' into tessera_as_internal_process

tessera_as_internal_process
George Tebrean 11 months ago
commit b5b95fbafb
80 changed files (number of changed lines in parentheses):
  1. CHANGELOG.md (17)
  2. README.md (2)
  3. acceptance-tests/dsl/build.gradle (1)
  4. acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/AcceptanceTestBase.java (57)
  5. acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/AcceptanceTestBaseJunit5.java (200)
  6. acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/AcceptanceTestBaseTestWatcher.java (58)
  7. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/bft/BftAcceptanceTestParameterization.java (30)
  8. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/bft/BftBlockRewardPaymentAcceptanceTest.java (38)
  9. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/bft/BftDiscardRpcAcceptanceTest.java (15)
  10. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/bft/BftMiningAcceptanceTest.java (58)
  11. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/bft/BftProposalRpcAcceptanceTest.java (15)
  12. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/bft/BftProposeRpcAcceptanceTest.java (15)
  13. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/bft/BftZeroValidatorsAcceptanceTest.java (17)
  14. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/bft/ParameterizedBftTestBase.java (26)
  15. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/bft/pki/ParameterizedPkiQbftTestBase.java (26)
  16. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/bft/pki/PkiQbftAcceptanceTest.java (29)
  17. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/bft/pki/PkiQbftAcceptanceTestParameterization.java (91)
  18. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/bft/qbft/QbftContractAcceptanceTest.java (6)
  19. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/clique/CliqueDiscardRpcAcceptanceTest.java (6)
  20. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/clique/CliqueGetSignersRpcAcceptanceTest.java (14)
  21. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/clique/CliqueMiningAcceptanceTest.java (6)
  22. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/clique/CliqueProposalRpcAcceptanceTest.java (6)
  23. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/clique/CliqueProposeRpcAcceptanceTest.java (6)
  24. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/clique/CliqueZeroValidatorsAcceptanceTest.java (6)
  25. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/plugins/BadCLIOptionsPluginTest.java (4)
  26. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/plugins/BesuEventsPluginTest.java (4)
  27. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/plugins/PermissioningPluginTest.java (4)
  28. besu/src/main/java/org/hyperledger/besu/RunnerBuilder.java (2)
  29. besu/src/main/java/org/hyperledger/besu/cli/BesuCommand.java (16)
  30. besu/src/main/java/org/hyperledger/besu/cli/DefaultCommandValues.java (4)
  31. besu/src/main/java/org/hyperledger/besu/cli/subcommands/operator/GenerateBlockchainConfig.java (2)
  32. besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/RocksDbUsageHelper.java (10)
  33. besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/StorageSubCommand.java (6)
  34. besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogHelper.java (361)
  35. besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogSubCommand.java (147)
  36. besu/src/main/java/org/hyperledger/besu/controller/BesuController.java (18)
  37. besu/src/main/java/org/hyperledger/besu/controller/BesuControllerBuilder.java (3)
  38. besu/src/test/java/org/hyperledger/besu/ForkIdsNetworkConfigTest.java (320)
  39. besu/src/test/java/org/hyperledger/besu/RawForkIdTest.java (34)
  40. besu/src/test/java/org/hyperledger/besu/cli/BesuCommandTest.java (24)
  41. besu/src/test/java/org/hyperledger/besu/cli/subcommands/operator/OperatorSubCommandTest.java (7)
  42. besu/src/test/java/org/hyperledger/besu/cli/subcommands/rlp/RLPSubCommandTest.java (2)
  43. besu/src/test/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogHelperTest.java (265)
  44. besu/src/test/java/org/hyperledger/besu/controller/BesuControllerBuilderTest.java (35)
  45. besu/src/test/java/org/hyperledger/besu/controller/BesuControllerTest.java (8)
  46. besu/src/test/java/org/hyperledger/besu/controller/ConsensusScheduleBesuControllerBuilderTest.java (8)
  47. besu/src/test/java/org/hyperledger/besu/controller/MergeBesuControllerBuilderTest.java (71)
  48. besu/src/test/java/org/hyperledger/besu/controller/QbftBesuControllerBuilderTest.java (72)
  49. besu/src/test/java/org/hyperledger/besu/controller/TransitionControllerBuilderTest.java (34)
  50. config/src/main/resources/goerli.json (1)
  51. config/src/main/resources/holesky.json (1)
  52. config/src/main/resources/sepolia.json (1)
  53. docs/trace_rpc_apis.md (2)
  54. ethereum/api/src/integration-test/java/org/hyperledger/besu/ethereum/api/jsonrpc/methods/fork/frontier/EthEstimateGasIntegrationTest.java (9)
  55. ethereum/api/src/main/java/org/hyperledger/besu/ethereum/api/jsonrpc/JsonRpcErrorConverter.java (2)
  56. ethereum/api/src/main/java/org/hyperledger/besu/ethereum/api/jsonrpc/internal/methods/AbstractEstimateGas.java (21)
  57. ethereum/api/src/main/java/org/hyperledger/besu/ethereum/api/jsonrpc/internal/methods/EthEstimateGas.java (7)
  58. ethereum/api/src/main/java/org/hyperledger/besu/ethereum/api/jsonrpc/internal/response/JsonRpcError.java (4)
  59. ethereum/api/src/main/java/org/hyperledger/besu/ethereum/api/jsonrpc/internal/response/RpcErrorType.java (1)
  60. ethereum/api/src/test/java/org/hyperledger/besu/ethereum/api/jsonrpc/internal/methods/EthCreateAccessListTest.java (12)
  61. ethereum/api/src/test/java/org/hyperledger/besu/ethereum/api/jsonrpc/internal/methods/EthEstimateGasTest.java (61)
  62. ethereum/api/src/test/resources/org/hyperledger/besu/ethereum/api/jsonrpc/eth/eth_estimateGas_invalid.json (4)
  63. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/mainnet/BlockHeaderValidator.java (5)
  64. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/mainnet/MainnetTransactionProcessor.java (7)
  65. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/transaction/TransactionInvalidReason.java (1)
  66. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/trie/bonsai/BonsaiValue.java (39)
  67. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/trie/bonsai/storage/BonsaiWorldStateKeyValueStorage.java (2)
  68. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/trie/bonsai/trielog/TrieLogFactoryImpl.java (2)
  69. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/trie/bonsai/trielog/TrieLogPruner.java (17)
  70. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/trie/bonsai/worldview/BonsaiWorldState.java (91)
  71. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/trie/bonsai/worldview/BonsaiWorldStateUpdateAccumulator.java (2)
  72. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/manager/EthPeers.java (2)
  73. evm/src/main/java/org/hyperledger/besu/evm/EvmSpecVersion.java (2)
  74. evm/src/main/java/org/hyperledger/besu/evm/precompile/KZGPointEvalPrecompiledContract.java (15)
  75. evm/src/test/java/org/hyperledger/besu/evm/fluent/EVMExecutorTest.java (2)
  76. evm/src/test/java/org/hyperledger/besu/evm/precompile/KZGPointEvalPrecompileContractTest.java (4)
  77. gradle.properties (4)
  78. plugin-api/build.gradle (2)
  79. plugin-api/src/main/java/org/hyperledger/besu/plugin/services/trielogs/TrieLog.java (13)
  80. plugins/rocksdb/src/main/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/RocksDBColumnarKeyValueStorage.java (7)

@@ -1,8 +1,9 @@
# Changelog
## 23.10.4
## 24.1.0-SNAPSHOT
### Breaking Changes
- New `EXECUTION_HALTED` error returned if there is an error executing or simulating a transaction, with the reason for execution being halted. Replaces the generic `INTERNAL_ERROR` return code in certain cases which some applications may be checking for [#6343](https://github.com/hyperledger/besu/pull/6343)
### Deprecations
- Forest pruning (`pruning-enabled` options) is deprecated and will be removed soon. To save disk space consider switching to Bonsai data storage format [#6230](https://github.com/hyperledger/besu/pull/6230)
@@ -13,16 +14,13 @@
- Set Ethereum Classic mainnet activation block for Spiral network upgrade [#6267](https://github.com/hyperledger/besu/pull/6267)
- Add custom genesis file name to config overview if specified [#6297](https://github.com/hyperledger/besu/pull/6297)
- Update Gradle plugins and replace unmaintained License Gradle Plugin with the actively maintained Gradle License Report [#6275](https://github.com/hyperledger/besu/pull/6275)
- Optimize RocksDB WAL files, allows for faster restart and a more linear disk space utilization [#6328](https://github.com/hyperledger/besu/pull/6328)
### Bug fixes
- INTERNAL_ERROR from `eth_estimateGas` JSON/RPC calls [#6344](https://github.com/hyperledger/besu/issues/6344)
## 23.10.3
### Breaking Changes
### Deprecations
### Additions and Improvements
- Implement debug_traceCall [#5885](https://github.com/hyperledger/besu/pull/5885)
- Transactions that takes too long to evaluate, during block creation, are dropped from the txpool [#6163](https://github.com/hyperledger/besu/pull/6163)
@@ -34,11 +32,18 @@
- Update OpenJ9 Docker image to latest version [#6226](https://github.com/hyperledger/besu/pull/6226)
- Add error messages on authentication failures with username and password [#6212](https://github.com/hyperledger/besu/pull/6212)
- Add `rocksdb usage` to the `storage` subcommand to allow users and dev to check columns families usage [#6185](https://github.com/hyperledger/besu/pull/6185)
- Ethereum Classic Spiral network upgrade [#6078](https://github.com/hyperledger/besu/pull/6078)
- Fix self destruct collision [#6205](https://github.com/hyperledger/besu/pull/6205)
- Mark deleted storage on cleared [#6305](https://github.com/hyperledger/besu/pull/6305)
### Bug fixes
- Fix Docker image name clash between Besu and evmtool [#6194](https://github.com/hyperledger/besu/pull/6194)
- Fix `logIndex` in `eth_getTransactionReceipt` JSON RPC method [#6206](https://github.com/hyperledger/besu/pull/6206)
### Download Links
https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/23.10.3/besu-23.10.3.zip / sha256 da7ef8a6ceb88d3e327cacddcdb32218d1750b464c14165a74068f6dc6e0871a
https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/23.10.3/besu-23.10.3.tar.gz / sha256 73c834cf32c7bbe255d7d8cc7ca5d1eb0df8430b9114935c8dcf3a675b2acbc2
## 23.10.2
### Breaking Changes
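
The EXECUTION_HALTED breaking change above means that callers of eth_estimateGas (and other call/simulation endpoints) which used to match on the generic INTERNAL_ERROR code now receive a dedicated error carrying the reason execution halted. Below is a hypothetical client-side sketch using web3j (a library the acceptance-test DSL already depends on); the endpoint URL, addresses and call data are placeholders, and the exact error code and message are whatever the node returns, not values taken from this commit:

import org.web3j.protocol.Web3j;
import org.web3j.protocol.core.methods.request.Transaction;
import org.web3j.protocol.core.methods.response.EthEstimateGas;
import org.web3j.protocol.http.HttpService;

// Hypothetical illustration only; not part of this commit.
public final class EstimateGasErrorProbe {
  public static void main(final String[] args) throws Exception {
    final Web3j web3 = Web3j.build(new HttpService("http://127.0.0.1:8545")); // placeholder endpoint
    final EthEstimateGas response =
        web3.ethEstimateGas(
                Transaction.createEthCallTransaction(
                    "0x627306090abaB3A6e1400e9345bC60c78a8BEf57", // placeholder sender
                    "0xf17f52151EbEF6C7334FAD080c5704D77216b732", // placeholder contract
                    "0xdeadbeef")) // placeholder call data
            .send();
    if (response.hasError()) {
      // Clients that special-cased the generic INTERNAL_ERROR code should now expect a
      // dedicated "execution halted" error whose message includes the halt reason.
      System.out.printf(
          "estimateGas failed: code=%d, message=%s%n",
          response.getError().getCode(), response.getError().getMessage());
    } else {
      System.out.println("estimated gas: " + response.getAmountUsed());
    }
    web3.shutdown();
  }
}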

@@ -47,7 +47,7 @@ Instructions for how to get started with developing on the Besu codebase. Please
* [Checking Out and Building](https://wiki.hyperledger.org/display/BESU/Building+from+source)
* [Running Developer Builds](https://wiki.hyperledger.org/display/BESU/Building+from+source#running-developer-builds)
* [Code Coverage](https://wiki.hyperledger.org/display/BESU/Code+coverage)
* [Logging](https://wiki.hyperledger.org/display/BESU/Logging) or the [Documentation's Logging section](https://besu.hyperledger.org/en/stable/HowTo/Monitor/Logging/)
* [Logging](https://wiki.hyperledger.org/display/BESU/Logging) or the [Documentation's Logging section](https://besu.hyperledger.org/public-networks/how-to/monitor/logging)
## Release Notes

@@ -47,4 +47,5 @@ dependencies {
implementation 'org.web3j:crypto'
implementation 'org.testcontainers:testcontainers'
implementation 'org.junit.jupiter:junit-jupiter'
}

@@ -49,7 +49,6 @@ import org.hyperledger.besu.tests.acceptance.dsl.transaction.txpool.TxPoolTransa
import org.hyperledger.besu.tests.acceptance.dsl.transaction.web3.Web3Transactions;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.ProcessBuilder.Redirect;
@@ -58,14 +57,15 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.junit.After;
import org.junit.Rule;
import org.junit.rules.TestName;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.junit.jupiter.api.extension.ExtendWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
/**
* Superclass for acceptance tests. For now (transition to junit5 is ongoing) this class supports
* junit4 format.
*/
@ExtendWith(AcceptanceTestBaseTestWatcher.class)
public class AcceptanceTestBase {
private static final Logger LOG = LoggerFactory.getLogger(AcceptanceTestBase.class);
@@ -131,8 +131,6 @@ public class AcceptanceTestBase {
exitedSuccessfully = new ExitedWithCode(0);
}
@Rule public final TestName name = new TestName();
@After
public void tearDownAcceptanceTestBase() {
reportMemory();
@@ -178,49 +176,6 @@ public class AcceptanceTestBase {
}
}
@Rule
public TestWatcher logEraser =
new TestWatcher() {
@Override
protected void starting(final Description description) {
MDC.put("test", description.getMethodName());
MDC.put("class", description.getClassName());
final String errorMessage = "Uncaught exception in thread \"{}\"";
Thread.currentThread()
.setUncaughtExceptionHandler(
(thread, error) -> LOG.error(errorMessage, thread.getName(), error));
Thread.setDefaultUncaughtExceptionHandler(
(thread, error) -> LOG.error(errorMessage, thread.getName(), error));
}
@Override
protected void failed(final Throwable e, final Description description) {
// add the result at the end of the log so it is self-sufficient
LOG.error(
"==========================================================================================");
LOG.error("Test failed. Reported Throwable at the point of failure:", e);
LOG.error(e.getMessage());
}
@Override
protected void succeeded(final Description description) {
// if so configured, delete logs of successful tests
if (!Boolean.getBoolean("acctests.keepLogsOfPassingTests")) {
String pathname =
"build/acceptanceTestLogs/"
+ description.getClassName()
+ "."
+ description.getMethodName()
+ ".log";
LOG.info("Test successful, deleting log at {}", pathname);
File file = new File(pathname);
file.delete();
}
}
};
protected void waitForBlockHeight(final Node node, final long blockchainHeight) {
WaitUtils.waitFor(
120,

@@ -0,0 +1,200 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.tests.acceptance.dsl;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.assertj.core.api.Assertions.assertThat;
import org.hyperledger.besu.tests.acceptance.dsl.account.Accounts;
import org.hyperledger.besu.tests.acceptance.dsl.blockchain.Blockchain;
import org.hyperledger.besu.tests.acceptance.dsl.condition.admin.AdminConditions;
import org.hyperledger.besu.tests.acceptance.dsl.condition.bft.BftConditions;
import org.hyperledger.besu.tests.acceptance.dsl.condition.clique.CliqueConditions;
import org.hyperledger.besu.tests.acceptance.dsl.condition.eth.EthConditions;
import org.hyperledger.besu.tests.acceptance.dsl.condition.login.LoginConditions;
import org.hyperledger.besu.tests.acceptance.dsl.condition.net.NetConditions;
import org.hyperledger.besu.tests.acceptance.dsl.condition.perm.PermissioningConditions;
import org.hyperledger.besu.tests.acceptance.dsl.condition.priv.PrivConditions;
import org.hyperledger.besu.tests.acceptance.dsl.condition.process.ExitedWithCode;
import org.hyperledger.besu.tests.acceptance.dsl.condition.txpool.TxPoolConditions;
import org.hyperledger.besu.tests.acceptance.dsl.condition.web3.Web3Conditions;
import org.hyperledger.besu.tests.acceptance.dsl.contract.ContractVerifier;
import org.hyperledger.besu.tests.acceptance.dsl.node.Node;
import org.hyperledger.besu.tests.acceptance.dsl.node.cluster.Cluster;
import org.hyperledger.besu.tests.acceptance.dsl.node.configuration.BesuNodeFactory;
import org.hyperledger.besu.tests.acceptance.dsl.node.configuration.permissioning.PermissionedNodeBuilder;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.account.AccountTransactions;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.admin.AdminTransactions;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.bft.BftTransactions;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.clique.CliqueTransactions;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.contract.ContractTransactions;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.eth.EthTransactions;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.miner.MinerTransactions;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.net.NetTransactions;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.perm.PermissioningTransactions;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.privacy.PrivacyTransactions;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.txpool.TxPoolTransactions;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.web3.Web3Transactions;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.math.BigInteger;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.logging.log4j.ThreadContext;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestInfo;
import org.junit.jupiter.api.extension.ExtendWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Superclass for acceptance tests. For now (transition to junit5 is ongoing) this class supports
* junit5 format. Once the transition is complete, this class can be removed and recombined with
* AcceptanceTestBase (original).
*/
@ExtendWith(AcceptanceTestBaseTestWatcher.class)
public class AcceptanceTestBaseJunit5 {
private static final Logger LOG = LoggerFactory.getLogger(AcceptanceTestBaseJunit5.class);
protected final Accounts accounts;
protected final AccountTransactions accountTransactions;
protected final AdminConditions admin;
protected final AdminTransactions adminTransactions;
protected final Blockchain blockchain;
protected final CliqueConditions clique;
protected final CliqueTransactions cliqueTransactions;
protected final Cluster cluster;
protected final ContractVerifier contractVerifier;
protected final ContractTransactions contractTransactions;
protected final EthConditions eth;
protected final EthTransactions ethTransactions;
protected final BftTransactions bftTransactions;
protected final BftConditions bft;
protected final LoginConditions login;
protected final NetConditions net;
protected final BesuNodeFactory besu;
protected final PermissioningConditions perm;
protected final PermissionedNodeBuilder permissionedNodeBuilder;
protected final PermissioningTransactions permissioningTransactions;
protected final MinerTransactions minerTransactions;
protected final Web3Conditions web3;
protected final PrivConditions priv;
protected final PrivacyTransactions privacyTransactions;
protected final TxPoolConditions txPoolConditions;
protected final TxPoolTransactions txPoolTransactions;
protected final ExitedWithCode exitedSuccessfully;
private final ExecutorService outputProcessorExecutor = Executors.newCachedThreadPool();
protected AcceptanceTestBaseJunit5() {
ethTransactions = new EthTransactions();
accounts = new Accounts(ethTransactions);
adminTransactions = new AdminTransactions();
cliqueTransactions = new CliqueTransactions();
bftTransactions = new BftTransactions();
accountTransactions = new AccountTransactions(accounts);
permissioningTransactions = new PermissioningTransactions();
privacyTransactions = new PrivacyTransactions();
contractTransactions = new ContractTransactions();
minerTransactions = new MinerTransactions();
blockchain = new Blockchain(ethTransactions);
clique = new CliqueConditions(ethTransactions, cliqueTransactions);
eth = new EthConditions(ethTransactions);
bft = new BftConditions(bftTransactions);
login = new LoginConditions();
net = new NetConditions(new NetTransactions());
cluster = new Cluster(net);
perm = new PermissioningConditions(permissioningTransactions);
priv = new PrivConditions(privacyTransactions);
admin = new AdminConditions(adminTransactions);
web3 = new Web3Conditions(new Web3Transactions());
besu = new BesuNodeFactory();
txPoolTransactions = new TxPoolTransactions();
txPoolConditions = new TxPoolConditions(txPoolTransactions);
contractVerifier = new ContractVerifier(accounts.getPrimaryBenefactor());
permissionedNodeBuilder = new PermissionedNodeBuilder();
exitedSuccessfully = new ExitedWithCode(0);
}
@BeforeEach
public void setUp(final TestInfo testInfo) {
// log4j is configured to create a file per test
// build/acceptanceTestLogs/${ctx:class}.${ctx:test}.log
ThreadContext.put("class", this.getClass().getSimpleName());
ThreadContext.put("test", testInfo.getTestMethod().get().getName());
}
@AfterEach
public void tearDownAcceptanceTestBase() {
reportMemory();
cluster.close();
}
public void reportMemory() {
String os = System.getProperty("os.name");
String[] command = null;
if (os.contains("Linux")) {
command = new String[] {"/usr/bin/top", "-n", "1", "-o", "%MEM", "-b", "-c", "-w", "180"};
}
if (os.contains("Mac")) {
command = new String[] {"/usr/bin/top", "-l", "1", "-o", "mem", "-n", "20"};
}
if (command != null) {
LOG.info("Memory usage at end of test:");
final ProcessBuilder processBuilder =
new ProcessBuilder(command)
.redirectErrorStream(true)
.redirectInput(ProcessBuilder.Redirect.INHERIT);
try {
final Process memInfoProcess = processBuilder.start();
outputProcessorExecutor.execute(() -> printOutput(memInfoProcess));
memInfoProcess.waitFor();
LOG.debug("Memory info process exited with code {}", memInfoProcess.exitValue());
} catch (final Exception e) {
LOG.warn("Error running memory information process", e);
}
} else {
LOG.info("Don't know how to report memory for OS {}", os);
}
}
private void printOutput(final Process process) {
try (final BufferedReader in =
new BufferedReader(new InputStreamReader(process.getInputStream(), UTF_8))) {
String line = in.readLine();
while (line != null) {
LOG.info(line);
line = in.readLine();
}
} catch (final IOException e) {
LOG.warn("Failed to read output from memory information process: ", e);
}
}
protected void waitForBlockHeight(final Node node, final long blockchainHeight) {
WaitUtils.waitFor(
120,
() ->
assertThat(node.execute(ethTransactions.blockNumber()))
.isGreaterThanOrEqualTo(BigInteger.valueOf(blockchainHeight)));
}
}
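
For orientation, a minimal and purely illustrative JUnit 5 acceptance test built on the new base class could look as follows. Only AcceptanceTestBaseJunit5, the inherited besu and cluster fields, createCliqueNodeWithValidators and waitForBlockHeight appear elsewhere in this diff; the test class itself and its scenario are assumptions:

package org.hyperledger.besu.tests.acceptance; // hypothetical package for the example

import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBaseJunit5;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;

import org.junit.jupiter.api.Test;

class ExampleCliqueAcceptanceTest extends AcceptanceTestBaseJunit5 {

  @Test
  void singleValidatorMinesBlocks() throws Exception {
    // The BesuNodeFactory ("besu") and the Cluster come from the base class; per-test log
    // tagging, memory reporting and cluster shutdown are handled by its @BeforeEach/@AfterEach
    // hooks and the registered AcceptanceTestBaseTestWatcher.
    final String[] validators = {"miner1"};
    final BesuNode minerNode = besu.createCliqueNodeWithValidators("miner1", validators);
    cluster.start(minerNode);

    // Inherited helper: waits until the node reports at least the given block number.
    waitForBlockHeight(minerNode, 2);
  }
}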

@@ -0,0 +1,58 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.tests.acceptance.dsl;
import java.io.File;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.TestWatcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AcceptanceTestBaseTestWatcher implements TestWatcher {
private static final Logger LOG = LoggerFactory.getLogger(AcceptanceTestBaseTestWatcher.class);
@Override
public void testFailed(final ExtensionContext extensionContext, final Throwable e) {
// add the result at the end of the log, so it is self-sufficient
LOG.error(
"==========================================================================================");
LOG.error("Test failed. Reported Throwable at the point of failure:", e);
LOG.error(e.getMessage());
}
@Override
public void testSuccessful(final ExtensionContext extensionContext) {
// if so configured, delete logs of successful tests
if (!Boolean.getBoolean("acctests.keepLogsOfPassingTests")) {
try {
// log4j is configured to create a file per test
// build/acceptanceTestLogs/${ctx:class}.${ctx:test}.log
String pathname =
"build/acceptanceTestLogs/"
+ extensionContext.getTestClass().get().getSimpleName()
+ "."
+ extensionContext.getTestMethod().get().getName()
+ ".log";
LOG.info("Test successful, deleting log at {}", pathname);
final File file = new File(pathname);
file.delete();
} catch (final Exception e) {
LOG.error("could not delete test file", e);
}
}
}
}

@@ -17,26 +17,22 @@ package org.hyperledger.besu.tests.acceptance.bft;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import org.hyperledger.besu.tests.acceptance.dsl.node.configuration.BesuNodeFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Stream;
import org.junit.jupiter.params.provider.Arguments;
public class BftAcceptanceTestParameterization {
public static List<Object[]> getFactories() {
final List<Object[]> ret = new ArrayList<>();
ret.addAll(
List.of(
new Object[] {
"ibft2",
new BftAcceptanceTestParameterization(
BesuNodeFactory::createIbft2Node, BesuNodeFactory::createIbft2NodeWithValidators)
},
new Object[] {
"qbft",
new BftAcceptanceTestParameterization(
BesuNodeFactory::createQbftNode, BesuNodeFactory::createQbftNodeWithValidators)
}));
return ret;
public static Stream<Arguments> getFactories() {
return Stream.of(
Arguments.of(
"ibft2",
new BftAcceptanceTestParameterization(
BesuNodeFactory::createIbft2Node, BesuNodeFactory::createIbft2NodeWithValidators)),
Arguments.of(
"qbft",
new BftAcceptanceTestParameterization(
BesuNodeFactory::createQbftNode, BesuNodeFactory::createQbftNodeWithValidators)));
}
@FunctionalInterface

@@ -32,19 +32,18 @@ import java.util.NavigableMap;
import java.util.Optional;
import java.util.TreeMap;
import org.junit.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
public class BftBlockRewardPaymentAcceptanceTest extends ParameterizedBftTestBase {
private static final Amount BLOCK_REWARD = Amount.wei(new BigInteger("5000000000000000000", 10));
public BftBlockRewardPaymentAcceptanceTest(
final String testName, final BftAcceptanceTestParameterization nodeFactory) {
super(testName, nodeFactory);
}
@Test
public void validatorsArePaidBlockReward() throws Exception {
@ParameterizedTest(name = "{0} bft node factory type")
@MethodSource("factoryFunctions")
public void validatorsArePaidBlockReward(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final String[] validators = {"validator"};
final BesuNode validator = nodeFactory.createNodeWithValidators(besu, "validator", validators);
final BesuNode nonValidator =
@@ -61,8 +60,11 @@ public class BftBlockRewardPaymentAcceptanceTest extends ParameterizedBftTestBas
Amount.ether(blockRewardEth * blockToCheck), BigInteger.valueOf(blockToCheck)));
}
@Test
public void payBlockRewardToConfiguredNode() throws Exception {
@ParameterizedTest(name = "{0} bft node factory type")
@MethodSource("factoryFunctions")
public void payBlockRewardToConfiguredNode(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final String[] validators = {"validator1"};
final BesuNode validator1 =
nodeFactory.createNodeWithValidators(besu, "validator1", validators);
@@ -90,9 +92,11 @@ public class BftBlockRewardPaymentAcceptanceTest extends ParameterizedBftTestBas
Amount.ether(blockRewardEth * blockToCheck), BigInteger.valueOf(blockToCheck)));
}
@Test
public void payBlockRewardAccordingToTransitions_defaultInitialMiningBeneficiary()
throws Exception {
@ParameterizedTest(name = "{0} bft node factory type")
@MethodSource("factoryFunctions")
public void payBlockRewardAccordingToTransitions_defaultInitialMiningBeneficiary(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final List<Address> addresses = generateAddresses(2);
final Map<Long, Optional<Address>> transitions =
Map.of(
@@ -103,9 +107,11 @@ public class BftBlockRewardPaymentAcceptanceTest extends ParameterizedBftTestBas
testMiningBeneficiaryTransitions(Optional.empty(), transitions);
}
@Test
public void payBlockRewardAccordingToTransitions_customInitialMiningBeneficiary()
throws Exception {
@ParameterizedTest(name = "{0} bft node factory type")
@MethodSource("factoryFunctions")
public void payBlockRewardAccordingToTransitions_customInitialMiningBeneficiary(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final List<Address> addresses = generateAddresses(4);
final Map<Long, Optional<Address>> transitions =
Map.of(

@@ -16,17 +16,16 @@ package org.hyperledger.besu.tests.acceptance.bft;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import org.junit.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
public class BftDiscardRpcAcceptanceTest extends ParameterizedBftTestBase {
public BftDiscardRpcAcceptanceTest(
final String testName, final BftAcceptanceTestParameterization nodeFactory) {
super(testName, nodeFactory);
}
@Test
public void shouldDiscardVotes() throws Exception {
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("factoryFunctions")
public void shouldDiscardVotes(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final String[] validators = {"validator1", "validator3"};
final BesuNode validator1 =
nodeFactory.createNodeWithValidators(besu, "validator1", validators);

@@ -28,17 +28,16 @@ import java.util.List;
import java.util.Optional;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
public class BftMiningAcceptanceTest extends ParameterizedBftTestBase {
public BftMiningAcceptanceTest(
final String testName, final BftAcceptanceTestParameterization nodeFactory) {
super(testName, nodeFactory);
}
@Test
public void shouldMineOnSingleNodeWithPaidGas_Berlin() throws Exception {
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("factoryFunctions")
public void shouldMineOnSingleNodeWithPaidGas_Berlin(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final BesuNode minerNode = nodeFactory.createNode(besu, "miner1");
cluster.start(minerNode);
@@ -57,8 +56,11 @@ public class BftMiningAcceptanceTest extends ParameterizedBftTestBase {
cluster.verify(receiver.balanceEquals(3));
}
@Test
public void shouldMineOnSingleNodeWithFreeGas_Berlin() throws Exception {
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("factoryFunctions")
public void shouldMineOnSingleNodeWithFreeGas_Berlin(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final BesuNode minerNode = nodeFactory.createNode(besu, "miner1");
final MiningParameters zeroGasMiningParams =
ImmutableMiningParameters.builder()
@@ -90,8 +92,11 @@ public class BftMiningAcceptanceTest extends ParameterizedBftTestBase {
cluster.verify(receiver.balanceEquals(3));
}
@Test
public void shouldMineOnSingleNodeWithPaidGas_London() throws Exception {
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("factoryFunctions")
public void shouldMineOnSingleNodeWithPaidGas_London(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final BesuNode minerNode = nodeFactory.createNode(besu, "miner1");
updateGenesisConfigToLondon(minerNode, false);
@@ -115,8 +120,11 @@ public class BftMiningAcceptanceTest extends ParameterizedBftTestBase {
cluster.verify(receiver.balanceEquals(3));
}
@Test
public void shouldMineOnSingleNodeWithFreeGas_London() throws Exception {
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("factoryFunctions")
public void shouldMineOnSingleNodeWithFreeGas_London(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final BesuNode minerNode = nodeFactory.createNode(besu, "miner1");
updateGenesisConfigToLondon(minerNode, true);
@@ -142,8 +150,11 @@ public class BftMiningAcceptanceTest extends ParameterizedBftTestBase {
cluster.verify(receiver.balanceEquals(3));
}
@Test
public void shouldMineOnMultipleNodes() throws Exception {
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("factoryFunctions")
public void shouldMineOnMultipleNodes(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final BesuNode minerNode1 = nodeFactory.createNode(besu, "miner1");
final BesuNode minerNode2 = nodeFactory.createNode(besu, "miner2");
final BesuNode minerNode3 = nodeFactory.createNode(besu, "miner3");
@@ -168,8 +179,11 @@ public class BftMiningAcceptanceTest extends ParameterizedBftTestBase {
cluster.verify(receiver.balanceEquals(6));
}
@Test
public void shouldMineOnMultipleNodesEvenWhenClusterContainsNonValidator() throws Exception {
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("factoryFunctions")
public void shouldMineOnMultipleNodesEvenWhenClusterContainsNonValidator(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final String[] validators = {"validator1", "validator2", "validator3"};
final BesuNode validator1 =
nodeFactory.createNodeWithValidators(besu, "validator1", validators);
@@ -196,9 +210,11 @@ public class BftMiningAcceptanceTest extends ParameterizedBftTestBase {
cluster.verify(receiver.balanceEquals(3));
}
@Test
public void shouldStillMineWhenANonProposerNodeFailsAndHasSufficientValidators()
throws Exception {
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("factoryFunctions")
public void shouldStillMineWhenANonProposerNodeFailsAndHasSufficientValidators(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final BesuNode minerNode1 = nodeFactory.createNode(besu, "miner1");
final BesuNode minerNode2 = nodeFactory.createNode(besu, "miner2");
final BesuNode minerNode3 = nodeFactory.createNode(besu, "miner3");

@@ -16,17 +16,16 @@ package org.hyperledger.besu.tests.acceptance.bft;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import org.junit.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
public class BftProposalRpcAcceptanceTest extends ParameterizedBftTestBase {
public BftProposalRpcAcceptanceTest(
final String testName, final BftAcceptanceTestParameterization nodeFactory) {
super(testName, nodeFactory);
}
@Test
public void shouldReturnProposals() throws Exception {
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("factoryFunctions")
public void shouldReturnProposals(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final String[] validators = {"validator1", "validator2", "validator3"};
final BesuNode validator1 =
nodeFactory.createNodeWithValidators(besu, "validator1", validators);

@@ -17,19 +17,18 @@ package org.hyperledger.besu.tests.acceptance.bft;
import org.hyperledger.besu.tests.acceptance.dsl.condition.Condition;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import org.junit.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
// These tests prove the ibft_proposeValidatorVote and ibft_getValidatorsByBlockNumber (implicitly)
// JSON RPC calls.
public class BftProposeRpcAcceptanceTest extends ParameterizedBftTestBase {
public BftProposeRpcAcceptanceTest(
final String testName, final BftAcceptanceTestParameterization nodeFactory) {
super(testName, nodeFactory);
}
@Test
public void validatorsCanBeAddedAndThenRemoved() throws Exception {
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("factoryFunctions")
public void validatorsCanBeAddedAndThenRemoved(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final String[] validators = {"validator1", "validator2", "validator3"};
final BesuNode validator1 =
nodeFactory.createNodeWithValidators(besu, "validator1", validators);

@@ -16,17 +16,16 @@ package org.hyperledger.besu.tests.acceptance.bft;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import org.junit.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
public class BftZeroValidators extends ParameterizedBftTestBase {
public class BftZeroValidatorsAcceptanceTest extends ParameterizedBftTestBase {
public BftZeroValidators(
final String testName, final BftAcceptanceTestParameterization nodeFactory) {
super(testName, nodeFactory);
}
@Test
public void zeroValidatorsFormValidCluster() throws Exception {
@ParameterizedTest(name = "{0} bft node factory type")
@MethodSource("factoryFunctions")
public void zeroValidatorsFormValidCluster(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final String[] validators = {};
final BesuNode node1 = nodeFactory.createNodeWithValidators(besu, "node1", validators);
final BesuNode node2 = nodeFactory.createNodeWithValidators(besu, "node2", validators);

@@ -14,29 +14,23 @@
*/
package org.hyperledger.besu.tests.acceptance.bft;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBase;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBaseJunit5;
import java.util.Collection;
import java.util.stream.Stream;
import org.junit.Ignore;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.params.provider.Arguments;
@RunWith(Parameterized.class)
@Ignore("This is not a test class, it offers BFT parameterization only.")
public abstract class ParameterizedBftTestBase extends AcceptanceTestBase {
@Disabled("This is not a test class, it offers BFT parameterization only.")
public abstract class ParameterizedBftTestBase extends AcceptanceTestBaseJunit5 {
protected String bftType;
protected BftAcceptanceTestParameterization nodeFactory;
protected final String bftType;
protected final BftAcceptanceTestParameterization nodeFactory;
@Parameters(name = "{0}")
public static Collection<Object[]> factoryFunctions() {
public static Stream<Arguments> factoryFunctions() {
return BftAcceptanceTestParameterization.getFactories();
}
protected ParameterizedBftTestBase(
final String bftType, final BftAcceptanceTestParameterization input) {
protected void setUp(final String bftType, final BftAcceptanceTestParameterization input) {
this.bftType = bftType;
this.nodeFactory = input;
}
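
Consolidated, the migrated parameterization pattern used throughout the BFT tests in this commit looks like the sketch below. It reuses only names that appear in this diff (ParameterizedBftTestBase, factoryFunctions, setUp, BftAcceptanceTestParameterization, createNode, cluster, waitForBlockHeight); the test class and scenario are illustrative, not part of the change set:

package org.hyperledger.besu.tests.acceptance.bft; // same package as ParameterizedBftTestBase

import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

public class ExampleBftAcceptanceTest extends ParameterizedBftTestBase {

  @ParameterizedTest(name = "{index}: {0}")
  @MethodSource("factoryFunctions") // Stream<Arguments> supplied by the base class
  public void clusterStartsForEachBftFlavour(
      final String testName, final BftAcceptanceTestParameterization nodeFactory)
      throws Exception {
    // JUnit 5 injects the parameters per test method instead of via a constructor,
    // so each test installs them explicitly before touching the DSL.
    setUp(testName, nodeFactory);

    final BesuNode minerNode = nodeFactory.createNode(besu, "miner1");
    cluster.start(minerNode);
    waitForBlockHeight(minerNode, 1);
  }
}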

@@ -14,28 +14,16 @@
*/
package org.hyperledger.besu.tests.acceptance.bft.pki;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBase;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBaseJunit5;
import java.util.Collection;
import java.util.stream.Stream;
import org.junit.Ignore;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.params.provider.Arguments;
@RunWith(Parameterized.class)
@Ignore("This is not a test class, it offers PKI QBFT parameterization only.")
public abstract class ParameterizedPkiQbftTestBase extends AcceptanceTestBase {
protected final PkiQbftAcceptanceTestParameterization nodeFactory;
@Parameters(name = "{0}")
public static Collection<Object[]> factoryFunctions() {
@Disabled("This is not a test class, it offers PKI QBFT parameterization only.")
public abstract class ParameterizedPkiQbftTestBase extends AcceptanceTestBaseJunit5 {
public static Stream<Arguments> factoryFunctions() {
return PkiQbftAcceptanceTestParameterization.getFactories();
}
protected ParameterizedPkiQbftTestBase(
final String testName, final PkiQbftAcceptanceTestParameterization input) {
this.nodeFactory = input;
}
}

@@ -20,17 +20,16 @@ package org.hyperledger.besu.tests.acceptance.bft.pki;
import org.hyperledger.besu.tests.acceptance.dsl.account.Account;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import org.junit.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
public class PkiQbftAcceptanceTest extends ParameterizedPkiQbftTestBase {
public PkiQbftAcceptanceTest(
final String testName, final PkiQbftAcceptanceTestParameterization input) {
super(testName, input);
}
@Test
public void shouldMineOnSingleNode() throws Exception {
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("factoryFunctions")
public void shouldMineOnSingleNode(
final String testName, final PkiQbftAcceptanceTestParameterization nodeFactory)
throws Exception {
final BesuNode minerNode = nodeFactory.createNode(besu, "miner1");
cluster.start(minerNode);
@@ -49,8 +48,11 @@ public class PkiQbftAcceptanceTest extends ParameterizedPkiQbftTestBase {
cluster.verify(receiver.balanceEquals(3));
}
@Test
public void shouldMineOnMultipleNodes() throws Exception {
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("factoryFunctions")
public void shouldMineOnMultipleNodes(
final String testName, final PkiQbftAcceptanceTestParameterization nodeFactory)
throws Exception {
final BesuNode minerNode1 = nodeFactory.createNode(besu, "miner1");
final BesuNode minerNode2 = nodeFactory.createNode(besu, "miner2");
final BesuNode minerNode3 = nodeFactory.createNode(besu, "miner3");
@@ -75,8 +77,11 @@ public class PkiQbftAcceptanceTest extends ParameterizedPkiQbftTestBase {
cluster.verify(receiver.balanceEquals(6));
}
@Test
public void shouldMineWithIgnoringANodeInCRL() throws Exception {
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("factoryFunctions")
public void shouldMineWithIgnoringANodeInCRL(
final String testName, final PkiQbftAcceptanceTestParameterization nodeFactory)
throws Exception {
final BesuNode minerNode1 = nodeFactory.createNode(besu, "miner1");
final BesuNode minerNode2 = nodeFactory.createNode(besu, "miner2");
final BesuNode minerNode3 = nodeFactory.createNode(besu, "miner3");

@@ -22,73 +22,70 @@ import org.hyperledger.besu.tests.acceptance.dsl.node.configuration.BesuNodeFact
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Stream;
import org.junit.jupiter.params.provider.Arguments;
public class PkiQbftAcceptanceTestParameterization {
public static List<Object[]> getFactories() {
final List<Object[]> ret = new ArrayList<>();
public static Stream<Arguments> getFactories() {
List<Arguments> args = new ArrayList<>();
/*
BLOCK CREATION
*/
ret.add(
new Object[] {
"qbft-pki-jks",
new PkiQbftAcceptanceTestParameterization(
BesuNodeFactory::createPkiQbftJKSNode,
BesuNodeFactory::createPkiQbftJKSNodeWithValidators)
});
ret.add(
new Object[] {
"qbft-pki-pkcs12",
new PkiQbftAcceptanceTestParameterization(
BesuNodeFactory::createPkiQbftPKCS12Node,
BesuNodeFactory::createPkiQbftPKCS12NodeWithValidators)
});
args.add(
Arguments.of(
"qbft-pki-jks",
new PkiQbftAcceptanceTestParameterization(
BesuNodeFactory::createPkiQbftJKSNode,
BesuNodeFactory::createPkiQbftJKSNodeWithValidators)));
if (Boolean.getBoolean("acctests.runBesuAsProcess")) {
ret.add(
new Object[] {
"qbft-pki-pkcs11",
args.add(
Arguments.of(
"qbft-pki-pkcs12",
new PkiQbftAcceptanceTestParameterization(
BesuNodeFactory::createPkiQbftPKCS11Node,
BesuNodeFactory::createPkiQbftPKCS11NodeWithValidators)
});
BesuNodeFactory::createPkiQbftPKCS12Node,
BesuNodeFactory::createPkiQbftPKCS12NodeWithValidators)));
if (Boolean.getBoolean("acctests.runBesuAsProcess")) {
args.add(
Arguments.of(
"qbft-pki-pkcs11",
new PkiQbftAcceptanceTestParameterization(
BesuNodeFactory::createPkiQbftPKCS11Node,
BesuNodeFactory::createPkiQbftPKCS11NodeWithValidators)));
}
/*
TLS
*/
ret.add(
new Object[] {
"qbft-tls-jks",
new PkiQbftAcceptanceTestParameterization(
BesuNodeFactory::createQbftNodeWithTLSJKS,
BesuNodeFactory::createQbftTLSJKSNodeWithValidators)
});
ret.add(
new Object[] {
"qbft-tls-pkcs12",
new PkiQbftAcceptanceTestParameterization(
BesuNodeFactory::createQbftNodeWithTLSPKCS12,
BesuNodeFactory::createQbftTLSPKCS12NodeWithValidators)
});
args.add(
Arguments.of(
"qbft-tls-jks",
new PkiQbftAcceptanceTestParameterization(
BesuNodeFactory::createQbftNodeWithTLSJKS,
BesuNodeFactory::createQbftTLSJKSNodeWithValidators)));
if (Boolean.getBoolean("acctests.runBesuAsProcess")) {
ret.add(
new Object[] {
"qbft-tls-pkcs11",
args.add(
Arguments.of(
"qbft-tls-pkcs12",
new PkiQbftAcceptanceTestParameterization(
BesuNodeFactory::createQbftNodeWithTLSPKCS11,
BesuNodeFactory::createQbftTLSPKCS11NodeWithValidators)
});
BesuNodeFactory::createQbftNodeWithTLSPKCS12,
BesuNodeFactory::createQbftTLSPKCS12NodeWithValidators)));
if (Boolean.getBoolean("acctests.runBesuAsProcess")) {
args.add(
Arguments.of(
"qbft-tls-pkcs11",
new PkiQbftAcceptanceTestParameterization(
BesuNodeFactory::createQbftNodeWithTLSPKCS11,
BesuNodeFactory::createQbftTLSPKCS11NodeWithValidators)));
}
return ret;
return args.stream();
}
@FunctionalInterface

@@ -14,13 +14,13 @@
*/
package org.hyperledger.besu.tests.acceptance.bft.qbft;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBase;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBaseJunit5;
import org.hyperledger.besu.tests.acceptance.dsl.account.Account;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import org.junit.Test;
import org.junit.jupiter.api.Test;
public class QbftContractAcceptanceTest extends AcceptanceTestBase {
public class QbftContractAcceptanceTest extends AcceptanceTestBaseJunit5 {
@Test
public void shouldMineOnMultipleNodesEvenWhenClusterContainsNonValidator() throws Exception {

@@ -14,14 +14,14 @@
*/
package org.hyperledger.besu.tests.acceptance.clique;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBase;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBaseJunit5;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import java.io.IOException;
import org.junit.Test;
import org.junit.jupiter.api.Test;
public class CliqueDiscardRpcAcceptanceTest extends AcceptanceTestBase {
public class CliqueDiscardRpcAcceptanceTest extends AcceptanceTestBaseJunit5 {
@Test
public void shouldDiscardVotes() throws IOException {

@@ -16,19 +16,19 @@ package org.hyperledger.besu.tests.acceptance.clique;
import static org.hyperledger.besu.tests.acceptance.dsl.transaction.clique.CliqueTransactions.LATEST;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBase;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBaseJunit5;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
@Ignore
public class CliqueGetSignersRpcTest extends AcceptanceTestBase {
@Disabled("flaky test due to hardcoded block numbers")
public class CliqueGetSignersRpcAcceptanceTest extends AcceptanceTestBaseJunit5 {
private BesuNode minerNode1;
private BesuNode minerNode2;
@Before
@BeforeEach
public void setUp() throws Exception {
final String[] validators = {"miner1"};
minerNode1 = besu.createCliqueNodeWithValidators("miner1", validators);

@@ -14,16 +14,16 @@
*/
package org.hyperledger.besu.tests.acceptance.clique;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBase;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBaseJunit5;
import org.hyperledger.besu.tests.acceptance.dsl.account.Account;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import org.hyperledger.besu.tests.acceptance.dsl.node.configuration.genesis.GenesisConfigurationFactory.CliqueOptions;
import java.io.IOException;
import org.junit.Test;
import org.junit.jupiter.api.Test;
public class CliqueMiningAcceptanceTest extends AcceptanceTestBase {
public class CliqueMiningAcceptanceTest extends AcceptanceTestBaseJunit5 {
@Test
public void shouldMineTransactionsOnSingleNode() throws IOException {

@@ -14,14 +14,14 @@
*/
package org.hyperledger.besu.tests.acceptance.clique;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBase;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBaseJunit5;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import java.io.IOException;
import org.junit.Test;
import org.junit.jupiter.api.Test;
public class CliqueProposalRpcAcceptanceTest extends AcceptanceTestBase {
public class CliqueProposalRpcAcceptanceTest extends AcceptanceTestBaseJunit5 {
@Test
public void shouldReturnProposals() throws IOException {

@@ -14,16 +14,16 @@
*/
package org.hyperledger.besu.tests.acceptance.clique;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBase;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBaseJunit5;
import org.hyperledger.besu.tests.acceptance.dsl.condition.Condition;
import org.hyperledger.besu.tests.acceptance.dsl.condition.clique.ExpectNonceVote.CLIQUE_NONCE_VOTE;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import java.io.IOException;
import org.junit.Test;
import org.junit.jupiter.api.Test;
public class CliqueProposeRpcAcceptanceTest extends AcceptanceTestBase {
public class CliqueProposeRpcAcceptanceTest extends AcceptanceTestBaseJunit5 {
@Test
public void shouldAddValidators() throws IOException {

@@ -14,14 +14,14 @@
*/
package org.hyperledger.besu.tests.acceptance.clique;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBase;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBaseJunit5;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import java.io.IOException;
import org.junit.Test;
import org.junit.jupiter.api.Test;
public class CliqueZeroValidators extends AcceptanceTestBase {
public class CliqueZeroValidatorsAcceptanceTest extends AcceptanceTestBaseJunit5 {
@Test
public void zeroValidatorsFormValidCluster() throws IOException {

@@ -17,7 +17,7 @@ package org.hyperledger.besu.tests.acceptance.plugins;
import static org.assertj.core.api.Assertions.assertThat;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBase;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBaseJunit5;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import java.io.File;
@@ -33,7 +33,7 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
public class BadCLIOptionsPluginTest extends AcceptanceTestBase {
public class BadCLIOptionsPluginTest extends AcceptanceTestBaseJunit5 {
private BesuNode node;
@BeforeEach

@@ -14,7 +14,7 @@
*/
package org.hyperledger.besu.tests.acceptance.plugins;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBase;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBaseJunit5;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import java.io.File;
@@ -28,7 +28,7 @@ import org.awaitility.Awaitility;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
public class BesuEventsPluginTest extends AcceptanceTestBase {
public class BesuEventsPluginTest extends AcceptanceTestBaseJunit5 {
private BesuNode pluginNode;
private BesuNode minerNode;

@@ -16,7 +16,7 @@
package org.hyperledger.besu.tests.acceptance.plugins;
import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBase;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBaseJunit5;
import org.hyperledger.besu.tests.acceptance.dsl.account.Account;
import org.hyperledger.besu.tests.acceptance.dsl.blockchain.Amount;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
@@ -28,7 +28,7 @@ import java.util.List;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
public class PermissioningPluginTest extends AcceptanceTestBase {
public class PermissioningPluginTest extends AcceptanceTestBaseJunit5 {
private BesuNode minerNode;
private BesuNode aliceNode;

@@ -608,6 +608,8 @@ public class RunnerBuilder {
bootstrap = ethNetworkConfig.getBootNodes();
}
discoveryConfiguration.setBootnodes(bootstrap);
LOG.info("Resolved {} bootnodes.", bootstrap.size());
LOG.debug("Bootnodes = {}", bootstrap);
discoveryConfiguration.setDnsDiscoveryURL(ethNetworkConfig.getDnsDiscoveryUrl());
discoveryConfiguration.setDiscoveryV5Enabled(
networkingConfiguration.getDiscovery().isDiscoveryV5Enabled());

@@ -520,11 +520,11 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
private SyncMode syncMode = null;
@Option(
names = {"--fast-sync-min-peers"},
names = {"--sync-min-peers", "--fast-sync-min-peers"},
paramLabel = MANDATORY_INTEGER_FORMAT_HELP,
description =
"Minimum number of peers required before starting fast sync. Has only effect on PoW networks. (default: ${DEFAULT-VALUE})")
private final Integer fastSyncMinPeerCount = FAST_SYNC_MIN_PEER_COUNT;
"Minimum number of peers required before starting sync. Has effect only on non-PoS networks. (default: ${DEFAULT-VALUE})")
private final Integer syncMinPeerCount = SYNC_MIN_PEER_COUNT;
@Option(
names = {"--network"},
@@ -1437,7 +1437,6 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
if (network != null && network.isDeprecated()) {
logger.warn(NetworkDeprecationMessage.generate(network));
}
try {
configureLogging(true);
@@ -1795,7 +1794,7 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
if (kzgTrustedSetupFile != null) {
KZGPointEvalPrecompiledContract.init(kzgTrustedSetupFile);
} else {
KZGPointEvalPrecompiledContract.init(network.name());
KZGPointEvalPrecompiledContract.init();
}
} else if (kzgTrustedSetupFile != null) {
throw new ParameterException(
@@ -2867,7 +2866,7 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
return unstableSynchronizerOptions
.toDomainObject()
.syncMode(syncMode)
.fastSyncMinimumPeerCount(fastSyncMinPeerCount)
.fastSyncMinimumPeerCount(syncMinPeerCount)
.build();
}
@@ -3120,14 +3119,9 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
if (listBootNodes != null) {
if (!p2PDiscoveryOptionGroup.peerDiscoveryEnabled) {
logger.warn("Discovery disabled: bootnodes will be ignored.");
} else {
logger.info("Configured {} bootnodes.", listBootNodes.size());
logger.debug("Bootnodes = {}", listBootNodes);
}
DiscoveryConfiguration.assertValidBootnodes(listBootNodes);
builder.setBootNodes(listBootNodes);
} else {
logger.info("0 Bootnodes configured");
}
return builder.build();
}
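
The rename above keeps --fast-sync-min-peers working as an alias of the new --sync-min-peers flag simply by listing both names on the same picocli option. A stripped-down sketch of that pattern (the command, class and field below are illustrative; only the option names and the default of 5 come from this diff):

import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;

// Hypothetical standalone command demonstrating the alias pattern; not Besu code.
@Command(name = "sync-options-sketch", mixinStandardHelpOptions = true)
final class SyncOptionsSketch implements Runnable {

  // Both names bind to the same field, so scripts that still pass the old flag keep working.
  @Option(
      names = {"--sync-min-peers", "--fast-sync-min-peers"},
      description = "Minimum number of peers required before starting sync. (default: ${DEFAULT-VALUE})")
  private Integer syncMinPeerCount = 5;

  @Override
  public void run() {
    System.out.println("sync-min-peers = " + syncMinPeerCount);
  }

  public static void main(final String[] args) {
    System.exit(new CommandLine(new SyncOptionsSketch()).execute(args));
  }
}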

@@ -66,8 +66,8 @@ public interface DefaultCommandValues {
NatMethod DEFAULT_NAT_METHOD = NatMethod.AUTO;
/** The constant DEFAULT_JWT_ALGORITHM. */
JwtAlgorithm DEFAULT_JWT_ALGORITHM = JwtAlgorithm.RS256;
/** The constant FAST_SYNC_MIN_PEER_COUNT. */
int FAST_SYNC_MIN_PEER_COUNT = 5;
/** The constant SYNC_MIN_PEER_COUNT. */
int SYNC_MIN_PEER_COUNT = 5;
/** The constant DEFAULT_MAX_PEERS. */
int DEFAULT_MAX_PEERS = 25;
/** The constant DEFAULT_P2P_PEER_LOWER_BOUND. */

@ -61,7 +61,7 @@ import picocli.CommandLine.ParentCommand;
@Command(
name = "generate-blockchain-config",
description = "Generates node keypairs and genesis file with RLP encoded extra data.",
description = "Generate node keypairs and genesis file with RLP encoded extra data.",
mixinStandardHelpOptions = true,
versionProvider = VersionProvider.class)
class GenerateBlockchainConfig implements Runnable {

@ -34,17 +34,17 @@ public class RocksDbUsageHelper {
final RocksDB rocksdb, final ColumnFamilyHandle cfHandle, final PrintWriter out)
throws RocksDBException, NumberFormatException {
final String size = rocksdb.getProperty(cfHandle, "rocksdb.estimate-live-data-size");
final String numberOfKeys = rocksdb.getProperty(cfHandle, "rocksdb.estimate-num-keys");
boolean emptyColumnFamily = false;
if (!size.isEmpty() && !size.isBlank()) {
if (!size.isBlank() && !numberOfKeys.isBlank()) {
try {
final long sizeLong = Long.parseLong(size);
final long numberOfKeysLong = Long.parseLong(numberOfKeys);
final String totalSstFilesSize =
rocksdb.getProperty(cfHandle, "rocksdb.total-sst-files-size");
final long totalSstFilesSizeLong =
!totalSstFilesSize.isEmpty() && !totalSstFilesSize.isBlank()
? Long.parseLong(totalSstFilesSize)
: 0;
if (sizeLong == 0) {
!totalSstFilesSize.isBlank() ? Long.parseLong(totalSstFilesSize) : 0;
if (sizeLong == 0 && numberOfKeysLong == 0) {
emptyColumnFamily = true;
}
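The change above treats a column family as empty only when both RocksDB estimates (live data size and key count) report zero, instead of relying on the live data size alone. A small illustrative helper, a sketch rather than Besu code, showing the same check in isolation:

import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;

// Illustrative sketch, not Besu code: a column family is considered empty only when
// both the "estimate-live-data-size" and "estimate-num-keys" properties report zero.
// Both properties are estimates, so the result is a heuristic.
final class ColumnFamilyEmptinessCheck {

  static boolean isLikelyEmpty(final RocksDB db, final ColumnFamilyHandle handle)
      throws RocksDBException {
    final String size = db.getProperty(handle, "rocksdb.estimate-live-data-size");
    final String numberOfKeys = db.getProperty(handle, "rocksdb.estimate-num-keys");
    if (size.isBlank() || numberOfKeys.isBlank()) {
      return false; // no estimate available, make no claim about emptiness
    }
    return Long.parseLong(size) == 0 && Long.parseLong(numberOfKeys) == 0;
  }
}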

@ -45,7 +45,11 @@ import picocli.CommandLine.Spec;
description = "This command provides storage related actions.",
mixinStandardHelpOptions = true,
versionProvider = VersionProvider.class,
subcommands = {StorageSubCommand.RevertVariablesStorage.class, RocksDbSubCommand.class})
subcommands = {
StorageSubCommand.RevertVariablesStorage.class,
RocksDbSubCommand.class,
TrieLogSubCommand.class
})
public class StorageSubCommand implements Runnable {
/** The constant COMMAND_NAME. */

@ -0,0 +1,361 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.subcommands.storage;
import static com.google.common.base.Preconditions.checkArgument;
import static org.hyperledger.besu.controller.BesuController.DATABASE_PATH;
import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.ethereum.chain.Blockchain;
import org.hyperledger.besu.ethereum.chain.MutableBlockchain;
import org.hyperledger.besu.ethereum.core.BlockHeader;
import org.hyperledger.besu.ethereum.trie.bonsai.storage.BonsaiWorldStateKeyValueStorage;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.PrintWriter;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.tuweni.bytes.Bytes32;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Helper class for counting and pruning trie logs */
public class TrieLogHelper {
private static final String TRIE_LOG_FILE = "trieLogsToRetain";
private static final long BATCH_SIZE = 20_000;
private static final int ROCKSDB_MAX_INSERTS_PER_TRANSACTION = 1000;
private static final Logger LOG = LoggerFactory.getLogger(TrieLogHelper.class);
static void prune(
final DataStorageConfiguration config,
final BonsaiWorldStateKeyValueStorage rootWorldStateStorage,
final MutableBlockchain blockchain,
final Path dataDirectoryPath) {
final String batchFileNameBase =
dataDirectoryPath.resolve(DATABASE_PATH).resolve(TRIE_LOG_FILE).toString();
validatePruneConfiguration(config);
final long layersToRetain = config.getUnstable().getBonsaiTrieLogRetentionThreshold();
final long chainHeight = blockchain.getChainHeadBlockNumber();
final long lastBlockNumberToRetainTrieLogsFor = chainHeight - layersToRetain + 1;
if (!validPruneRequirements(blockchain, chainHeight, lastBlockNumberToRetainTrieLogsFor)) {
return;
}
final long numberOfBatches = calculateNumberofBatches(layersToRetain);
processTrieLogBatches(
rootWorldStateStorage,
blockchain,
chainHeight,
lastBlockNumberToRetainTrieLogsFor,
numberOfBatches,
batchFileNameBase);
if (rootWorldStateStorage.streamTrieLogKeys(layersToRetain).count() == layersToRetain) {
deleteFiles(batchFileNameBase, numberOfBatches);
LOG.info("Prune ran successfully. Enjoy some disk space back! \uD83D\uDE80");
} else {
LOG.error("Prune failed. Re-run the subcommand to load the trie logs from file.");
}
}
private static void processTrieLogBatches(
final BonsaiWorldStateKeyValueStorage rootWorldStateStorage,
final MutableBlockchain blockchain,
final long chainHeight,
final long lastBlockNumberToRetainTrieLogsFor,
final long numberOfBatches,
final String batchFileNameBase) {
for (long batchNumber = 1; batchNumber <= numberOfBatches; batchNumber++) {
final long firstBlockOfBatch = chainHeight - ((batchNumber - 1) * BATCH_SIZE);
final long lastBlockOfBatch =
Math.max(chainHeight - (batchNumber * BATCH_SIZE), lastBlockNumberToRetainTrieLogsFor);
final List<Hash> trieLogKeys =
getTrieLogKeysForBlocks(blockchain, firstBlockOfBatch, lastBlockOfBatch);
saveTrieLogBatches(batchFileNameBase, rootWorldStateStorage, batchNumber, trieLogKeys);
}
LOG.info("Clear trie logs...");
rootWorldStateStorage.clearTrieLog();
for (long batchNumber = 1; batchNumber <= numberOfBatches; batchNumber++) {
restoreTrieLogBatches(rootWorldStateStorage, batchNumber, batchFileNameBase);
}
}
private static void saveTrieLogBatches(
final String batchFileNameBase,
final BonsaiWorldStateKeyValueStorage rootWorldStateStorage,
final long batchNumber,
final List<Hash> trieLogKeys) {
LOG.info("Saving trie logs to retain in file (batch {})...", batchNumber);
try {
saveTrieLogsInFile(trieLogKeys, rootWorldStateStorage, batchNumber, batchFileNameBase);
} catch (IOException e) {
LOG.error("Error saving trie logs to file: {}", e.getMessage());
throw new RuntimeException(e);
}
}
private static void restoreTrieLogBatches(
final BonsaiWorldStateKeyValueStorage rootWorldStateStorage,
final long batchNumber,
final String batchFileNameBase) {
try {
LOG.info("Restoring trie logs retained from batch {}...", batchNumber);
recreateTrieLogs(rootWorldStateStorage, batchNumber, batchFileNameBase);
} catch (IOException e) {
LOG.error("Error recreating trie logs from batch {}: {}", batchNumber, e.getMessage());
throw new RuntimeException(e);
}
}
private static void deleteFiles(final String batchFileNameBase, final long numberOfBatches) {
LOG.info("Deleting files...");
for (long batchNumber = 1; batchNumber <= numberOfBatches; batchNumber++) {
File file = new File(batchFileNameBase + "-" + batchNumber);
if (file.exists()) {
file.delete();
}
}
}
private static List<Hash> getTrieLogKeysForBlocks(
final MutableBlockchain blockchain,
final long firstBlockOfBatch,
final long lastBlockOfBatch) {
final List<Hash> trieLogKeys = new ArrayList<>();
for (long i = firstBlockOfBatch; i >= lastBlockOfBatch; i--) {
final Optional<BlockHeader> header = blockchain.getBlockHeader(i);
header.ifPresentOrElse(
blockHeader -> trieLogKeys.add(blockHeader.getHash()),
() -> LOG.error("Error retrieving block"));
}
return trieLogKeys;
}
private static long calculateNumberofBatches(final long layersToRetain) {
return layersToRetain / BATCH_SIZE + ((layersToRetain % BATCH_SIZE == 0) ? 0 : 1);
}
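A worked example of the batch arithmetic, with illustrative figures only: given BATCH_SIZE = 20_000 as defined above, retaining 50,000 layers at a chain height of 1,000,000 keeps trie logs from block 950,001 upwards and copies them out across 3 batch files.

// Standalone sketch of the arithmetic used by prune() and calculateNumberofBatches();
// the numbers are made up and not taken from any real network.
public class PruneBatchMath {
  static final long BATCH_SIZE = 20_000;

  static long numberOfBatches(final long layersToRetain) {
    return layersToRetain / BATCH_SIZE + ((layersToRetain % BATCH_SIZE == 0) ? 0 : 1);
  }

  public static void main(final String[] args) {
    final long chainHeight = 1_000_000L;
    final long layersToRetain = 50_000L;
    // Oldest block whose trie log is retained: 1_000_000 - 50_000 + 1 = 950_001
    System.out.println(chainHeight - layersToRetain + 1);
    // ceil(50_000 / 20_000) = 3 batch files
    System.out.println(numberOfBatches(layersToRetain));
  }
}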
private static boolean validPruneRequirements(
final MutableBlockchain blockchain,
final long chainHeight,
final long lastBlockNumberToRetainTrieLogsFor) {
if (lastBlockNumberToRetainTrieLogsFor < 0) {
throw new IllegalArgumentException(
"Trying to retain more trie logs than chain length ("
+ chainHeight
+ "), skipping pruning");
}
final Optional<Hash> finalizedBlockHash = blockchain.getFinalized();
if (finalizedBlockHash.isEmpty()) {
throw new RuntimeException("No finalized block present, can't safely run trie log prune");
} else {
final Hash finalizedHash = finalizedBlockHash.get();
final Optional<BlockHeader> finalizedBlockHeader = blockchain.getBlockHeader(finalizedHash);
if (finalizedBlockHeader.isPresent()
&& finalizedBlockHeader.get().getNumber() < lastBlockNumberToRetainTrieLogsFor) {
throw new IllegalArgumentException(
"Trying to prune more layers than the finalized block height, skipping pruning");
}
}
return true;
}
private static void recreateTrieLogs(
final BonsaiWorldStateKeyValueStorage rootWorldStateStorage,
final long batchNumber,
final String batchFileNameBase)
throws IOException {
// process in chunks to avoid OOM
IdentityHashMap<byte[], byte[]> trieLogsToRetain =
readTrieLogsFromFile(batchFileNameBase, batchNumber);
final int chunkSize = ROCKSDB_MAX_INSERTS_PER_TRANSACTION;
List<byte[]> keys = new ArrayList<>(trieLogsToRetain.keySet());
for (int startIndex = 0; startIndex < keys.size(); startIndex += chunkSize) {
processTransactionChunk(startIndex, chunkSize, keys, trieLogsToRetain, rootWorldStateStorage);
}
}
private static void processTransactionChunk(
final int startIndex,
final int chunkSize,
final List<byte[]> keys,
final IdentityHashMap<byte[], byte[]> trieLogsToRetain,
final BonsaiWorldStateKeyValueStorage rootWorldStateStorage) {
var updater = rootWorldStateStorage.updater();
int endIndex = Math.min(startIndex + chunkSize, keys.size());
for (int i = startIndex; i < endIndex; i++) {
byte[] key = keys.get(i);
byte[] value = trieLogsToRetain.get(key);
updater.getTrieLogStorageTransaction().put(key, value);
LOG.info("Key({}): {}", i, Bytes32.wrap(key).toShortHexString());
}
updater.getTrieLogStorageTransaction().commit();
}
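The chunking above keeps each RocksDB write transaction bounded. As an illustrative example (the key count is made up): with ROCKSDB_MAX_INSERTS_PER_TRANSACTION = 1000 and 2,500 retained keys, three transactions are committed, covering index ranges [0, 1000), [1000, 2000) and [2000, 2500).

// Standalone sketch of the chunked-commit loop above; prints the range each
// transaction would cover for a hypothetical batch of 2_500 keys.
public class ChunkedCommitSketch {
  public static void main(final String[] args) {
    final int chunkSize = 1000; // mirrors ROCKSDB_MAX_INSERTS_PER_TRANSACTION
    final int totalKeys = 2_500;
    for (int start = 0; start < totalKeys; start += chunkSize) {
      final int end = Math.min(start + chunkSize, totalKeys);
      System.out.printf("commit transaction for keys [%d, %d)%n", start, end);
    }
  }
}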
private static void validatePruneConfiguration(final DataStorageConfiguration config) {
checkArgument(
config.getUnstable().getBonsaiTrieLogRetentionThreshold()
>= config.getBonsaiMaxLayersToLoad(),
String.format(
"--Xbonsai-trie-log-retention-threshold minimum value is %d",
config.getBonsaiMaxLayersToLoad()));
checkArgument(
config.getUnstable().getBonsaiTrieLogPruningLimit() > 0,
String.format(
"--Xbonsai-trie-log-pruning-limit=%d must be greater than 0",
config.getUnstable().getBonsaiTrieLogPruningLimit()));
checkArgument(
config.getUnstable().getBonsaiTrieLogPruningLimit()
> config.getUnstable().getBonsaiTrieLogRetentionThreshold(),
String.format(
"--Xbonsai-trie-log-pruning-limit=%d must greater than --Xbonsai-trie-log-retention-threshold=%d",
config.getUnstable().getBonsaiTrieLogPruningLimit(),
config.getUnstable().getBonsaiTrieLogRetentionThreshold()));
}
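Taken together, the checks require the retention threshold to be at least bonsaiMaxLayersToLoad, the pruning limit to be positive, and the pruning limit to exceed the retention threshold. A sketch with purely illustrative values (not recommended defaults) that satisfies all three constraints:

import static com.google.common.base.Preconditions.checkArgument;

// Illustrative values only; they satisfy the three constraints enforced above.
public class PruneConfigExample {
  public static void main(final String[] args) {
    final long maxLayersToLoad = 512;    // stands in for getBonsaiMaxLayersToLoad()
    final long retentionThreshold = 512; // must be >= maxLayersToLoad
    final long pruningLimit = 1024;      // must be > 0 and > retentionThreshold

    checkArgument(retentionThreshold >= maxLayersToLoad, "retention threshold too low");
    checkArgument(pruningLimit > 0, "pruning limit must be positive");
    checkArgument(pruningLimit > retentionThreshold, "pruning limit must exceed retention threshold");
    System.out.println("valid prune configuration");
  }
}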
private static void saveTrieLogsInFile(
final List<Hash> trieLogsKeys,
final BonsaiWorldStateKeyValueStorage rootWorldStateStorage,
final long batchNumber,
final String batchFileNameBase)
throws IOException {
File file = new File(batchFileNameBase + "-" + batchNumber);
if (file.exists()) {
LOG.error("File already exists, skipping file creation");
return;
}
try (FileOutputStream fos = new FileOutputStream(file)) {
ObjectOutputStream oos = new ObjectOutputStream(fos);
oos.writeObject(getTrieLogs(trieLogsKeys, rootWorldStateStorage));
} catch (IOException e) {
LOG.error(e.getMessage());
throw new RuntimeException(e);
}
}
@SuppressWarnings("unchecked")
private static IdentityHashMap<byte[], byte[]> readTrieLogsFromFile(
final String batchFileNameBase, final long batchNumber) {
IdentityHashMap<byte[], byte[]> trieLogs;
try (FileInputStream fis = new FileInputStream(batchFileNameBase + "-" + batchNumber);
ObjectInputStream ois = new ObjectInputStream(fis)) {
trieLogs = (IdentityHashMap<byte[], byte[]>) ois.readObject();
} catch (IOException | ClassNotFoundException e) {
LOG.error(e.getMessage());
throw new RuntimeException(e);
}
return trieLogs;
}
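Saving and restoring batches relies on plain Java object serialization of an IdentityHashMap<byte[], byte[]>. A minimal, self-contained round-trip sketch (a toy demo, not Besu code) of the same mechanism:

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.IdentityHashMap;

// Toy demo of the round trip used by saveTrieLogsInFile and readTrieLogsFromFile:
// write the map with ObjectOutputStream, read it back with ObjectInputStream, and
// cast it to the expected IdentityHashMap type.
public class TrieLogFileRoundTrip {
  @SuppressWarnings("unchecked")
  public static void main(final String[] args) throws IOException, ClassNotFoundException {
    final IdentityHashMap<byte[], byte[]> logs = new IdentityHashMap<>();
    logs.put(new byte[] {0x01}, new byte[] {0x0a});

    final File file = File.createTempFile("trieLogsToRetain", "-1");
    try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(file))) {
      oos.writeObject(logs);
    }
    try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(file))) {
      final IdentityHashMap<byte[], byte[]> restored =
          (IdentityHashMap<byte[], byte[]>) ois.readObject();
      System.out.println("restored entries: " + restored.size()); // 1
    }
    file.delete();
  }
}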
private static IdentityHashMap<byte[], byte[]> getTrieLogs(
final List<Hash> trieLogKeys, final BonsaiWorldStateKeyValueStorage rootWorldStateStorage) {
IdentityHashMap<byte[], byte[]> trieLogsToRetain = new IdentityHashMap<>();
LOG.info("Obtaining trielogs from db, this may take a few minutes...");
trieLogKeys.forEach(
hash ->
rootWorldStateStorage
.getTrieLog(hash)
.ifPresent(trieLog -> trieLogsToRetain.put(hash.toArrayUnsafe(), trieLog)));
return trieLogsToRetain;
}
static TrieLogCount getCount(
final BonsaiWorldStateKeyValueStorage rootWorldStateStorage,
final int limit,
final Blockchain blockchain) {
final AtomicInteger total = new AtomicInteger();
final AtomicInteger canonicalCount = new AtomicInteger();
final AtomicInteger forkCount = new AtomicInteger();
final AtomicInteger orphanCount = new AtomicInteger();
rootWorldStateStorage
.streamTrieLogKeys(limit)
.map(Bytes32::wrap)
.map(Hash::wrap)
.forEach(
hash -> {
total.getAndIncrement();
blockchain
.getBlockHeader(hash)
.ifPresentOrElse(
(header) -> {
long number = header.getNumber();
final Optional<BlockHeader> headerByNumber =
blockchain.getBlockHeader(number);
if (headerByNumber.isPresent()
&& headerByNumber.get().getHash().equals(hash)) {
canonicalCount.getAndIncrement();
} else {
forkCount.getAndIncrement();
}
},
orphanCount::getAndIncrement);
});
return new TrieLogCount(total.get(), canonicalCount.get(), forkCount.get(), orphanCount.get());
}
static void printCount(final PrintWriter out, final TrieLogCount count) {
out.printf(
"trieLog count: %s\n - canonical count: %s\n - fork count: %s\n - orphaned count: %s\n",
count.total, count.canonicalCount, count.forkCount, count.orphanCount);
}
record TrieLogCount(int total, int canonicalCount, int forkCount, int orphanCount) {}
}
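getCount sorts every stored trie log key into one of three buckets: canonical when the block's hash is still the canonical hash at its height, fork when a header exists for the hash but a different block is canonical at that height, and orphan when no header can be found for the hash at all. A toy classifier (not Besu code, using plain Strings in place of Hash and BlockHeader) that captures the same decision:

import java.util.Optional;

// Toy classifier, not Besu code: Strings stand in for block hashes and headers.
public class TrieLogClassificationSketch {
  enum Kind { CANONICAL, FORK, ORPHAN }

  static Kind classify(
      final Optional<String> headerForHash,         // header looked up by the trie log key
      final Optional<String> canonicalHashAtHeight, // canonical hash at that header's height
      final String trieLogKeyHash) {
    if (headerForHash.isEmpty()) {
      return Kind.ORPHAN; // no header known for this trie log key
    }
    return canonicalHashAtHeight.filter(trieLogKeyHash::equals).isPresent()
        ? Kind.CANONICAL
        : Kind.FORK;
  }

  public static void main(final String[] args) {
    System.out.println(classify(Optional.of("header"), Optional.of("0xabc"), "0xabc")); // CANONICAL
    System.out.println(classify(Optional.of("header"), Optional.of("0xdef"), "0xabc")); // FORK
    System.out.println(classify(Optional.empty(), Optional.empty(), "0xabc"));          // ORPHAN
  }
}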

@ -0,0 +1,147 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.subcommands.storage;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import org.hyperledger.besu.cli.util.VersionProvider;
import org.hyperledger.besu.controller.BesuController;
import org.hyperledger.besu.ethereum.chain.MutableBlockchain;
import org.hyperledger.besu.ethereum.storage.StorageProvider;
import org.hyperledger.besu.ethereum.trie.bonsai.storage.BonsaiWorldStateKeyValueStorage;
import org.hyperledger.besu.ethereum.trie.bonsai.trielog.TrieLogPruner;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.ethereum.worldstate.DataStorageFormat;
import java.io.PrintWriter;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.config.Configurator;
import org.slf4j.LoggerFactory;
import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.ParentCommand;
/** The Trie Log subcommand. */
@Command(
name = "x-trie-log",
description = "Manipulate trie logs",
mixinStandardHelpOptions = true,
versionProvider = VersionProvider.class,
subcommands = {TrieLogSubCommand.CountTrieLog.class, TrieLogSubCommand.PruneTrieLog.class})
public class TrieLogSubCommand implements Runnable {
@SuppressWarnings("UnusedVariable")
@ParentCommand
private static StorageSubCommand parentCommand;
@SuppressWarnings("unused")
@CommandLine.Spec
private CommandLine.Model.CommandSpec spec; // Picocli injects reference to command spec
@Override
public void run() {
final PrintWriter out = spec.commandLine().getOut();
spec.commandLine().usage(out);
}
private static BesuController createBesuController() {
return parentCommand.parentCommand.buildController();
}
@Command(
name = "count",
description = "This command counts all the trie logs",
mixinStandardHelpOptions = true,
versionProvider = VersionProvider.class)
static class CountTrieLog implements Runnable {
@SuppressWarnings("unused")
@ParentCommand
private TrieLogSubCommand parentCommand;
@SuppressWarnings("unused")
@CommandLine.Spec
private CommandLine.Model.CommandSpec spec; // Picocli injects reference to command spec
@Override
public void run() {
TrieLogContext context = getTrieLogContext();
final PrintWriter out = spec.commandLine().getOut();
out.println("Counting trie logs...");
TrieLogHelper.printCount(
out,
TrieLogHelper.getCount(
context.rootWorldStateStorage, Integer.MAX_VALUE, context.blockchain));
}
}
@Command(
name = "prune",
description =
"This command prunes all trie log layers below the retention threshold, including orphaned trie logs.",
mixinStandardHelpOptions = true,
versionProvider = VersionProvider.class)
static class PruneTrieLog implements Runnable {
@SuppressWarnings("unused")
@ParentCommand
private TrieLogSubCommand parentCommand;
@SuppressWarnings("unused")
@CommandLine.Spec
private CommandLine.Model.CommandSpec spec; // Picocli injects reference to command spec
@Override
public void run() {
TrieLogContext context = getTrieLogContext();
final Path dataDirectoryPath =
Paths.get(
TrieLogSubCommand.parentCommand.parentCommand.dataDir().toAbsolutePath().toString());
TrieLogHelper.prune(
context.config(),
context.rootWorldStateStorage(),
context.blockchain(),
dataDirectoryPath);
}
}
record TrieLogContext(
DataStorageConfiguration config,
BonsaiWorldStateKeyValueStorage rootWorldStateStorage,
MutableBlockchain blockchain) {}
private static TrieLogContext getTrieLogContext() {
Configurator.setLevel(LoggerFactory.getLogger(TrieLogPruner.class).getName(), Level.DEBUG);
checkNotNull(parentCommand);
BesuController besuController = createBesuController();
final DataStorageConfiguration config = besuController.getDataStorageConfiguration();
checkArgument(
DataStorageFormat.BONSAI.equals(config.getDataStorageFormat()),
"Subcommand only works with data-storage-format=BONSAI");
final StorageProvider storageProvider = besuController.getStorageProvider();
final BonsaiWorldStateKeyValueStorage rootWorldStateStorage =
(BonsaiWorldStateKeyValueStorage)
storageProvider.createWorldStateStorage(DataStorageFormat.BONSAI);
final MutableBlockchain blockchain = besuController.getProtocolContext().getBlockchain();
return new TrieLogContext(config, rootWorldStateStorage, blockchain);
}
}
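With the registration added to StorageSubCommand above, the new commands are reached through the storage subcommand, presumably along the lines of `besu storage x-trie-log count` and `besu storage x-trie-log prune` (the exact top-level invocation is an assumption here; only the x-trie-log, count and prune names come from this change). A standalone picocli sketch, not Besu code, of the parent-prints-usage pattern TrieLogSubCommand uses:

import java.io.PrintWriter;
import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Spec;

// Standalone sketch, not Besu code: a parent command that only prints its own usage,
// delegating the real work to nested subcommands such as "count".
@Command(name = "x-trie-log", subcommands = {TrieLogDemo.Count.class})
class TrieLogDemo implements Runnable {

  @Spec CommandLine.Model.CommandSpec spec; // injected by picocli

  @Override
  public void run() {
    final PrintWriter out = spec.commandLine().getOut();
    spec.commandLine().usage(out); // invoked without a subcommand: show help
  }

  @Command(name = "count")
  static class Count implements Runnable {
    @Override
    public void run() {
      System.out.println("counting...");
    }
  }

  public static void main(final String[] args) {
    new CommandLine(new TrieLogDemo()).execute("count"); // runs the nested subcommand
    new CommandLine(new TrieLogDemo()).execute();        // prints usage for x-trie-log
  }
}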

@ -37,6 +37,7 @@ import org.hyperledger.besu.ethereum.eth.transactions.TransactionPool;
import org.hyperledger.besu.ethereum.mainnet.ProtocolSchedule;
import org.hyperledger.besu.ethereum.p2p.config.SubProtocolConfiguration;
import org.hyperledger.besu.ethereum.storage.StorageProvider;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import java.io.Closeable;
import java.io.IOException;
@ -77,6 +78,7 @@ public class BesuController implements java.io.Closeable {
private final SyncState syncState;
private final EthPeers ethPeers;
private final StorageProvider storageProvider;
private final DataStorageConfiguration dataStorageConfiguration;
/**
* Instantiates a new Besu controller.
@ -96,6 +98,9 @@ public class BesuController implements java.io.Closeable {
* @param nodeKey the node key
* @param closeables the closeables
* @param additionalPluginServices the additional plugin services
* @param ethPeers the eth peers
* @param storageProvider the storage provider
* @param dataStorageConfiguration the data storage configuration
*/
BesuController(
final ProtocolSchedule protocolSchedule,
@ -114,7 +119,8 @@ public class BesuController implements java.io.Closeable {
final List<Closeable> closeables,
final PluginServiceFactory additionalPluginServices,
final EthPeers ethPeers,
final StorageProvider storageProvider) {
final StorageProvider storageProvider,
final DataStorageConfiguration dataStorageConfiguration) {
this.protocolSchedule = protocolSchedule;
this.protocolContext = protocolContext;
this.ethProtocolManager = ethProtocolManager;
@ -132,6 +138,7 @@ public class BesuController implements java.io.Closeable {
this.additionalPluginServices = additionalPluginServices;
this.ethPeers = ethPeers;
this.storageProvider = storageProvider;
this.dataStorageConfiguration = dataStorageConfiguration;
}
/**
@ -293,6 +300,15 @@ public class BesuController implements java.io.Closeable {
return additionalPluginServices;
}
/**
* Gets data storage configuration.
*
* @return the data storage configuration
*/
public DataStorageConfiguration getDataStorageConfiguration() {
return dataStorageConfiguration;
}
/** The type Builder. */
public static class Builder {

@ -803,7 +803,8 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
closeables,
additionalPluginServices,
ethPeers,
storageProvider);
storageProvider,
dataStorageConfiguration);
}
/**

@ -45,183 +45,173 @@ import java.util.stream.Stream;
import com.google.common.collect.Streams;
import org.apache.tuweni.bytes.Bytes;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Enclosed.class)
@RunWith(Parameterized.class)
public class ForkIdsNetworkConfigTest {
public static class NotParameterized {
@Test
public void testFromRaw() {
final ForkId forkId = new ForkId(Bytes.ofUnsignedInt(0xfe3366e7L), 1735371L);
final List<List<Bytes>> forkIdAsBytesList = List.of(forkId.getForkIdAsBytesList());
assertThat(ForkId.fromRawForkId(forkIdAsBytesList).get()).isEqualTo(forkId);
}
@Parameterized.Parameter public NetworkName chainName;
@Parameterized.Parameter(1)
public List<ForkId> expectedForkIds;
@Parameterized.Parameters(name = "{0}")
public static Collection<Object[]> parameters() {
return List.of(
new Object[] {
NetworkName.SEPOLIA,
List.of(
new ForkId(Bytes.ofUnsignedInt(0xfe3366e7L), 1735371L),
new ForkId(Bytes.ofUnsignedInt(0xb96cbd13L), 1677557088L),
new ForkId(Bytes.ofUnsignedInt(0xf7f9bc08L), 1706655072L),
new ForkId(Bytes.ofUnsignedInt(0x88cf81d9L), 0L),
new ForkId(Bytes.ofUnsignedInt(0x88cf81d9L), 0L))
},
new Object[] {
NetworkName.HOLESKY,
List.of(
new ForkId(Bytes.ofUnsignedInt(0xc61a6098L), 1696000704L),
new ForkId(Bytes.ofUnsignedInt(0xfd4f016bL), 1707305664L),
new ForkId(Bytes.ofUnsignedInt(0x9b192ad0L), 0L),
new ForkId(Bytes.ofUnsignedInt(0x9b192ad0L), 0L))
},
new Object[] {
NetworkName.GOERLI,
List.of(
new ForkId(Bytes.ofUnsignedInt(0xa3f5ab08L), 1561651L),
new ForkId(Bytes.ofUnsignedInt(0xc25efa5cL), 4460644L),
new ForkId(Bytes.ofUnsignedInt(0x757a1c47L), 5062605L),
new ForkId(Bytes.ofUnsignedInt(0xb8c6299dL), 1678832736L),
new ForkId(Bytes.ofUnsignedInt(0xf9843abfL), 1705473120),
new ForkId(Bytes.ofUnsignedInt(0x70cc14e2L), 0L),
new ForkId(Bytes.ofUnsignedInt(0x70cc14e2L), 0L))
},
new Object[] {
NetworkName.MAINNET,
List.of(
new ForkId(Bytes.ofUnsignedInt(0xfc64ec04L), 1150000L),
new ForkId(Bytes.ofUnsignedInt(0x97c2c34cL), 1920000L),
new ForkId(Bytes.ofUnsignedInt(0x91d1f948L), 2463000L),
new ForkId(Bytes.ofUnsignedInt(0x91d1f948L), 2463000L),
new ForkId(Bytes.ofUnsignedInt(0x91d1f948L), 2463000L),
new ForkId(Bytes.ofUnsignedInt(0x7a64da13L), 2675000L),
new ForkId(Bytes.ofUnsignedInt(0x3edd5b10L), 4370000L),
new ForkId(Bytes.ofUnsignedInt(0xa00bc324L), 7280000L),
new ForkId(Bytes.ofUnsignedInt(0x668db0afL), 9069000L),
new ForkId(Bytes.ofUnsignedInt(0x879d6e30L), 9200000L),
new ForkId(Bytes.ofUnsignedInt(0xe029e991L), 12244000L),
new ForkId(Bytes.ofUnsignedInt(0xeb440f6L), 12965000L),
new ForkId(Bytes.ofUnsignedInt(0xb715077dL), 13773000L),
new ForkId(Bytes.ofUnsignedInt(0x20c327fcL), 15050000L),
new ForkId(Bytes.ofUnsignedInt(0xf0afd0e3L), 1681338455L),
new ForkId(Bytes.ofUnsignedInt(0xdce96c2dL), 0L),
new ForkId(Bytes.ofUnsignedInt(0xdce96c2dL), 0L))
},
new Object[] {
NetworkName.MORDOR,
List.of(
new ForkId(Bytes.ofUnsignedInt(0x175782aaL), 301243L),
new ForkId(Bytes.ofUnsignedInt(0x604f6ee1L), 999983L),
new ForkId(Bytes.ofUnsignedInt(0xf42f5539L), 2520000L),
new ForkId(Bytes.ofUnsignedInt(0x66b5c286L), 3985893),
new ForkId(Bytes.ofUnsignedInt(0x92b323e0L), 5520000L),
new ForkId(Bytes.ofUnsignedInt(0x8c9b1797L), 9957000L),
new ForkId(Bytes.ofUnsignedInt(0x3a6b00d7L), 0L),
new ForkId(Bytes.ofUnsignedInt(0x3a6b00d7L), 0L))
},
new Object[] {
NetworkName.CLASSIC,
List.of(
new ForkId(Bytes.ofUnsignedInt(0xfc64ec04L), 1150000L),
new ForkId(Bytes.ofUnsignedInt(0x97c2c34cL), 2500000L),
new ForkId(Bytes.ofUnsignedInt(0x97c2c34cL), 2500000L),
new ForkId(Bytes.ofUnsignedInt(0x97c2c34cL), 2500000L),
new ForkId(Bytes.ofUnsignedInt(0xdb06803fL), 3000000L),
new ForkId(Bytes.ofUnsignedInt(0xaff4bed4L), 5000000L),
new ForkId(Bytes.ofUnsignedInt(0xf79a63c0L), 5900000L),
new ForkId(Bytes.ofUnsignedInt(0x744899d6L), 8772000L),
new ForkId(Bytes.ofUnsignedInt(0x518b59c6L), 9573000L),
new ForkId(Bytes.ofUnsignedInt(0x7ba22882L), 10500839L),
new ForkId(Bytes.ofUnsignedInt(0x9007bfccL), 11700000L),
new ForkId(Bytes.ofUnsignedInt(0xdb63a1caL), 13189133),
new ForkId(Bytes.ofUnsignedInt(0x0f6bf187L), 14525000L),
new ForkId(Bytes.ofUnsignedInt(0x7fd1bb25L), 19250000L),
new ForkId(Bytes.ofUnsignedInt(0xbe46d57cL), 0L),
new ForkId(Bytes.ofUnsignedInt(0xbe46d57cL), 0L))
});
}
@RunWith(Parameterized.class)
public static class ParametrizedForkIdTest {
@Parameterized.Parameter public NetworkName chainName;
@Parameterized.Parameter(1)
public List<ForkId> expectedForkIds;
@Parameterized.Parameters(name = "{0}")
public static Collection<Object[]> parameters() {
return List.of(
new Object[] {
NetworkName.SEPOLIA,
List.of(
new ForkId(Bytes.ofUnsignedInt(0xfe3366e7L), 1735371L),
new ForkId(Bytes.ofUnsignedInt(0xb96cbd13L), 1677557088L),
new ForkId(Bytes.ofUnsignedInt(0xf7f9bc08L), 0L),
new ForkId(Bytes.ofUnsignedInt(0xf7f9bc08L), 0L))
},
new Object[] {
NetworkName.HOLESKY,
List.of(
new ForkId(Bytes.ofUnsignedInt(0xc61a6098L), 1696000704L),
new ForkId(Bytes.ofUnsignedInt(0xfd4f016bL), 0L),
new ForkId(Bytes.ofUnsignedInt(0xfd4f016bL), 0L))
},
new Object[] {
NetworkName.GOERLI,
List.of(
new ForkId(Bytes.ofUnsignedInt(0xa3f5ab08L), 1561651L),
new ForkId(Bytes.ofUnsignedInt(0xc25efa5cL), 4460644L),
new ForkId(Bytes.ofUnsignedInt(0x757a1c47L), 5062605L),
new ForkId(Bytes.ofUnsignedInt(0xb8c6299dL), 1678832736L),
new ForkId(Bytes.ofUnsignedInt(0xf9843abfL), 0L),
new ForkId(Bytes.ofUnsignedInt(0xf9843abfL), 0L))
},
new Object[] {
NetworkName.MAINNET,
List.of(
new ForkId(Bytes.ofUnsignedInt(0xfc64ec04L), 1150000L),
new ForkId(Bytes.ofUnsignedInt(0x97c2c34cL), 1920000L),
new ForkId(Bytes.ofUnsignedInt(0x91d1f948L), 2463000L),
new ForkId(Bytes.ofUnsignedInt(0x91d1f948L), 2463000L),
new ForkId(Bytes.ofUnsignedInt(0x91d1f948L), 2463000L),
new ForkId(Bytes.ofUnsignedInt(0x7a64da13L), 2675000L),
new ForkId(Bytes.ofUnsignedInt(0x3edd5b10L), 4370000L),
new ForkId(Bytes.ofUnsignedInt(0xa00bc324L), 7280000L),
new ForkId(Bytes.ofUnsignedInt(0x668db0afL), 9069000L),
new ForkId(Bytes.ofUnsignedInt(0x879d6e30L), 9200000L),
new ForkId(Bytes.ofUnsignedInt(0xe029e991L), 12244000L),
new ForkId(Bytes.ofUnsignedInt(0xeb440f6L), 12965000L),
new ForkId(Bytes.ofUnsignedInt(0xb715077dL), 13773000L),
new ForkId(Bytes.ofUnsignedInt(0x20c327fcL), 15050000L),
new ForkId(Bytes.ofUnsignedInt(0xf0afd0e3L), 1681338455L),
new ForkId(Bytes.ofUnsignedInt(0xdce96c2dL), 0L),
new ForkId(Bytes.ofUnsignedInt(0xdce96c2dL), 0L))
},
new Object[] {
NetworkName.MORDOR,
List.of(
new ForkId(Bytes.ofUnsignedInt(0x175782aaL), 301243L),
new ForkId(Bytes.ofUnsignedInt(0x604f6ee1L), 999983L),
new ForkId(Bytes.ofUnsignedInt(0xf42f5539L), 2520000L),
new ForkId(Bytes.ofUnsignedInt(0x66b5c286L), 3985893),
new ForkId(Bytes.ofUnsignedInt(0x92b323e0L), 5520000L),
new ForkId(Bytes.ofUnsignedInt(0x8c9b1797L), 9957000L),
new ForkId(Bytes.ofUnsignedInt(0x3a6b00d7L), 0L),
new ForkId(Bytes.ofUnsignedInt(0x3a6b00d7L), 0L))
},
new Object[] {
NetworkName.CLASSIC,
List.of(
new ForkId(Bytes.ofUnsignedInt(0xfc64ec04L), 1150000L),
new ForkId(Bytes.ofUnsignedInt(0x97c2c34cL), 2500000L),
new ForkId(Bytes.ofUnsignedInt(0x97c2c34cL), 2500000L),
new ForkId(Bytes.ofUnsignedInt(0x97c2c34cL), 2500000L),
new ForkId(Bytes.ofUnsignedInt(0xdb06803fL), 3000000L),
new ForkId(Bytes.ofUnsignedInt(0xaff4bed4L), 5000000L),
new ForkId(Bytes.ofUnsignedInt(0xf79a63c0L), 5900000L),
new ForkId(Bytes.ofUnsignedInt(0x744899d6L), 8772000L),
new ForkId(Bytes.ofUnsignedInt(0x518b59c6L), 9573000L),
new ForkId(Bytes.ofUnsignedInt(0x7ba22882L), 10500839L),
new ForkId(Bytes.ofUnsignedInt(0x9007bfccL), 11700000L),
new ForkId(Bytes.ofUnsignedInt(0xdb63a1caL), 13189133),
new ForkId(Bytes.ofUnsignedInt(0x0f6bf187L), 14525000L),
new ForkId(Bytes.ofUnsignedInt(0x7fd1bb25L), 19250000L),
new ForkId(Bytes.ofUnsignedInt(0xbe46d57cL), 0L),
new ForkId(Bytes.ofUnsignedInt(0xbe46d57cL), 0L))
});
}
@Test
public void testForkId() {
final GenesisConfigFile genesisConfigFile =
GenesisConfigFile.fromConfig(EthNetworkConfig.jsonConfig(chainName));
final MilestoneStreamingTransitionProtocolSchedule schedule =
createSchedule(genesisConfigFile);
final GenesisState genesisState = GenesisState.fromConfig(genesisConfigFile, schedule);
final Blockchain mockBlockchain = mock(Blockchain.class);
final BlockHeader mockBlockHeader = mock(BlockHeader.class);
when(mockBlockchain.getGenesisBlock()).thenReturn(genesisState.getBlock());
final AtomicLong blockNumber = new AtomicLong();
when(mockBlockchain.getChainHeadHeader()).thenReturn(mockBlockHeader);
when(mockBlockHeader.getNumber()).thenAnswer(o -> blockNumber.get());
when(mockBlockHeader.getTimestamp()).thenAnswer(o -> blockNumber.get());
final ForkIdManager forkIdManager =
new ForkIdManager(
mockBlockchain,
genesisConfigFile.getForkBlockNumbers(),
genesisConfigFile.getForkTimestamps(),
false);
final List<ForkId> actualForkIds =
Streams.concat(schedule.streamMilestoneBlocks(), Stream.of(Long.MAX_VALUE))
.map(
block -> {
blockNumber.set(block);
return forkIdManager.getForkIdForChainHead();
})
.collect(Collectors.toList());
assertThat(actualForkIds).containsExactlyElementsOf(expectedForkIds);
}
@ParameterizedTest
@MethodSource("parameters")
public void testForkId(final NetworkName chainName, final List<ForkId> expectedForkIds) {
final GenesisConfigFile genesisConfigFile =
GenesisConfigFile.fromConfig(EthNetworkConfig.jsonConfig(chainName));
final MilestoneStreamingTransitionProtocolSchedule schedule = createSchedule(genesisConfigFile);
final GenesisState genesisState = GenesisState.fromConfig(genesisConfigFile, schedule);
final Blockchain mockBlockchain = mock(Blockchain.class);
final BlockHeader mockBlockHeader = mock(BlockHeader.class);
when(mockBlockchain.getGenesisBlock()).thenReturn(genesisState.getBlock());
final AtomicLong blockNumber = new AtomicLong();
when(mockBlockchain.getChainHeadHeader()).thenReturn(mockBlockHeader);
when(mockBlockHeader.getNumber()).thenAnswer(o -> blockNumber.get());
when(mockBlockHeader.getTimestamp()).thenAnswer(o -> blockNumber.get());
final ForkIdManager forkIdManager =
new ForkIdManager(
mockBlockchain,
genesisConfigFile.getForkBlockNumbers(),
genesisConfigFile.getForkTimestamps(),
false);
final List<ForkId> actualForkIds =
Streams.concat(schedule.streamMilestoneBlocks(), Stream.of(Long.MAX_VALUE))
.map(
block -> {
blockNumber.set(block);
return forkIdManager.getForkIdForChainHead();
})
.collect(Collectors.toList());
assertThat(actualForkIds).containsExactlyElementsOf(expectedForkIds);
}
private static MilestoneStreamingTransitionProtocolSchedule createSchedule(
final GenesisConfigFile genesisConfigFile) {
final GenesisConfigOptions configOptions = genesisConfigFile.getConfigOptions();
MilestoneStreamingProtocolSchedule preMergeProtocolSchedule =
new MilestoneStreamingProtocolSchedule(
(DefaultProtocolSchedule) MainnetProtocolSchedule.fromConfig(configOptions));
MilestoneStreamingProtocolSchedule postMergeProtocolSchedule =
new MilestoneStreamingProtocolSchedule(
(DefaultProtocolSchedule) MergeProtocolSchedule.create(configOptions, false));
final MilestoneStreamingTransitionProtocolSchedule schedule =
new MilestoneStreamingTransitionProtocolSchedule(
preMergeProtocolSchedule, postMergeProtocolSchedule);
return schedule;
}
private static MilestoneStreamingTransitionProtocolSchedule createSchedule(
final GenesisConfigFile genesisConfigFile) {
final GenesisConfigOptions configOptions = genesisConfigFile.getConfigOptions();
MilestoneStreamingProtocolSchedule preMergeProtocolSchedule =
new MilestoneStreamingProtocolSchedule(
(DefaultProtocolSchedule) MainnetProtocolSchedule.fromConfig(configOptions));
MilestoneStreamingProtocolSchedule postMergeProtocolSchedule =
new MilestoneStreamingProtocolSchedule(
(DefaultProtocolSchedule) MergeProtocolSchedule.create(configOptions, false));
final MilestoneStreamingTransitionProtocolSchedule schedule =
new MilestoneStreamingTransitionProtocolSchedule(
preMergeProtocolSchedule, postMergeProtocolSchedule);
return schedule;
}
public static class MilestoneStreamingTransitionProtocolSchedule
extends TransitionProtocolSchedule {
public static class MilestoneStreamingTransitionProtocolSchedule
extends TransitionProtocolSchedule {
private final TransitionUtils<MilestoneStreamingProtocolSchedule> transitionUtils;
private final TransitionUtils<MilestoneStreamingProtocolSchedule> transitionUtils;
public MilestoneStreamingTransitionProtocolSchedule(
final MilestoneStreamingProtocolSchedule preMergeProtocolSchedule,
final MilestoneStreamingProtocolSchedule postMergeProtocolSchedule) {
super(preMergeProtocolSchedule, postMergeProtocolSchedule, PostMergeContext.get());
transitionUtils =
new TransitionUtils<>(
preMergeProtocolSchedule, postMergeProtocolSchedule, PostMergeContext.get());
}
public MilestoneStreamingTransitionProtocolSchedule(
final MilestoneStreamingProtocolSchedule preMergeProtocolSchedule,
final MilestoneStreamingProtocolSchedule postMergeProtocolSchedule) {
super(preMergeProtocolSchedule, postMergeProtocolSchedule, PostMergeContext.get());
transitionUtils =
new TransitionUtils<>(
preMergeProtocolSchedule, postMergeProtocolSchedule, PostMergeContext.get());
}
public Stream<Long> streamMilestoneBlocks() {
return transitionUtils.dispatchFunctionAccordingToMergeState(
MilestoneStreamingProtocolSchedule::streamMilestoneBlocks);
}
public Stream<Long> streamMilestoneBlocks() {
return transitionUtils.dispatchFunctionAccordingToMergeState(
MilestoneStreamingProtocolSchedule::streamMilestoneBlocks);
}
}
}
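The diff above replaces the JUnit 4 Enclosed/Parameterized runners with JUnit 5's @ParameterizedTest and @MethodSource: the parameter set becomes a static factory method, and each parameter is passed straight into the test method instead of being injected into @Parameterized.Parameter fields. A generic, self-contained sketch of that pattern (toy data, unrelated to the fork-id fixtures):

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.util.stream.Stream;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

// Toy example of the JUnit 5 parameterized-test pattern; the data is made up.
class ParameterizedMigrationSketch {

  static Stream<Arguments> parameters() {
    return Stream.of(Arguments.of("sepolia", 7), Arguments.of("holesky", 7));
  }

  @ParameterizedTest(name = "{0}")
  @MethodSource("parameters")
  void networkNameHasExpectedLength(final String network, final int expectedLength) {
    assertEquals(expectedLength, network.length());
  }
}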

@ -0,0 +1,34 @@
/*
* Copyright Hyperledger Besu contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*
*/
package org.hyperledger.besu;
import static org.assertj.core.api.Assertions.assertThat;
import org.hyperledger.besu.ethereum.forkid.ForkId;
import java.util.List;
import org.apache.tuweni.bytes.Bytes;
import org.junit.jupiter.api.Test;
public class RawForkIdTest {
@Test
public void testFromRaw() {
final ForkId forkId = new ForkId(Bytes.ofUnsignedInt(0xfe3366e7L), 1735371L);
final List<List<Bytes>> forkIdAsBytesList = List.of(forkId.getForkIdAsBytesList());
assertThat(ForkId.fromRawForkId(forkIdAsBytesList).get()).isEqualTo(forkId);
}
}

@ -1881,6 +1881,30 @@ public class BesuCommandTest extends CommandTestAbstract {
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
@Test
public void parsesValidSnapSyncMinPeersOption() {
parseCommand("--sync-mode", "X_SNAP", "--sync-min-peers", "11");
verify(mockControllerBuilder).synchronizerConfiguration(syncConfigurationCaptor.capture());
final SynchronizerConfiguration syncConfig = syncConfigurationCaptor.getValue();
assertThat(syncConfig.getSyncMode()).isEqualTo(SyncMode.X_SNAP);
assertThat(syncConfig.getFastSyncMinimumPeerCount()).isEqualTo(11);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
@Test
public void parsesValidSyncMinPeersOption() {
parseCommand("--sync-mode", "FAST", "--sync-min-peers", "11");
verify(mockControllerBuilder).synchronizerConfiguration(syncConfigurationCaptor.capture());
final SynchronizerConfiguration syncConfig = syncConfigurationCaptor.getValue();
assertThat(syncConfig.getSyncMode()).isEqualTo(SyncMode.FAST);
assertThat(syncConfig.getFastSyncMinimumPeerCount()).isEqualTo(11);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
@Test
public void parsesInvalidFastSyncMinPeersOptionWrongFormatShouldFail() {

@ -12,7 +12,7 @@
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.operator;
package org.hyperledger.besu.cli.subcommands.operator;
import static java.lang.String.format;
import static java.lang.System.currentTimeMillis;
@ -22,11 +22,10 @@ import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.contentOf;
import static org.hyperledger.besu.cli.operator.OperatorSubCommandTest.Cmd.cmd;
import static org.hyperledger.besu.cli.subcommands.operator.OperatorSubCommandTest.Cmd.cmd;
import org.hyperledger.besu.BesuInfo;
import org.hyperledger.besu.cli.CommandTestAbstract;
import org.hyperledger.besu.cli.subcommands.operator.OperatorSubCommand;
import org.hyperledger.besu.crypto.SECP256K1;
import org.hyperledger.besu.crypto.SECP256R1;
import org.hyperledger.besu.crypto.SECPPrivateKey;
@ -72,7 +71,7 @@ public class OperatorSubCommandTest extends CommandTestAbstract {
+ System.lineSeparator()
+ "Commands:"
+ System.lineSeparator()
+ " generate-blockchain-config Generates node keypairs and genesis file with RLP"
+ " generate-blockchain-config Generate node keypairs and genesis file with RLP"
+ System.lineSeparator()
+ " encoded extra data."
+ System.lineSeparator()

@ -12,7 +12,7 @@
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.rlp;
package org.hyperledger.besu.cli.subcommands.rlp;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.assertj.core.api.Assertions.assertThat;

@ -0,0 +1,265 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.subcommands.storage;
import static org.hyperledger.besu.ethereum.worldstate.DataStorageFormat.BONSAI;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.ethereum.chain.MutableBlockchain;
import org.hyperledger.besu.ethereum.core.BlockHeader;
import org.hyperledger.besu.ethereum.core.BlockHeaderTestFixture;
import org.hyperledger.besu.ethereum.core.InMemoryKeyValueStorageProvider;
import org.hyperledger.besu.ethereum.storage.StorageProvider;
import org.hyperledger.besu.ethereum.trie.bonsai.storage.BonsaiWorldStateKeyValueStorage;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.ethereum.worldstate.ImmutableDataStorageConfiguration;
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Optional;
import org.apache.tuweni.bytes.Bytes;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.io.TempDir;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
class TrieLogHelperTest {
private static final StorageProvider storageProvider = new InMemoryKeyValueStorageProvider();
private static BonsaiWorldStateKeyValueStorage inMemoryWorldState;
@Mock private MutableBlockchain blockchain;
@TempDir static Path dataDir;
Path test;
static BlockHeader blockHeader1;
static BlockHeader blockHeader2;
static BlockHeader blockHeader3;
static BlockHeader blockHeader4;
static BlockHeader blockHeader5;
@BeforeAll
public static void setup() throws IOException {
blockHeader1 = new BlockHeaderTestFixture().number(1).buildHeader();
blockHeader2 = new BlockHeaderTestFixture().number(2).buildHeader();
blockHeader3 = new BlockHeaderTestFixture().number(3).buildHeader();
blockHeader4 = new BlockHeaderTestFixture().number(4).buildHeader();
blockHeader5 = new BlockHeaderTestFixture().number(5).buildHeader();
inMemoryWorldState =
new BonsaiWorldStateKeyValueStorage(storageProvider, new NoOpMetricsSystem());
var updater = inMemoryWorldState.updater();
updater
.getTrieLogStorageTransaction()
.put(blockHeader1.getHash().toArrayUnsafe(), Bytes.fromHexString("0x01").toArrayUnsafe());
updater
.getTrieLogStorageTransaction()
.put(blockHeader2.getHash().toArrayUnsafe(), Bytes.fromHexString("0x02").toArrayUnsafe());
updater
.getTrieLogStorageTransaction()
.put(blockHeader3.getHash().toArrayUnsafe(), Bytes.fromHexString("0x03").toArrayUnsafe());
updater
.getTrieLogStorageTransaction()
.put(blockHeader4.getHash().toArrayUnsafe(), Bytes.fromHexString("0x04").toArrayUnsafe());
updater
.getTrieLogStorageTransaction()
.put(blockHeader5.getHash().toArrayUnsafe(), Bytes.fromHexString("0x05").toArrayUnsafe());
updater.getTrieLogStorageTransaction().commit();
}
@BeforeEach
void createDirectory() throws IOException {
Files.createDirectories(dataDir.resolve("database"));
}
@AfterEach
void deleteDirectory() throws IOException {
Files.deleteIfExists(dataDir.resolve("database"));
}
void mockBlockchainBase() {
when(blockchain.getChainHeadBlockNumber()).thenReturn(5L);
when(blockchain.getFinalized()).thenReturn(Optional.of(blockHeader3.getBlockHash()));
when(blockchain.getBlockHeader(any(Hash.class))).thenReturn(Optional.of(blockHeader3));
}
@Test
public void prune() {
DataStorageConfiguration dataStorageConfiguration =
ImmutableDataStorageConfiguration.builder()
.dataStorageFormat(BONSAI)
.bonsaiMaxLayersToLoad(2L)
.unstable(
ImmutableDataStorageConfiguration.Unstable.builder()
.bonsaiTrieLogRetentionThreshold(3)
.build()
.withBonsaiTrieLogRetentionThreshold(3))
.build();
mockBlockchainBase();
when(blockchain.getBlockHeader(5)).thenReturn(Optional.of(blockHeader5));
when(blockchain.getBlockHeader(4)).thenReturn(Optional.of(blockHeader4));
when(blockchain.getBlockHeader(3)).thenReturn(Optional.of(blockHeader3));
// assert trie logs that will be pruned exist before prune call
assertArrayEquals(
inMemoryWorldState.getTrieLog(blockHeader1.getHash()).get(),
Bytes.fromHexString("0x01").toArrayUnsafe());
assertArrayEquals(
inMemoryWorldState.getTrieLog(blockHeader2.getHash()).get(),
Bytes.fromHexString("0x02").toArrayUnsafe());
assertArrayEquals(
inMemoryWorldState.getTrieLog(blockHeader3.getHash()).get(),
Bytes.fromHexString("0x03").toArrayUnsafe());
TrieLogHelper.prune(dataStorageConfiguration, inMemoryWorldState, blockchain, dataDir);
// assert pruned trie logs are not in the DB
assertEquals(inMemoryWorldState.getTrieLog(blockHeader1.getHash()), Optional.empty());
assertEquals(inMemoryWorldState.getTrieLog(blockHeader2.getHash()), Optional.empty());
// assert retained trie logs are in the DB
assertArrayEquals(
inMemoryWorldState.getTrieLog(blockHeader3.getHash()).get(),
Bytes.fromHexString("0x03").toArrayUnsafe());
assertArrayEquals(
inMemoryWorldState.getTrieLog(blockHeader4.getHash()).get(),
Bytes.fromHexString("0x04").toArrayUnsafe());
assertArrayEquals(
inMemoryWorldState.getTrieLog(blockHeader5.getHash()).get(),
Bytes.fromHexString("0x05").toArrayUnsafe());
}
@Test
public void cantPruneIfNoFinalizedIsFound() {
DataStorageConfiguration dataStorageConfiguration =
ImmutableDataStorageConfiguration.builder()
.dataStorageFormat(BONSAI)
.bonsaiMaxLayersToLoad(2L)
.unstable(
ImmutableDataStorageConfiguration.Unstable.builder()
.bonsaiTrieLogRetentionThreshold(2)
.build()
.withBonsaiTrieLogRetentionThreshold(2))
.build();
when(blockchain.getChainHeadBlockNumber()).thenReturn(5L);
when(blockchain.getFinalized()).thenReturn(Optional.empty());
assertThrows(
RuntimeException.class,
() ->
TrieLogHelper.prune(dataStorageConfiguration, inMemoryWorldState, blockchain, dataDir));
}
@Test
public void cantPruneIfUserRetainsMoreLayerThanExistingChainLength() {
DataStorageConfiguration dataStorageConfiguration =
ImmutableDataStorageConfiguration.builder()
.dataStorageFormat(BONSAI)
.bonsaiMaxLayersToLoad(2L)
.unstable(
ImmutableDataStorageConfiguration.Unstable.builder()
.bonsaiTrieLogRetentionThreshold(10)
.build()
.withBonsaiTrieLogRetentionThreshold(10))
.build();
when(blockchain.getChainHeadBlockNumber()).thenReturn(5L);
assertThrows(
IllegalArgumentException.class,
() ->
TrieLogHelper.prune(dataStorageConfiguration, inMemoryWorldState, blockchain, dataDir));
}
@Test
public void cantPruneIfUserRequiredFurtherThanFinalized() {
DataStorageConfiguration dataStorageConfiguration =
ImmutableDataStorageConfiguration.builder()
.dataStorageFormat(BONSAI)
.bonsaiMaxLayersToLoad(2L)
.unstable(
ImmutableDataStorageConfiguration.Unstable.builder()
.bonsaiTrieLogRetentionThreshold(2)
.build()
.withBonsaiTrieLogRetentionThreshold(2))
.build();
mockBlockchainBase();
assertThrows(
IllegalArgumentException.class,
() ->
TrieLogHelper.prune(dataStorageConfiguration, inMemoryWorldState, blockchain, dataDir));
}
@Test
public void exceptionWhileSavingFileStopsPruneProcess() throws IOException {
Files.delete(dataDir.resolve("database"));
DataStorageConfiguration dataStorageConfiguration =
ImmutableDataStorageConfiguration.builder()
.dataStorageFormat(BONSAI)
.bonsaiMaxLayersToLoad(2L)
.unstable(
ImmutableDataStorageConfiguration.Unstable.builder()
.bonsaiTrieLogRetentionThreshold(2)
.build()
.withBonsaiTrieLogRetentionThreshold(2))
.build();
assertThrows(
RuntimeException.class,
() ->
TrieLogHelper.prune(dataStorageConfiguration, inMemoryWorldState, blockchain, dataDir));
// assert all trie logs are still in the DB
assertArrayEquals(
inMemoryWorldState.getTrieLog(blockHeader1.getHash()).get(),
Bytes.fromHexString("0x01").toArrayUnsafe());
assertArrayEquals(
inMemoryWorldState.getTrieLog(blockHeader2.getHash()).get(),
Bytes.fromHexString("0x02").toArrayUnsafe());
assertArrayEquals(
inMemoryWorldState.getTrieLog(blockHeader3.getHash()).get(),
Bytes.fromHexString("0x03").toArrayUnsafe());
assertArrayEquals(
inMemoryWorldState.getTrieLog(blockHeader4.getHash()).get(),
Bytes.fromHexString("0x04").toArrayUnsafe());
assertArrayEquals(
inMemoryWorldState.getTrieLog(blockHeader5.getHash()).get(),
Bytes.fromHexString("0x05").toArrayUnsafe());
}
}

@ -16,6 +16,7 @@ package org.hyperledger.besu.controller;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
@ -58,21 +59,21 @@ import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
import org.hyperledger.besu.services.kvstore.InMemoryKeyValueStorage;
import java.math.BigInteger;
import java.nio.file.Path;
import java.time.Clock;
import java.util.OptionalLong;
import com.google.common.collect.Range;
import org.apache.tuweni.bytes.Bytes;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.io.TempDir;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.junit.jupiter.MockitoExtension;
@RunWith(MockitoJUnitRunner.class)
@ExtendWith(MockitoExtension.class)
public class BesuControllerBuilderTest {
private BesuControllerBuilder besuControllerBuilder;
@ -100,9 +101,9 @@ public class BesuControllerBuilderTest {
BigInteger networkId = BigInteger.ONE;
@Rule public final TemporaryFolder tempDirRule = new TemporaryFolder();
@TempDir Path tempDir;
@Before
@BeforeEach
public void setup() {
when(genesisConfigFile.getParentHash()).thenReturn(Hash.ZERO.toHexString());
when(genesisConfigFile.getDifficulty()).thenReturn(Bytes.of(0).toHexString());
@ -129,14 +130,18 @@ public class BesuControllerBuilderTest {
when(synchronizerConfiguration.getBlockPropagationRange()).thenReturn(Range.closed(1L, 2L));
when(storageProvider.createWorldStateStorage(DataStorageFormat.FOREST))
lenient()
.when(storageProvider.createWorldStateStorage(DataStorageFormat.FOREST))
.thenReturn(worldStateStorage);
when(storageProvider.createWorldStatePreimageStorage()).thenReturn(worldStatePreimageStorage);
lenient()
.when(storageProvider.createWorldStatePreimageStorage())
.thenReturn(worldStatePreimageStorage);
when(worldStateStorage.isWorldStateAvailable(any(), any())).thenReturn(true);
when(worldStatePreimageStorage.updater())
lenient().when(worldStateStorage.isWorldStateAvailable(any(), any())).thenReturn(true);
lenient()
.when(worldStatePreimageStorage.updater())
.thenReturn(mock(WorldStatePreimageStorage.Updater.class));
when(worldStateStorage.updater()).thenReturn(mock(WorldStateStorage.Updater.class));
lenient().when(worldStateStorage.updater()).thenReturn(mock(WorldStateStorage.Updater.class));
besuControllerBuilder = spy(visitWithMockConfigs(new MainnetBesuControllerBuilder()));
}
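The lenient() wrappers in this and the following builder tests exist because mockito-junit-jupiter's MockitoExtension defaults to strict stubbing: a stub defined in a shared @BeforeEach but unused by a particular test would otherwise fail that test with UnnecessaryStubbingException. A minimal, generic sketch of the behaviour (toy mock, not Besu types):

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.lenient;

import java.util.List;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

// Toy example, not Besu code: without lenient(), the shared stub below would make
// doesNotTouchTheStub() fail under MockitoExtension's default strict-stubs setting.
@ExtendWith(MockitoExtension.class)
class LenientStubbingSketch {

  @Mock private List<String> list;

  @BeforeEach
  void setUp() {
    lenient().when(list.size()).thenReturn(3);
  }

  @Test
  void usesTheStub() {
    assertEquals(3, list.size());
  }

  @Test
  void doesNotTouchTheStub() {
    assertEquals(0, 1 - 1); // never calls list.size(); lenient() keeps this test green
  }
}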
@ -149,7 +154,7 @@ public class BesuControllerBuilderTest {
.miningParameters(miningParameters)
.metricsSystem(observableMetricsSystem)
.privacyParameters(privacyParameters)
.dataDirectory(tempDirRule.getRoot().toPath())
.dataDirectory(tempDir)
.clock(clock)
.transactionPoolConfiguration(poolConfiguration)
.nodeKey(nodeKey)

@ -37,13 +37,13 @@ import java.util.Map;
import java.util.OptionalLong;
import com.google.common.io.Resources;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.junit.jupiter.MockitoExtension;
@RunWith(MockitoJUnitRunner.class)
@ExtendWith(MockitoExtension.class)
public class BesuControllerTest {
@Spy private GenesisConfigFile genesisConfigFile = GenesisConfigFile.mainnet();

@ -54,13 +54,13 @@ import java.util.TreeSet;
import java.util.function.BiFunction;
import org.assertj.core.api.SoftAssertions;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.junit.jupiter.MockitoExtension;
@RunWith(MockitoJUnitRunner.class)
@ExtendWith(MockitoExtension.class)
public class ConsensusScheduleBesuControllerBuilderTest {
private @Mock BiFunction<
NavigableSet<ForkSpec<ProtocolSchedule>>, Optional<BigInteger>, ProtocolSchedule>

@ -18,6 +18,7 @@ import static org.assertj.core.api.Assertions.assertThat;
import static org.hyperledger.besu.ethereum.core.InMemoryKeyValueStorageProvider.createInMemoryBlockchain;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
@ -61,6 +62,7 @@ import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
import org.hyperledger.besu.services.kvstore.InMemoryKeyValueStorage;
import java.math.BigInteger;
import java.nio.file.Path;
import java.time.Clock;
import java.util.Collections;
import java.util.Optional;
@ -70,16 +72,15 @@ import com.google.common.collect.Range;
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.units.bigints.UInt256;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.io.TempDir;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.junit.jupiter.MockitoExtension;
@RunWith(MockitoJUnitRunner.class)
@ExtendWith(MockitoExtension.class)
public class MergeBesuControllerBuilderTest {
private MergeBesuControllerBuilder besuControllerBuilder;
@ -108,46 +109,54 @@ public class MergeBesuControllerBuilderTest {
TransactionPoolConfiguration.DEFAULT;
private final ObservableMetricsSystem observableMetricsSystem = new NoOpMetricsSystem();
@Rule public final TemporaryFolder tempDirRule = new TemporaryFolder();
@TempDir Path tempDir;
@Before
@BeforeEach
public void setup() {
when(genesisConfigFile.getParentHash()).thenReturn(Hash.ZERO.toHexString());
when(genesisConfigFile.getDifficulty()).thenReturn(Bytes.of(0).toHexString());
when(genesisConfigFile.getExtraData()).thenReturn(Bytes.EMPTY.toHexString());
when(genesisConfigFile.getMixHash()).thenReturn(Hash.ZERO.toHexString());
when(genesisConfigFile.getNonce()).thenReturn(Long.toHexString(1));
when(genesisConfigFile.getConfigOptions(any())).thenReturn(genesisConfigOptions);
when(genesisConfigFile.getConfigOptions()).thenReturn(genesisConfigOptions);
when(genesisConfigOptions.getCheckpointOptions()).thenReturn(checkpointConfigOptions);
lenient().when(genesisConfigFile.getParentHash()).thenReturn(Hash.ZERO.toHexString());
lenient().when(genesisConfigFile.getDifficulty()).thenReturn(Bytes.of(0).toHexString());
lenient().when(genesisConfigFile.getExtraData()).thenReturn(Bytes.EMPTY.toHexString());
lenient().when(genesisConfigFile.getMixHash()).thenReturn(Hash.ZERO.toHexString());
lenient().when(genesisConfigFile.getNonce()).thenReturn(Long.toHexString(1));
lenient().when(genesisConfigFile.getConfigOptions(any())).thenReturn(genesisConfigOptions);
lenient().when(genesisConfigFile.getConfigOptions()).thenReturn(genesisConfigOptions);
lenient().when(genesisConfigOptions.getCheckpointOptions()).thenReturn(checkpointConfigOptions);
when(genesisConfigOptions.getTerminalTotalDifficulty())
.thenReturn((Optional.of(UInt256.valueOf(100L))));
when(genesisConfigOptions.getThanosBlockNumber()).thenReturn(OptionalLong.empty());
when(genesisConfigOptions.getTerminalBlockHash()).thenReturn(Optional.of(Hash.ZERO));
when(genesisConfigOptions.getTerminalBlockNumber()).thenReturn(OptionalLong.of(1L));
when(storageProvider.createBlockchainStorage(any(), any()))
lenient().when(genesisConfigOptions.getTerminalBlockNumber()).thenReturn(OptionalLong.of(1L));
lenient()
.when(storageProvider.createBlockchainStorage(any(), any()))
.thenReturn(
new KeyValueStoragePrefixedKeyBlockchainStorage(
new InMemoryKeyValueStorage(),
new VariablesKeyValueStorage(new InMemoryKeyValueStorage()),
new MainnetBlockHeaderFunctions()));
when(storageProvider.getStorageBySegmentIdentifier(any()))
lenient()
.when(storageProvider.getStorageBySegmentIdentifier(any()))
.thenReturn(new InMemoryKeyValueStorage());
when(synchronizerConfiguration.getDownloaderParallelism()).thenReturn(1);
when(synchronizerConfiguration.getTransactionsParallelism()).thenReturn(1);
when(synchronizerConfiguration.getComputationParallelism()).thenReturn(1);
lenient().when(synchronizerConfiguration.getDownloaderParallelism()).thenReturn(1);
lenient().when(synchronizerConfiguration.getTransactionsParallelism()).thenReturn(1);
lenient().when(synchronizerConfiguration.getComputationParallelism()).thenReturn(1);
when(synchronizerConfiguration.getBlockPropagationRange()).thenReturn(Range.closed(1L, 2L));
lenient()
.when(synchronizerConfiguration.getBlockPropagationRange())
.thenReturn(Range.closed(1L, 2L));
when(storageProvider.createWorldStateStorage(DataStorageFormat.FOREST))
lenient()
.when(storageProvider.createWorldStateStorage(DataStorageFormat.FOREST))
.thenReturn(worldStateStorage);
when(storageProvider.createWorldStatePreimageStorage()).thenReturn(worldStatePreimageStorage);
lenient()
.when(storageProvider.createWorldStatePreimageStorage())
.thenReturn(worldStatePreimageStorage);
when(worldStateStorage.isWorldStateAvailable(any(), any())).thenReturn(true);
when(worldStatePreimageStorage.updater())
lenient().when(worldStateStorage.isWorldStateAvailable(any(), any())).thenReturn(true);
lenient()
.when(worldStatePreimageStorage.updater())
.thenReturn(mock(WorldStatePreimageStorage.Updater.class));
when(worldStateStorage.updater()).thenReturn(mock(WorldStateStorage.Updater.class));
when(miningParameters.getTargetGasLimit()).thenReturn(OptionalLong.empty());
lenient().when(worldStateStorage.updater()).thenReturn(mock(WorldStateStorage.Updater.class));
lenient().when(miningParameters.getTargetGasLimit()).thenReturn(OptionalLong.empty());
besuControllerBuilder = visitWithMockConfigs(new MergeBesuControllerBuilder());
}
@ -162,7 +171,7 @@ public class MergeBesuControllerBuilderTest {
.miningParameters(miningParameters)
.metricsSystem(observableMetricsSystem)
.privacyParameters(privacyParameters)
.dataDirectory(tempDirRule.getRoot().toPath())
.dataDirectory(tempDir)
.clock(clock)
.transactionPoolConfiguration(poolConfiguration)
.nodeKey(nodeKey)
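The hunks above (and the equivalent ones in the controller-builder tests that follow) migrate from the JUnit 4 MockitoJUnitRunner and TemporaryFolder rule to JUnit 5's MockitoExtension and @TempDir. MockitoExtension applies strict stubbing by default, so any setup stub that is not exercised by every test method has to be wrapped in lenient(), which is why so many when(...) calls change. A minimal, self-contained sketch of the pattern, using a made-up collaborator rather than Besu types:

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.when;

import java.nio.file.Path;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.io.TempDir;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

@ExtendWith(MockitoExtension.class)
class LenientStubbingSketchTest {
  // stand-in collaborator so the sketch stays self-contained; the real tests mock Besu types
  interface GenesisSource {
    String parentHash();

    String difficulty();
  }

  @Mock GenesisSource genesisSource;
  @TempDir Path tempDir; // replaces the JUnit 4 TemporaryFolder @Rule

  @BeforeEach
  void setup() {
    // strict stubs: a stub not used by every test must be lenient() or Mockito fails the test
    lenient().when(genesisSource.parentHash()).thenReturn("0x00");
    when(genesisSource.difficulty()).thenReturn("0x01"); // used by the test below
  }

  @Test
  void readsDifficulty() {
    assertThat(genesisSource.difficulty()).isEqualTo("0x01");
    assertThat(tempDir).exists(); // JUnit 5 creates and cleans the temp dir per test
  }
}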

@ -17,6 +17,7 @@ package org.hyperledger.besu.controller;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ -56,20 +57,20 @@ import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
import org.hyperledger.besu.services.kvstore.InMemoryKeyValueStorage;
import java.math.BigInteger;
import java.nio.file.Path;
import java.time.Clock;
import java.util.List;
import com.google.common.collect.Range;
import org.apache.tuweni.bytes.Bytes;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.io.TempDir;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.junit.jupiter.MockitoExtension;
@RunWith(MockitoJUnitRunner.class)
@ExtendWith(MockitoExtension.class)
public class QbftBesuControllerBuilderTest {
private BesuControllerBuilder qbftBesuControllerBuilder;
@ -92,43 +93,54 @@ public class QbftBesuControllerBuilderTest {
TransactionPoolConfiguration.DEFAULT;
private final ObservableMetricsSystem observableMetricsSystem = new NoOpMetricsSystem();
@Rule public final TemporaryFolder tempDirRule = new TemporaryFolder();
@TempDir Path tempDir;
@Before
@BeforeEach
public void setup() {
// besu controller setup
when(genesisConfigFile.getParentHash()).thenReturn(Hash.ZERO.toHexString());
when(genesisConfigFile.getDifficulty()).thenReturn(Bytes.of(0).toHexString());
lenient().when(genesisConfigFile.getParentHash()).thenReturn(Hash.ZERO.toHexString());
lenient().when(genesisConfigFile.getDifficulty()).thenReturn(Bytes.of(0).toHexString());
when(genesisConfigFile.getExtraData()).thenReturn(Bytes.EMPTY.toHexString());
when(genesisConfigFile.getMixHash()).thenReturn(Hash.ZERO.toHexString());
when(genesisConfigFile.getNonce()).thenReturn(Long.toHexString(1));
when(genesisConfigFile.getConfigOptions(any())).thenReturn(genesisConfigOptions);
when(genesisConfigFile.getConfigOptions()).thenReturn(genesisConfigOptions);
when(genesisConfigOptions.getCheckpointOptions()).thenReturn(checkpointConfigOptions);
when(storageProvider.createBlockchainStorage(any(), any()))
lenient().when(genesisConfigFile.getMixHash()).thenReturn(Hash.ZERO.toHexString());
lenient().when(genesisConfigFile.getNonce()).thenReturn(Long.toHexString(1));
lenient().when(genesisConfigFile.getConfigOptions(any())).thenReturn(genesisConfigOptions);
lenient().when(genesisConfigFile.getConfigOptions()).thenReturn(genesisConfigOptions);
lenient().when(genesisConfigOptions.getCheckpointOptions()).thenReturn(checkpointConfigOptions);
lenient()
.when(storageProvider.createBlockchainStorage(any(), any()))
.thenReturn(
new KeyValueStoragePrefixedKeyBlockchainStorage(
new InMemoryKeyValueStorage(),
new VariablesKeyValueStorage(new InMemoryKeyValueStorage()),
new MainnetBlockHeaderFunctions()));
when(storageProvider.createWorldStateStorage(DataStorageFormat.FOREST))
lenient()
.when(storageProvider.createWorldStateStorage(DataStorageFormat.FOREST))
.thenReturn(worldStateStorage);
when(worldStateStorage.isWorldStateAvailable(any(), any())).thenReturn(true);
when(worldStateStorage.updater()).thenReturn(mock(WorldStateStorage.Updater.class));
when(worldStatePreimageStorage.updater())
lenient().when(worldStateStorage.isWorldStateAvailable(any(), any())).thenReturn(true);
lenient().when(worldStateStorage.updater()).thenReturn(mock(WorldStateStorage.Updater.class));
lenient()
.when(worldStatePreimageStorage.updater())
.thenReturn(mock(WorldStatePreimageStorage.Updater.class));
when(storageProvider.createWorldStatePreimageStorage()).thenReturn(worldStatePreimageStorage);
when(synchronizerConfiguration.getDownloaderParallelism()).thenReturn(1);
when(synchronizerConfiguration.getTransactionsParallelism()).thenReturn(1);
when(synchronizerConfiguration.getComputationParallelism()).thenReturn(1);
lenient()
.when(storageProvider.createWorldStatePreimageStorage())
.thenReturn(worldStatePreimageStorage);
lenient().when(synchronizerConfiguration.getDownloaderParallelism()).thenReturn(1);
lenient().when(synchronizerConfiguration.getTransactionsParallelism()).thenReturn(1);
lenient().when(synchronizerConfiguration.getComputationParallelism()).thenReturn(1);
when(synchronizerConfiguration.getBlockPropagationRange()).thenReturn(Range.closed(1L, 2L));
lenient()
.when(synchronizerConfiguration.getBlockPropagationRange())
.thenReturn(Range.closed(1L, 2L));
// qbft prepForBuild setup
when(genesisConfigOptions.getQbftConfigOptions())
lenient()
.when(genesisConfigOptions.getQbftConfigOptions())
.thenReturn(new MutableQbftConfigOptions(JsonQbftConfigOptions.DEFAULT));
when(genesisConfigOptions.getTransitions()).thenReturn(mock(TransitionsConfigOptions.class));
when(genesisConfigFile.getExtraData())
lenient()
.when(genesisConfigOptions.getTransitions())
.thenReturn(mock(TransitionsConfigOptions.class));
lenient()
.when(genesisConfigFile.getExtraData())
.thenReturn(
QbftExtraDataCodec.createGenesisExtraDataString(List.of(Address.fromHexString("1"))));
@ -141,7 +153,7 @@ public class QbftBesuControllerBuilderTest {
.miningParameters(miningParameters)
.metricsSystem(observableMetricsSystem)
.privacyParameters(privacyParameters)
.dataDirectory(tempDirRule.getRoot().toPath())
.dataDirectory(tempDir)
.clock(clock)
.transactionPoolConfiguration(poolConfiguration)
.nodeKey(nodeKey)

@ -16,6 +16,7 @@ package org.hyperledger.besu.controller;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
@ -53,18 +54,18 @@ import org.hyperledger.besu.testutil.DeterministicEthScheduler;
import java.util.Optional;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.junit.jupiter.MockitoExtension;
/**
* We only bother testing transitionControllerBuilder for PoW and Clique since those are the only
* network types that are transitioning to PoS.
*/
@RunWith(MockitoJUnitRunner.class)
@ExtendWith(MockitoExtension.class)
public class TransitionControllerBuilderTest {
@Mock ProtocolSchedule preMergeProtocolSchedule;
@ -87,7 +88,7 @@ public class TransitionControllerBuilderTest {
TransitionProtocolSchedule transitionProtocolSchedule;
@Before
@BeforeEach
public void setup() {
transitionProtocolSchedule =
spy(
@ -99,13 +100,22 @@ public class TransitionControllerBuilderTest {
powBuilder.genesisConfigFile(GenesisConfigFile.DEFAULT);
postMergeBuilder.genesisConfigFile(GenesisConfigFile.DEFAULT);
postMergeBuilder.storageProvider(storageProvider);
when(protocolContext.getBlockchain()).thenReturn(mockBlockchain);
when(transitionProtocolSchedule.getPostMergeSchedule()).thenReturn(postMergeProtocolSchedule);
when(transitionProtocolSchedule.getPreMergeSchedule()).thenReturn(preMergeProtocolSchedule);
when(protocolContext.getConsensusContext(CliqueContext.class))
lenient().when(protocolContext.getBlockchain()).thenReturn(mockBlockchain);
lenient()
.when(transitionProtocolSchedule.getPostMergeSchedule())
.thenReturn(postMergeProtocolSchedule);
lenient()
.when(transitionProtocolSchedule.getPreMergeSchedule())
.thenReturn(preMergeProtocolSchedule);
lenient()
.when(protocolContext.getConsensusContext(CliqueContext.class))
.thenReturn(mock(CliqueContext.class));
when(protocolContext.getConsensusContext(PostMergeContext.class)).thenReturn(mergeContext);
when(protocolContext.getConsensusContext(MergeContext.class)).thenReturn(mergeContext);
lenient()
.when(protocolContext.getConsensusContext(PostMergeContext.class))
.thenReturn(mergeContext);
lenient()
.when(protocolContext.getConsensusContext(MergeContext.class))
.thenReturn(mergeContext);
when(ethProtocolManager.ethContext().getScheduler())
.thenReturn(new DeterministicEthScheduler());
miningParameters = MiningParameters.newDefault();

@ -7,6 +7,7 @@
"londonBlock":5062605,
"terminalTotalDifficulty": 10790000,
"shanghaiTime": 1678832736,
"cancunTime": 1705473120,
"clique":{
"blockperiodseconds":15,
"epochlength":30000

@ -14,6 +14,7 @@
"preMergeForkBlock": 0,
"terminalTotalDifficulty": 0,
"shanghaiTime": 1696000704,
"cancunTime": 1707305664,
"ethash": {},
"discovery": {
"bootnodes": [

@ -14,6 +14,7 @@
"mergeNetSplitBlock": 1735371,
"terminalTotalDifficulty": 17000000000000000,
"shanghaiTime": 1677557088,
"cancunTime": 1706655072,
"ethash":{},
"discovery": {
"dns": "enrtree://AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE@all.sepolia.ethdisco.net",

@ -34,6 +34,6 @@ Besu reports only the actual cost of the precompiled contract call in the
### Out of Gas
Besu reports the operation that causes out fo gas exceptions, including
Besu reports the operation that causes out of gas exceptions, including
calculated gas cost. The operation is not executed so no `ex` values are
reported.

@ -24,6 +24,7 @@ import org.hyperledger.besu.ethereum.api.jsonrpc.internal.JsonRpcRequest;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.JsonRpcRequestContext;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.JsonRpcMethod;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.parameters.JsonCallParameter;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcError;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcErrorResponse;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcResponse;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcSuccessResponse;
@ -161,8 +162,12 @@ public class EthEstimateGasIntegrationTest {
true,
null);
final JsonRpcRequestContext request = requestWithParams(callParameter);
final JsonRpcResponse expectedResponse =
new JsonRpcErrorResponse(null, RpcErrorType.TRANSACTION_UPFRONT_COST_EXCEEDS_BALANCE);
final RpcErrorType rpcErrorType = RpcErrorType.TRANSACTION_UPFRONT_COST_EXCEEDS_BALANCE;
final JsonRpcError rpcError = new JsonRpcError(rpcErrorType);
rpcError.setReason(
"transaction up-front cost 0x1cc31b3333167018 exceeds transaction sender account balance 0x140");
final JsonRpcResponse expectedResponse = new JsonRpcErrorResponse(null, rpcError);
final JsonRpcResponse response = method.response(request);
assertThat(response).usingRecursiveComparison().isEqualTo(expectedResponse);

@ -79,6 +79,8 @@ public class JsonRpcErrorConverter {
return RpcErrorType.PLUGIN_TX_VALIDATOR;
case INVALID_BLOBS:
return RpcErrorType.INVALID_BLOBS;
case EXECUTION_HALTED:
return RpcErrorType.EXECUTION_HALTED;
default:
return RpcErrorType.INTERNAL_ERROR;
}

@ -23,6 +23,7 @@ import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcError;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcErrorResponse;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.RpcErrorType;
import org.hyperledger.besu.ethereum.api.query.BlockchainQueries;
import org.hyperledger.besu.ethereum.chain.Blockchain;
import org.hyperledger.besu.ethereum.core.BlockHeader;
import org.hyperledger.besu.ethereum.mainnet.ValidationResult;
import org.hyperledger.besu.ethereum.processing.TransactionProcessingResult;
@ -48,8 +49,16 @@ public abstract class AbstractEstimateGas implements JsonRpcMethod {
}
protected BlockHeader blockHeader() {
final long headBlockNumber = blockchainQueries.headBlockNumber();
return blockchainQueries.getBlockchain().getBlockHeader(headBlockNumber).orElse(null);
final Blockchain theChain = blockchainQueries.getBlockchain();
// Optimistically get the block header for the chain head without taking a lock,
// but revert to the safe implementation if it returns an empty optional. (It's
// possible the chain head has been updated but the block is still being persisted
// to storage/cache under the lock).
return theChain
.getBlockHeader(theChain.getChainHeadHash())
.or(() -> theChain.getBlockHeaderSafe(theChain.getChainHeadHash()))
.orElse(null);
}
protected CallParameter overrideGasLimitAndPrice(
@ -101,6 +110,14 @@ public abstract class AbstractEstimateGas implements JsonRpcMethod {
final ValidationResult<TransactionInvalidReason> validationResult =
result.getValidationResult();
if (validationResult != null && !validationResult.isValid()) {
if (validationResult.getErrorMessage().length() > 0) {
final RpcErrorType rpcErrorType =
JsonRpcErrorConverter.convertTransactionInvalidReason(
validationResult.getInvalidReason());
final JsonRpcError rpcError = new JsonRpcError(rpcErrorType);
rpcError.setReason(validationResult.getErrorMessage());
return errorResponse(request, rpcError);
}
return errorResponse(
request,
JsonRpcErrorConverter.convertTransactionInvalidReason(

@ -32,8 +32,13 @@ import org.hyperledger.besu.evm.tracing.EstimateGasOperationTracer;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class EthEstimateGas extends AbstractEstimateGas {
private static final Logger LOG = LoggerFactory.getLogger(EthEstimateGas.class);
public EthEstimateGas(
final BlockchainQueries blockchainQueries, final TransactionSimulator transactionSimulator) {
super(blockchainQueries, transactionSimulator);
@ -50,6 +55,7 @@ public class EthEstimateGas extends AbstractEstimateGas {
final BlockHeader blockHeader = blockHeader();
if (blockHeader == null) {
LOG.error("Chain head block not found");
return errorResponse(requestContext, RpcErrorType.INTERNAL_ERROR);
}
if (!blockchainQueries
@ -70,6 +76,7 @@ public class EthEstimateGas extends AbstractEstimateGas {
blockHeader, modifiedCallParams, operationTracer, isAllowExceedingBalance);
if (gasUsed.isEmpty()) {
LOG.error("gasUsed is empty after simulating transaction.");
return errorResponse(requestContext, RpcErrorType.INTERNAL_ERROR);
}

@ -73,6 +73,10 @@ public class JsonRpcError {
return data;
}
public void setReason(final String reason) {
this.reason = reason;
}
@Override
public boolean equals(final Object o) {
if (this == o) {

@ -75,6 +75,7 @@ public enum RpcErrorType {
-32000, "An invalid transaction with a lower nonce exists"),
TOTAL_BLOB_GAS_TOO_HIGH(-32000, "Total blob gas too high"),
PLUGIN_TX_VALIDATOR(-32000, "Plugin has marked the transaction as invalid"),
EXECUTION_HALTED(-32000, "Transaction processing could not be completed due to an exception"),
// Execution engine failures
UNKNOWN_PAYLOAD(-32001, "Payload does not exist / is not available"),

@ -17,7 +17,6 @@ package org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@ -25,6 +24,7 @@ import static org.mockito.Mockito.when;
import org.hyperledger.besu.datatypes.AccessListEntry;
import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.datatypes.Wei;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.JsonRpcRequest;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.JsonRpcRequestContext;
@ -78,10 +78,16 @@ public class EthCreateAccessListTest {
@BeforeEach
public void setUp() {
when(blockchainQueries.headBlockNumber()).thenReturn(1L);
when(blockchainQueries.getBlockchain()).thenReturn(blockchain);
when(blockchainQueries.getWorldStateArchive()).thenReturn(worldStateArchive);
when(blockchain.getBlockHeader(eq(1L))).thenReturn(Optional.of(blockHeader));
when(blockchain.getChainHeadHash())
.thenReturn(
Hash.fromHexString(
"0x3f07a9c83155594c000642e7d60e8a8a00038d03e9849171a05ed0e2d47acbb3"));
when(blockchain.getBlockHeader(
Hash.fromHexString(
"0x3f07a9c83155594c000642e7d60e8a8a00038d03e9849171a05ed0e2d47acbb3")))
.thenReturn(Optional.of(blockHeader));
when(blockHeader.getGasLimit()).thenReturn(Long.MAX_VALUE);
when(blockHeader.getNumber()).thenReturn(1L);
when(worldStateArchive.isWorldStateAvailable(any(), any())).thenReturn(true);

@ -22,6 +22,7 @@ import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.datatypes.Wei;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.JsonRpcRequest;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.JsonRpcRequestContext;
@ -73,10 +74,16 @@ public class EthEstimateGasTest {
@BeforeEach
public void setUp() {
when(blockchainQueries.headBlockNumber()).thenReturn(1L);
when(blockchainQueries.getBlockchain()).thenReturn(blockchain);
when(blockchainQueries.getWorldStateArchive()).thenReturn(worldStateArchive);
when(blockchain.getBlockHeader(eq(1L))).thenReturn(Optional.of(blockHeader));
when(blockchain.getChainHeadHash())
.thenReturn(
Hash.fromHexString(
"0x3f07a9c83155594c000642e7d60e8a8a00038d03e9849171a05ed0e2d47acbb3"));
when(blockchain.getBlockHeader(
Hash.fromHexString(
"0x3f07a9c83155594c000642e7d60e8a8a00038d03e9849171a05ed0e2d47acbb3")))
.thenReturn(Optional.of(blockHeader));
when(blockHeader.getGasLimit()).thenReturn(Long.MAX_VALUE);
when(blockHeader.getNumber()).thenReturn(1L);
when(worldStateArchive.isWorldStateAvailable(any(), any())).thenReturn(true);
@ -209,10 +216,13 @@ public class EthEstimateGasTest {
final JsonRpcRequestContext request =
ethEstimateGasRequest(defaultLegacyTransactionCallParameter(Wei.ZERO));
mockTransientProcessorResultTxInvalidReason(
TransactionInvalidReason.UPFRONT_COST_EXCEEDS_BALANCE);
TransactionInvalidReason.UPFRONT_COST_EXCEEDS_BALANCE,
"transaction up-front cost 10 exceeds transaction sender account balance 5");
final JsonRpcResponse expectedResponse =
new JsonRpcErrorResponse(null, RpcErrorType.TRANSACTION_UPFRONT_COST_EXCEEDS_BALANCE);
final RpcErrorType rpcErrorType = RpcErrorType.TRANSACTION_UPFRONT_COST_EXCEEDS_BALANCE;
final JsonRpcError rpcError = new JsonRpcError(rpcErrorType);
rpcError.setReason("transaction up-front cost 10 exceeds transaction sender account balance 5");
final JsonRpcResponse expectedResponse = new JsonRpcErrorResponse(null, rpcError);
Assertions.assertThat(method.response(request))
.usingRecursiveComparison()
@ -223,10 +233,13 @@ public class EthEstimateGasTest {
public void shouldReturnErrorWhenEip1559TransactionProcessorReturnsTxInvalidReason() {
final JsonRpcRequestContext request = ethEstimateGasRequest(eip1559TransactionCallParameter());
mockTransientProcessorResultTxInvalidReason(
TransactionInvalidReason.UPFRONT_COST_EXCEEDS_BALANCE);
TransactionInvalidReason.UPFRONT_COST_EXCEEDS_BALANCE,
"transaction up-front cost 10 exceeds transaction sender account balance 5");
final JsonRpcResponse expectedResponse =
new JsonRpcErrorResponse(null, RpcErrorType.TRANSACTION_UPFRONT_COST_EXCEEDS_BALANCE);
final RpcErrorType rpcErrorType = RpcErrorType.TRANSACTION_UPFRONT_COST_EXCEEDS_BALANCE;
final JsonRpcError rpcError = new JsonRpcError(rpcErrorType);
rpcError.setReason("transaction up-front cost 10 exceeds transaction sender account balance 5");
final JsonRpcResponse expectedResponse = new JsonRpcErrorResponse(null, rpcError);
Assertions.assertThat(method.response(request))
.usingRecursiveComparison()
@ -243,9 +256,9 @@ public class EthEstimateGasTest {
final JsonRpcResponse expectedResponse =
new JsonRpcErrorResponse(null, RpcErrorType.WORLD_STATE_UNAVAILABLE);
Assertions.assertThat(method.response(request))
.usingRecursiveComparison()
.isEqualTo(expectedResponse);
JsonRpcResponse theResponse = method.response(request);
Assertions.assertThat(theResponse).usingRecursiveComparison().isEqualTo(expectedResponse);
}
@Test
@ -364,10 +377,32 @@ public class EthEstimateGasTest {
eq(1L));
}
private void mockTransientProcessorResultTxInvalidReason(final TransactionInvalidReason reason) {
@Test
public void shouldIncludeHaltReasonWhenExecutionHalts() {
final JsonRpcRequestContext request =
ethEstimateGasRequest(defaultLegacyTransactionCallParameter(Wei.ZERO));
mockTransientProcessorResultTxInvalidReason(
TransactionInvalidReason.EXECUTION_HALTED, "INVALID_OPERATION");
final RpcErrorType rpcErrorType = RpcErrorType.EXECUTION_HALTED;
final JsonRpcError rpcError = new JsonRpcError(rpcErrorType);
rpcError.setReason("INVALID_OPERATION");
final JsonRpcResponse expectedResponse = new JsonRpcErrorResponse(null, rpcError);
Assertions.assertThat(method.response(request))
.usingRecursiveComparison()
.isEqualTo(expectedResponse);
}
private void mockTransientProcessorResultTxInvalidReason(
final TransactionInvalidReason reason, final String validationFailedErrorMessage) {
final TransactionSimulatorResult mockTxSimResult =
getMockTransactionSimulatorResult(false, 0, Wei.ZERO, Optional.empty());
when(mockTxSimResult.getValidationResult()).thenReturn(ValidationResult.invalid(reason));
when(mockTxSimResult.getValidationResult())
.thenReturn(
validationFailedErrorMessage == null
? ValidationResult.invalid(reason)
: ValidationResult.invalid(reason, validationFailedErrorMessage));
}
private void mockTransientProcessorTxReverted(

@ -14,8 +14,8 @@
"jsonrpc": "2.0",
"id": 3,
"error": {
"code": -32603,
"message": "Internal error"
"code": -32000,
"message": "Transaction processing could not be completed due to an exception: INVALID_OPERATION"
}
},
"statusCode": 200

@ -84,7 +84,10 @@ public class BlockHeaderValidator {
rule -> {
boolean worked = rule.validate(header, parent, protocolContext);
if (!worked) {
LOG.debug("{} rule failed", rule.innerRuleClass().getCanonicalName());
String canonicalName = rule.innerRuleClass().getCanonicalName();
LOG.debug(
"{} rule failed",
canonicalName == null ? rule.innerRuleClass().getName() : canonicalName);
}
return worked;
});
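For context on the guard above: Class.getCanonicalName() returns null for anonymous and local classes, which a validation rule implementation may well be, so logging it directly could print "null rule failed". A small standalone illustration (not Besu code):

public class CanonicalNameSketch {
  public static void main(final String[] args) {
    final Runnable anonymousRule =
        new Runnable() {
          @Override
          public void run() {}
        };
    System.out.println(anonymousRule.getClass().getCanonicalName()); // prints "null"
    System.out.println(anonymousRule.getClass().getName()); // e.g. "CanonicalNameSketch$1"
  }
}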

@ -426,6 +426,13 @@ public class MainnetTransactionProcessor {
if (initialFrame.getState() == MessageFrame.State.COMPLETED_SUCCESS) {
worldUpdater.commit();
} else {
if (initialFrame.getExceptionalHaltReason().isPresent()) {
validationResult =
ValidationResult.invalid(
TransactionInvalidReason.EXECUTION_HALTED,
initialFrame.getExceptionalHaltReason().get().toString());
}
}
if (LOG.isTraceEnabled()) {

@ -49,6 +49,7 @@ public enum TransactionInvalidReason {
TX_POOL_DISABLED,
INVALID_BLOBS,
PLUGIN_TX_VALIDATOR,
EXECUTION_HALTED,
// Private Transaction Invalid Reasons
PRIVATE_TRANSACTION_INVALID,
PRIVATE_TRANSACTION_FAILED,

@ -24,18 +24,22 @@ import org.apache.commons.lang3.builder.HashCodeBuilder;
public class BonsaiValue<T> implements TrieLog.LogTuple<T> {
private T prior;
private T updated;
private boolean cleared;
private boolean lastStepCleared;
private boolean clearedAtLeastOnce;
public BonsaiValue(final T prior, final T updated) {
this.prior = prior;
this.updated = updated;
this.cleared = false;
this.lastStepCleared = false;
this.clearedAtLeastOnce = false;
}
public BonsaiValue(final T prior, final T updated, final boolean cleared) {
public BonsaiValue(final T prior, final T updated, final boolean lastStepCleared) {
this.prior = prior;
this.updated = updated;
this.cleared = cleared;
this.lastStepCleared = lastStepCleared;
this.clearedAtLeastOnce = lastStepCleared;
}
@Override
@ -54,18 +58,27 @@ public class BonsaiValue<T> implements TrieLog.LogTuple<T> {
}
public BonsaiValue<T> setUpdated(final T updated) {
this.cleared = updated == null;
this.lastStepCleared = updated == null;
if (lastStepCleared) {
this.clearedAtLeastOnce = true;
}
this.updated = updated;
return this;
}
public void setCleared() {
this.cleared = true;
this.lastStepCleared = true;
this.clearedAtLeastOnce = true;
}
@Override
public boolean isLastStepCleared() {
return lastStepCleared;
}
@Override
public boolean isCleared() {
return cleared;
public boolean isClearedAtLeastOnce() {
return clearedAtLeastOnce;
}
@Override
@ -76,7 +89,7 @@ public class BonsaiValue<T> implements TrieLog.LogTuple<T> {
+ ", updated="
+ updated
+ ", cleared="
+ cleared
+ lastStepCleared
+ '}';
}
@ -90,7 +103,7 @@ public class BonsaiValue<T> implements TrieLog.LogTuple<T> {
}
BonsaiValue<?> that = (BonsaiValue<?>) o;
return new EqualsBuilder()
.append(cleared, that.cleared)
.append(lastStepCleared, that.lastStepCleared)
.append(prior, that.prior)
.append(updated, that.updated)
.isEquals();
@ -98,6 +111,10 @@ public class BonsaiValue<T> implements TrieLog.LogTuple<T> {
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37).append(prior).append(updated).append(cleared).toHashCode();
return new HashCodeBuilder(17, 37)
.append(prior)
.append(updated)
.append(lastStepCleared)
.toHashCode();
}
}
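A short walk-through of the renamed flags, assuming this snippet sits alongside BonsaiValue (its package is not shown in the hunk): isLastStepCleared() reflects only the most recent update, while isClearedAtLeastOnce() stays true once any clear has happened.

import org.apache.tuweni.units.bigints.UInt256;

class BonsaiValueFlagsSketch {
  static void demo() {
    final BonsaiValue<UInt256> slot = new BonsaiValue<>(UInt256.ONE, UInt256.ONE);
    slot.setUpdated(null); // a clear: both flags become true
    slot.setUpdated(UInt256.valueOf(2L)); // re-write: isLastStepCleared() -> false
    assert !slot.isLastStepCleared();
    assert slot.isClearedAtLeastOnce(); // sticky once any clear has happened
  }
}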

@ -204,7 +204,7 @@ public class BonsaiWorldStateKeyValueStorage implements WorldStateStorage, AutoC
return trieLogStorage.get(blockHash.toArrayUnsafe());
}
public Stream<byte[]> streamTrieLogKeys(final int limit) {
public Stream<byte[]> streamTrieLogKeys(final long limit) {
return trieLogStorage.streamKeys().limit(limit);
}

@ -248,7 +248,7 @@ public class TrieLogFactoryImpl implements TrieLogFactory {
} else {
writer.accept(output, value.getUpdated());
}
if (!value.isCleared()) {
if (!value.isLastStepCleared()) {
output.writeNull();
} else {
output.writeInt(1);

@ -61,33 +61,37 @@ public class TrieLogPruner {
this.requireFinalizedBlock = requireFinalizedBlock;
}
public void initialize() {
preloadQueue();
public int initialize() {
return preloadQueue();
}
private void preloadQueue() {
private int preloadQueue() {
LOG.atInfo()
.setMessage("Loading first {} trie logs from database...")
.addArgument(loadingLimit)
.log();
try (final Stream<byte[]> trieLogKeys = rootWorldStateStorage.streamTrieLogKeys(loadingLimit)) {
final AtomicLong count = new AtomicLong();
final AtomicLong orphansPruned = new AtomicLong();
trieLogKeys.forEach(
blockHashAsBytes -> {
final Hash blockHash = Hash.wrap(Bytes32.wrap(blockHashAsBytes));
final Optional<BlockHeader> header = blockchain.getBlockHeader(blockHash);
if (header.isPresent()) {
trieLogBlocksAndForksByDescendingBlockNumber.put(header.get().getNumber(), blockHash);
addToPruneQueue(header.get().getNumber(), blockHash);
count.getAndIncrement();
} else {
// prune orphaned blocks (sometimes created during block production)
rootWorldStateStorage.pruneTrieLog(blockHash);
orphansPruned.getAndIncrement();
}
});
LOG.atDebug().log("Pruned {} orphaned trie logs from database...", orphansPruned.intValue());
LOG.atInfo().log("Loaded {} trie logs from database", count);
pruneFromQueue();
return pruneFromQueue() + orphansPruned.intValue();
} catch (Exception e) {
LOG.error("Error loading trie logs from database, nothing pruned", e);
return 0;
}
}
@ -176,8 +180,9 @@ public class TrieLogPruner {
}
@Override
public void initialize() {
public int initialize() {
// no-op
return -1;
}
@Override

@ -59,6 +59,7 @@ import javax.annotation.Nonnull;
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
import org.apache.tuweni.rlp.RLP;
import org.apache.tuweni.units.bigints.UInt256;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -340,49 +341,55 @@ public class BonsaiWorldState
final Optional<BonsaiWorldStateKeyValueStorage.BonsaiUpdater> maybeStateUpdater,
final BonsaiWorldStateUpdateAccumulator worldStateUpdater) {
maybeStateUpdater.ifPresent(
bonsaiUpdater -> {
for (final Address address : worldStateUpdater.getStorageToClear()) {
// because we are clearing persisted values we need the account root as persisted
final BonsaiAccount oldAccount =
worldStateStorage
.getAccount(address.addressHash())
.map(
bytes -> BonsaiAccount.fromRLP(BonsaiWorldState.this, address, bytes, true))
.orElse(null);
if (oldAccount == null) {
// This is when an account is both created and deleted within the scope of the same
// block. A not-uncommon DeFi bot pattern.
continue;
}
final Hash addressHash = address.addressHash();
final MerkleTrie<Bytes, Bytes> storageTrie =
createTrie(
(location, key) -> getStorageTrieNode(addressHash, location, key),
oldAccount.getStorageRoot());
try {
Map<Bytes32, Bytes> entriesToDelete = storageTrie.entriesFrom(Bytes32.ZERO, 256);
while (!entriesToDelete.isEmpty()) {
entriesToDelete
.keySet()
.forEach(
k ->
bonsaiUpdater.removeStorageValueBySlotHash(
address.addressHash(), Hash.wrap(k)));
entriesToDelete.keySet().forEach(storageTrie::remove);
if (entriesToDelete.size() == 256) {
entriesToDelete = storageTrie.entriesFrom(Bytes32.ZERO, 256);
} else {
break;
}
}
} catch (MerkleTrieException e) {
// need to throw to trigger the heal
throw new MerkleTrieException(
e.getMessage(), Optional.of(Address.wrap(address)), e.getHash(), e.getLocation());
}
for (final Address address : worldStateUpdater.getStorageToClear()) {
// because we are clearing persisted values we need the account root as persisted
final BonsaiAccount oldAccount =
worldStateStorage
.getAccount(address.addressHash())
.map(bytes -> BonsaiAccount.fromRLP(BonsaiWorldState.this, address, bytes, true))
.orElse(null);
if (oldAccount == null) {
// This is when an account is both created and deleted within the scope of the same
// block. A not-uncommon DeFi bot pattern.
continue;
}
final Hash addressHash = address.addressHash();
final MerkleTrie<Bytes, Bytes> storageTrie =
createTrie(
(location, key) -> getStorageTrieNode(addressHash, location, key),
oldAccount.getStorageRoot());
try {
final StorageConsumingMap<StorageSlotKey, BonsaiValue<UInt256>> storageToDelete =
worldStateUpdater.getStorageToUpdate().get(address);
Map<Bytes32, Bytes> entriesToDelete = storageTrie.entriesFrom(Bytes32.ZERO, 256);
while (!entriesToDelete.isEmpty()) {
entriesToDelete.forEach(
(k, v) -> {
final StorageSlotKey storageSlotKey =
new StorageSlotKey(Hash.wrap(k), Optional.empty());
final UInt256 slotValue = UInt256.fromBytes(Bytes32.leftPad(RLP.decodeValue(v)));
maybeStateUpdater.ifPresent(
bonsaiUpdater ->
bonsaiUpdater.removeStorageValueBySlotHash(
address.addressHash(), storageSlotKey.getSlotHash()));
storageToDelete
.computeIfAbsent(
storageSlotKey, key -> new BonsaiValue<>(slotValue, null, true))
.setPrior(slotValue);
});
entriesToDelete.keySet().forEach(storageTrie::remove);
if (entriesToDelete.size() == 256) {
entriesToDelete = storageTrie.entriesFrom(Bytes32.ZERO, 256);
} else {
break;
}
});
}
} catch (MerkleTrieException e) {
// need to throw to trigger the heal
throw new MerkleTrieException(
e.getMessage(), Optional.of(Address.wrap(address)), e.getHash(), e.getLocation());
}
}
}
@Override

@ -459,7 +459,7 @@ public class BonsaiWorldStateUpdateAccumulator
if (localAccountStorage != null) {
final BonsaiValue<UInt256> value = localAccountStorage.get(storageSlotKey);
if (value != null) {
if (value.isCleared()) {
if (value.isLastStepCleared()) {
return UInt256.ZERO;
}
final UInt256 updated = value.getUpdated();

@ -197,7 +197,7 @@ public class EthPeers {
peer.handleDisconnect();
abortPendingRequestsAssignedToDisconnectedPeers();
if (peer.getReputation().getScore() > USEFULL_PEER_SCORE_THRESHOLD) {
LOG.debug("Disonnected USEFULL peer {}", peer);
LOG.debug("Disconnected USEFULL peer {}", peer);
} else {
LOG.debug("Disconnected EthPeer {}", peer.getShortNodeId());
}

@ -48,7 +48,7 @@ public enum EvmSpecVersion {
/** Shanghai evm spec version. */
SHANGHAI(0, true, "Shanghai", "Finalized"),
/** Cancun evm spec version. */
CANCUN(0, false, "Cancun", "In Development"),
CANCUN(0, true, "Cancun", "Finalized"),
/** Prague evm spec version. */
PRAGUE(0, false, "Prague", "Placeholder"),
/** Osaka evm spec version. */

@ -39,7 +39,7 @@ public class KZGPointEvalPrecompiledContract implements PrecompiledContract {
private static Bytes successResult;
private static void init() {
private static void loadLib() {
CKZG4844JNI.loadNativeLibrary();
Bytes fieldElementsPerBlob =
Bytes32.wrap(Words.intBytes(CKZG4844JNI.FIELD_ELEMENTS_PER_BLOB).xor(Bytes32.ZERO));
@ -57,7 +57,7 @@ public class KZGPointEvalPrecompiledContract implements PrecompiledContract {
*/
public static void init(final Path trustedSetupFile) {
if (loaded.compareAndSet(false, true)) {
init();
loadLib();
final String trustedSetupResourceName = trustedSetupFile.toAbsolutePath().toString();
LOG.info("Loading trusted setup from user-specified resource {}", trustedSetupResourceName);
CKZG4844JNI.loadTrustedSetup(trustedSetupResourceName);
@ -67,17 +67,14 @@ public class KZGPointEvalPrecompiledContract implements PrecompiledContract {
}
/**
* Init the C-KZG native lib using a resource identified by the passed network name as trusted
* setup
* Init the C-KZG native lib using mainnet trusted setup
*
* @param networkName used to select the resource in /kzg-trusted-setups/ to use.
* @throws IllegalStateException if the trusted setup was already loaded
*/
public static void init(final String networkName) {
public static void init() {
if (loaded.compareAndSet(false, true)) {
init();
final String trustedSetupResourceName =
"/kzg-trusted-setups/" + networkName.toLowerCase() + ".txt";
loadLib();
final String trustedSetupResourceName = "/kzg-trusted-setups/mainnet.txt";
LOG.info(
"Loading network trusted setup from classpath resource {}", trustedSetupResourceName);
CKZG4844JNI.loadTrustedSetupFromResource(

@ -50,7 +50,7 @@ class EVMExecutorTest {
@Test
void currentEVM() {
var subject = EVMExecutor.evm();
assertThat(subject.getEVMVersion()).isEqualTo(EvmSpecVersion.SHANGHAI);
assertThat(subject.getEVMVersion()).isEqualTo(EvmSpecVersion.CANCUN);
}
@ParameterizedTest

@ -44,7 +44,7 @@ public class KZGPointEvalPrecompileContractTest {
@BeforeAll
public static void init() {
KZGPointEvalPrecompiledContract.init("mainnet");
KZGPointEvalPrecompiledContract.init();
contract = new KZGPointEvalPrecompiledContract();
}
@ -55,7 +55,7 @@ public class KZGPointEvalPrecompileContractTest {
@ParameterizedTest(name = "{index}")
@MethodSource("getPointEvaluationPrecompileTestVectors")
public void testComputePrecompile(final PrecompileTestParameters parameters) {
void testComputePrecompile(final PrecompileTestParameters parameters) {
when(toRun.getVersionedHashes()).thenReturn(Optional.of(List.of(parameters.versionedHash)));
PrecompiledContract.PrecompileContractResult result =
contract.computePrecompile(parameters.input, toRun);

@ -1,4 +1,4 @@
version=23.10.4-SNAPSHOT
version=24.1.0-SNAPSHOT
org.gradle.welcome=never
# Set exports/opens flags required by Google Java Format and ErrorProne plugins. (JEP-396)
@ -15,4 +15,4 @@ org.gradle.jvmargs=-Xmx4g \
--add-opens jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED \
--add-opens jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED
# Could be moved to sonar properties after https://sonarsource.atlassian.net/browse/SONARGRADL-134
systemProp.sonar.gradle.skipCompile=true
systemProp.sonar.gradle.skipCompile=true

@ -69,7 +69,7 @@ Calculated : ${currentHash}
tasks.register('checkAPIChanges', FileStateChecker) {
description = "Checks that the API for the Plugin-API project does not change without deliberate thought"
files = sourceSets.main.allJava.files
knownHash = 'nB1LhUpMWYFQpBdNJ/3Q79c8kLgUgPmEFzlRMlLUl1Y='
knownHash = 'N583pqJipDs4kJkgL0cPq9PBsYdsLzvUlu2I8Kk+w7g='
}
check.dependsOn('checkAPIChanges')

@ -167,10 +167,17 @@ public interface TrieLog {
}
/**
* Checks if the updated value represents a cleared state.
* Checks if the last step performed a 'clear'.
*
* @return true if the updated value is cleared, false otherwise
* @return true if the last step performed a 'clear', false otherwise.
*/
boolean isCleared();
boolean isLastStepCleared();
/**
* Checks if a 'clear' has been performed at least once.
*
* @return true if a 'clear' has been performed at least once, false otherwise.
*/
boolean isClearedAtLeastOnce();
}
}

@ -77,6 +77,11 @@ public abstract class RocksDBColumnarKeyValueStorage implements SegmentedKeyValu
protected static final long ROCKSDB_BLOCKCACHE_SIZE_HIGH_SPEC = 1_073_741_824L;
/** RocksDb memtable size when using the high spec option */
protected static final long ROCKSDB_MEMTABLE_SIZE_HIGH_SPEC = 1_073_741_824L;
/** Max total size of all WAL file, after which a flush is triggered */
protected static final long WAL_MAX_TOTAL_SIZE = 1_073_741_824L;
/** Expected size of a single WAL file, to determine how many WAL files to keep around */
protected static final long EXPECTED_WAL_FILE_SIZE = 67_108_864L;
/** RocksDb number of log files to keep on disk */
private static final long NUMBER_OF_LOG_FILES_TO_KEEP = 7;
/** RocksDb Time to roll a log file (1 day = 3600 * 24 seconds) */
@ -237,6 +242,8 @@ public abstract class RocksDBColumnarKeyValueStorage implements SegmentedKeyValu
options
.setCreateIfMissing(true)
.setMaxOpenFiles(configuration.getMaxOpenFiles())
.setMaxTotalWalSize(WAL_MAX_TOTAL_SIZE)
.setRecycleLogFileNum(WAL_MAX_TOTAL_SIZE / EXPECTED_WAL_FILE_SIZE)
.setStatistics(stats)
.setCreateMissingColumnFamilies(true)
.setLogFileTimeToRoll(TIME_TO_ROLL_LOG_FILE)
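The arithmetic behind the new WAL settings, restated as a sketch (constants copied from the hunk above; the wrapper class is illustrative only):

class WalTuningSketch {
  static final long WAL_MAX_TOTAL_SIZE = 1_073_741_824L; // 1 GiB of WAL triggers a flush
  static final long EXPECTED_WAL_FILE_SIZE = 67_108_864L; // ~64 MiB per WAL file
  // 1 GiB / 64 MiB = 16 WAL files retained for recycling
  static final long RECYCLED_WAL_FILES = WAL_MAX_TOTAL_SIZE / EXPECTED_WAL_FILE_SIZE;
}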
