verkle implementation

Signed-off-by: Karim Taam <karim.t2am@gmail.com>
pull/6920/head
Karim Taam 8 months ago
parent c7d263afd8
commit 28ca47c4b4
  1. 3
      .circleci/config.yml
  2. 2
      .github/issue_template.md
  3. 19
      .github/pull_request_template.md
  4. 89
      .github/workflows/acceptance-tests.yml
  5. 76
      .github/workflows/artifacts.yml
  6. 27
      .github/workflows/codeql.yml
  7. 80
      .github/workflows/docker.yml
  8. 69
      .github/workflows/integration-tests.yml
  9. 121
      .github/workflows/nightly.yml
  10. 49
      .github/workflows/parallel-unit-tests.yml
  11. 22
      .github/workflows/pr-checklist-on-open.yml
  12. 102
      .github/workflows/pre-review.yml
  13. 154
      .github/workflows/reference-tests.yml
  14. 23
      .github/workflows/release.yml
  15. 14
      .github/workflows/sonarcloud.yml
  16. 108
      CHANGELOG.md
  17. 9
      acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/AcceptanceTestBase.java
  18. 1
      acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/BlockUtils.java
  19. 29
      acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/node/BesuNode.java
  20. 28
      acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/node/ProcessBesuNodeRunner.java
  21. 140
      acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/node/ThreadBesuNodeRunner.java
  22. 8
      acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/node/configuration/BesuNodeConfiguration.java
  23. 10
      acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/node/configuration/BesuNodeConfigurationBuilder.java
  24. 20
      acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/node/configuration/BesuNodeFactory.java
  25. 8
      acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/node/configuration/NodeConfigurationFactory.java
  26. 8
      acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/privacy/PrivacyNode.java
  27. 2
      acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/pubsub/WebSocket.java
  28. 3
      acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/transaction/DeploySmartContractTransaction.java
  29. 9
      acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/transaction/NodeRequests.java
  30. 46
      acceptance-tests/tests/build.gradle
  31. 2
      acceptance-tests/tests/contracts/SimpleStorage.sol
  32. 42
      acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/bft/BftMiningAcceptanceTest.java
  33. 180
      acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/clique/CliqueMiningAcceptanceTest.java
  34. 12
      acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/jsonrpc/EthSendRawTransactionAcceptanceTest.java
  35. 48
      acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/jsonrpc/ExecutionEngineEip6110AcceptanceTest.java
  36. 14
      acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/permissioning/NodeSmartContractPermissioningV2AcceptanceTest.java
  37. 27
      acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/plugins/PermissioningPluginTest.java
  38. 9
      acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/PrivacyClusterAcceptanceTest.java
  39. 10
      acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/pubsub/NewPendingTransactionAcceptanceTest.java
  40. 77
      acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/genesis.json
  41. 34
      acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/01_cancun_prepare_payload.json
  42. 44
      acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/02_cancun_getPayloadV3.json
  43. 40
      acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/03_cancun_newPayloadV3.json
  44. 28
      acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/04_cancun_forkchoiceUpdatedV3.json
  45. 34
      acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/05_eip6110_forkchoiceUpdatedV3.json
  46. 45
      acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/06_eip6110_getPayloadV6110.json
  47. 14
      acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/07_eip6110_send_raw_transaction.json
  48. 41
      acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/08_eip6110_invalid_null_deposits_execute_payload.json
  49. 45
      acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/09_eip6110_newPayloadV6110.json
  50. 34
      acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/10_eip6110_forkchoiceUpdatedV3.json
  51. 45
      acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/11_eip6110_getPayloadV6110.json
  52. 2
      besu/build.gradle
  53. 7
      besu/src/main/java/org/hyperledger/besu/RunnerBuilder.java
  54. 385
      besu/src/main/java/org/hyperledger/besu/cli/BesuCommand.java
  55. 16
      besu/src/main/java/org/hyperledger/besu/cli/ConfigurationOverviewBuilder.java
  56. 2
      besu/src/main/java/org/hyperledger/besu/cli/DefaultCommandValues.java
  57. 16
      besu/src/main/java/org/hyperledger/besu/cli/config/NetworkName.java
  58. 19
      besu/src/main/java/org/hyperledger/besu/cli/config/ProfileName.java
  59. 3
      besu/src/main/java/org/hyperledger/besu/cli/converter/MetricCategoryConverter.java
  60. 26
      besu/src/main/java/org/hyperledger/besu/cli/options/MiningOptions.java
  61. 27
      besu/src/main/java/org/hyperledger/besu/cli/options/TransactionPoolOptions.java
  62. 38
      besu/src/main/java/org/hyperledger/besu/cli/options/stable/DataStorageOptions.java
  63. 5
      besu/src/main/java/org/hyperledger/besu/cli/options/stable/LoggingLevelOption.java
  64. 6
      besu/src/main/java/org/hyperledger/besu/cli/options/unstable/NetworkingOptions.java
  65. 61
      besu/src/main/java/org/hyperledger/besu/cli/options/unstable/SynchronizerOptions.java
  66. 13
      besu/src/main/java/org/hyperledger/besu/cli/subcommands/operator/GenerateBlockchainConfig.java
  67. 106
      besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/RocksDbSubCommand.java
  68. 105
      besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/RocksDbUsageHelper.java
  69. 16
      besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/StorageSubCommand.java
  70. 21
      besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogHelper.java
  71. 88
      besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogSubCommand.java
  72. 170
      besu/src/main/java/org/hyperledger/besu/controller/BesuControllerBuilder.java
  73. 29
      besu/src/main/java/org/hyperledger/besu/controller/CliqueBesuControllerBuilder.java
  74. 25
      besu/src/main/java/org/hyperledger/besu/controller/ConsensusScheduleBesuControllerBuilder.java
  75. 19
      besu/src/main/java/org/hyperledger/besu/controller/IbftBesuControllerBuilder.java
  76. 7
      besu/src/main/java/org/hyperledger/besu/controller/MainnetBesuControllerBuilder.java
  77. 7
      besu/src/main/java/org/hyperledger/besu/controller/MergeBesuControllerBuilder.java
  78. 19
      besu/src/main/java/org/hyperledger/besu/controller/QbftBesuControllerBuilder.java
  79. 27
      besu/src/main/java/org/hyperledger/besu/controller/TransitionBesuControllerBuilder.java
  80. 67
      besu/src/main/java/org/hyperledger/besu/services/BesuConfigurationImpl.java
  81. 51
      besu/src/main/java/org/hyperledger/besu/services/BlockchainServiceImpl.java
  82. 35
      besu/src/main/java/org/hyperledger/besu/services/PluginTransactionValidatorServiceImpl.java
  83. 9
      besu/src/main/java/org/hyperledger/besu/services/RpcEndpointServiceImpl.java
  84. 13
      besu/src/main/java/org/hyperledger/besu/services/TransactionSelectionServiceImpl.java
  85. 6
      besu/src/main/resources/log4j2.xml
  86. 6
      besu/src/main/resources/org/hyperledger/besu/cli/launcher.json
  87. 15
      besu/src/test/java/org/hyperledger/besu/ForkIdsNetworkConfigTest.java
  88. 19
      besu/src/test/java/org/hyperledger/besu/PrivacyTest.java
  89. 32
      besu/src/test/java/org/hyperledger/besu/RunnerTest.java
  90. 3
      besu/src/test/java/org/hyperledger/besu/chainimport/JsonBlockImporterTest.java
  91. 1547
      besu/src/test/java/org/hyperledger/besu/cli/BesuCommandTest.java
  92. 32
      besu/src/test/java/org/hyperledger/besu/cli/CascadingDefaultProviderTest.java
  93. 39
      besu/src/test/java/org/hyperledger/besu/cli/CommandTestAbstract.java
  94. 11
      besu/src/test/java/org/hyperledger/besu/cli/options/AbstractCLIOptionsTest.java
  95. 23
      besu/src/test/java/org/hyperledger/besu/cli/options/MiningOptionsTest.java
  96. 1
      besu/src/test/java/org/hyperledger/besu/cli/options/NetworkingOptionsTest.java
  97. 30
      besu/src/test/java/org/hyperledger/besu/cli/options/TransactionPoolOptionsTest.java
  98. 42
      besu/src/test/java/org/hyperledger/besu/cli/options/stable/DataStorageOptionsTest.java
  99. 25
      besu/src/test/java/org/hyperledger/besu/cli/subcommands/storage/StorageSubCommandTest.java
  100. 10
      besu/src/test/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogHelperTest.java
  101. Some files were not shown because too many files have changed in this diff Show More

@ -500,6 +500,3 @@ workflows:
jobs: jobs:
- assemble - assemble
- dockerScan - dockerScan
- acceptanceTestsPrivacy:
requires:
- assemble

@ -3,7 +3,7 @@
<!-- comply with it, including treating everyone with respect: --> <!-- comply with it, including treating everyone with respect: -->
<!-- https://github.com/hyperledger/besu/blob/main/CODE_OF_CONDUCT.md --> <!-- https://github.com/hyperledger/besu/blob/main/CODE_OF_CONDUCT.md -->
<!-- * Reproduced the issue in the latest version of the software --> <!-- * Reproduced the issue in the latest version of the software -->
<!-- * Read the debugging docs: https://besu.hyperledger.org/en/stable/HowTo/Monitor/Logging/ --> <!-- * Read the debugging docs: https://besu.hyperledger.org/private-networks/how-to -->
<!-- * Duplicate Issue check: https://github.com/search?q=+is%3Aissue+repo%3Ahyperledger/Besu --> <!-- * Duplicate Issue check: https://github.com/search?q=+is%3Aissue+repo%3Ahyperledger/Besu -->
<!-- Note: Not all sections will apply to all issue types. --> <!-- Note: Not all sections will apply to all issue types. -->

@ -1,8 +1,21 @@
<!-- Thanks for sending a pull request! Please check out our contribution guidelines: -->
<!-- https://github.com/hyperledger/besu/blob/main/CONTRIBUTING.md -->
## PR description ## PR description
## Fixed Issue(s) ## Fixed Issue(s)
<!-- Please link to fixed issue(s) here using format: fixes #<issue number> --> <!-- Please link to fixed issue(s) here using format: fixes #<issue number> -->
<!-- Example: "fixes #2" --> <!-- Example: "fixes #2" -->
### Thanks for sending a pull request! Have you done the following?
- [ ] Checked out our [contribution guidelines](https://github.com/hyperledger/besu/blob/main/CONTRIBUTING.md)?
- [ ] Considered documentation and added the `doc-change-required` label to this PR [if updates are required](https://wiki.hyperledger.org/display/BESU/Documentation).
- [ ] Considered the changelog and included an [update if required](https://wiki.hyperledger.org/display/BESU/Changelog).
- [ ] For database changes (e.g. KeyValueSegmentIdentifier) considered compatibility and performed forwards and backwards compatibility tests
### Locally, you can run these tests to catch failures early:
- [ ] unit tests: `./gradlew build`
- [ ] acceptance tests: `./gradlew acceptanceTest`
- [ ] integration tests: `./gradlew integrationTest`
- [ ] reference tests: `./gradlew ethereum:referenceTests:referenceTests`

@ -1,72 +1,42 @@
name: acceptance-tests name: acceptance-tests
on: on:
workflow_dispatch:
pull_request: pull_request:
pull_request_review: branches:
types: [submitted] - main
- release-*
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
env: env:
GRADLE_OPTS: "-Xmx6g -Dorg.gradle.daemon=false" GRADLE_OPTS: "-Xmx6g -Dorg.gradle.daemon=false"
total-runners: 16 total-runners: 16
jobs: jobs:
shouldRun:
name: checks to ensure we should run
# necessary because there is no single PR approved event, need to check all comments/approvals/denials
runs-on: ubuntu-22.04
outputs:
shouldRun: ${{steps.shouldRun.outputs.result}}
steps:
- name: required check
id: shouldRun
uses: actions/github-script@v7.0.1
env:
# fun fact, this changes based on incoming event, it will be different when we run this on pushes to main
RELEVANT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
with:
script: |
const { RELEVANT_SHA } = process.env;
const { data: { statuses } } = await github.rest.repos.getCombinedStatusForRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: RELEVANT_SHA,
});
const acceptanceTested = statuses && statuses.filter(({ context }) => context === 'acceptance-tests');
const alreadyRun = acceptanceTested && acceptanceTested.find(({ state }) => state === 'success') > 0;
const { data: reviews } = await github.rest.pulls.listReviews({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: context.issue.number,
});
const approvingReviews = reviews && reviews.filter(review => review.state === 'APPROVED');
const shouldRun = !alreadyRun && github.actor != 'dependabot[bot]' && (approvingReviews.length > 0);
console.log("tests should be run = %j", shouldRun);
console.log("alreadyRun = %j", alreadyRun);
console.log("approvingReviews = %j", approvingReviews.length);
return shouldRun;
acceptanceTestEthereum: acceptanceTestEthereum:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
name: "Acceptance Runner" name: "Acceptance Runner"
needs: shouldRun
permissions: permissions:
statuses: write statuses: write
checks: write checks: write
if: ${{ needs.shouldRun.outputs.shouldRun == 'true'}}
strategy: strategy:
fail-fast: true fail-fast: true
matrix: matrix:
runner_index: [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15] runner_index: [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15]
steps: steps:
- name: Checkout Repo - name: Checkout Repo
uses: actions/checkout@v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
ref: ${{ github.event.pull_request.head.sha || github.ref }}
- name: Set up Java - name: Set up Java
uses: actions/setup-java@v4.0.0 uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93
with: with:
distribution: temurin distribution: temurin
java-version: 17 java-version: 17
- name: get acceptance test report - name: get acceptance test report
uses: dawidd6/action-download-artifact@v2 uses: dawidd6/action-download-artifact@e7466d1a7587ed14867642c2ca74b5bcc1e19a2d
with: with:
branch: main branch: main
name_is_regexp: true name_is_regexp: true
@ -74,10 +44,12 @@ jobs:
path: tmp/junit-xml-reports-downloaded path: tmp/junit-xml-reports-downloaded
if_no_artifact_found: true if_no_artifact_found: true
- name: setup gradle - name: setup gradle
uses: gradle/gradle-build-action@v2.12.0 uses: gradle/actions/setup-gradle@9e899d11ad247ec76be7a60bc1cf9d3abbb9e7f1
with:
cache-disabled: true
- name: Split tests - name: Split tests
id: split-tests id: split-tests
uses: r7kamura/split-tests-by-timings@v0 uses: r7kamura/split-tests-by-timings@9322bd292d9423e2bc5a65bec548901801341e3f
with: with:
reports: tmp/junit-xml-reports-downloaded reports: tmp/junit-xml-reports-downloaded
glob: 'acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/**/*Test.java' glob: 'acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/**/*Test.java'
@ -90,25 +62,30 @@ jobs:
#then drop file extension, then insert --tests option between each. #then drop file extension, then insert --tests option between each.
run: cat testList.txt | sed -e 's@acceptance-tests/tests/src/test/java/@--tests\ @g;s@/@.@g;s/\.java//g' > gradleArgs.txt run: cat testList.txt | sed -e 's@acceptance-tests/tests/src/test/java/@--tests\ @g;s@/@.@g;s/\.java//g' > gradleArgs.txt
- name: run acceptance tests - name: run acceptance tests
run: ./gradlew acceptanceTest `cat gradleArgs.txt` -Dorg.gradle.parallel=true -Dorg.gradle.caching=true run: ./gradlew acceptanceTestNotPrivacy `cat gradleArgs.txt` -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
- name: cleanup tempfiles - name: cleanup tempfiles
run: rm testList.txt gradleArgs.txt run: rm testList.txt gradleArgs.txt
- name: Upload Acceptance Test Results - name: Upload Acceptance Test Results
uses: actions/upload-artifact@v3.1.0 uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3
with: with:
name: acceptance-node-${{matrix.runner_index}}-test-results name: acceptance-node-${{matrix.runner_index}}-test-results
path: 'acceptance-tests/tests/build/test-results/acceptanceTest/TEST-*.xml' path: 'acceptance-tests/tests/build/test-results/**/TEST-*.xml'
- name: Publish Test Report accepttests-passed:
uses: mikepenz/action-junit-report@v4 name: "accepttests-passed"
if: (success() || failure()) # always run even if the build step fails
with:
report_paths: 'acceptance-tests/tests/build/test-results/acceptanceTest/TEST-*.xml'
acceptance-tests:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
needs: [ acceptanceTestEthereum ] needs: [ acceptanceTestEthereum ]
permissions: permissions:
checks: write checks: write
statuses: write statuses: write
if: always()
steps: steps:
- name: consolidation # Fail if any `needs` job was not a success.
run: echo "consolidating statuses" # Along with `if: always()`, this allows this job to act as a single required status check for the entire workflow.
- name: Fail on workflow error
run: exit 1
if: >-
${{
contains(needs.*.result, 'failure')
|| contains(needs.*.result, 'cancelled')
|| contains(needs.*.result, 'skipped')
}}

@ -1,10 +1,12 @@
name: artifacts name: release artifacts
on: on:
release: release:
types: types:
- prereleased - prereleased
env:
GRADLE_OPTS: "-Dorg.gradle.parallel=true -Dorg.gradle.caching=true"
jobs: jobs:
artifacts: artifacts:
@ -13,17 +15,19 @@ jobs:
contents: write contents: write
steps: steps:
- name: checkout - name: checkout
uses: actions/checkout@v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- name: Set up JDK 17 - name: Set up JDK 17
uses: actions/setup-java@v4.0.0 uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93
with: with:
distribution: 'temurin' distribution: 'temurin'
java-version: '17' java-version: '17'
- name: setup gradle - name: setup gradle
uses: gradle/gradle-build-action@v2.12.0 uses: gradle/actions/setup-gradle@9e899d11ad247ec76be7a60bc1cf9d3abbb9e7f1
- name: assemble distributions with:
cache-disabled: true
- name: assemble release
run: run:
./gradlew -Prelease.releaseVersion=${{github.ref_name}} assemble -Dorg.gradle.parallel=true -Dorg.gradle.caching=true ./gradlew -Prelease.releaseVersion=${{github.event.release.name}} -Pversion=${{github.event.release.name}} assemble
- name: hashes - name: hashes
id: hashes id: hashes
run: | run: |
@ -31,46 +35,68 @@ jobs:
echo "zipSha=$(shasum -a 256 besu*.zip)" >> $GITHUB_OUTPUT echo "zipSha=$(shasum -a 256 besu*.zip)" >> $GITHUB_OUTPUT
echo "tarSha=$(shasum -a 256 besu*.tar.gz)" >> $GITHUB_OUTPUT echo "tarSha=$(shasum -a 256 besu*.tar.gz)" >> $GITHUB_OUTPUT
- name: upload tarball - name: upload tarball
uses: actions/upload-artifact@v3 uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3
with: with:
path: 'build/distributions/besu*.tar.gz' path: 'build/distributions/besu*.tar.gz'
name: besu-${{ github.ref_name }}.tar.gz name: besu-${{ github.event.release.name }}.tar.gz
compression-level: 0
- name: upload zipfile - name: upload zipfile
uses: actions/upload-artifact@v3 uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3
with: with:
path: 'build/distributions/besu*.zip' path: 'build/distributions/besu*.zip'
name: besu-${{ github.ref_name }}.zip name: besu-${{ github.event.release.name }}.zip
- name: Upload Release assets compression-level: 0
uses: softprops/action-gh-release@v1
with:
append_body: true
files: |
build/distributions/besu*.tar.gz
build/distributions/besu*.zip
body: |
${{steps.hashes.outputs.tarSha}}
${{steps.hashes.outputs.zipSha}}
testWindows: testWindows:
runs-on: windows-2022 runs-on: windows-2022
needs: artifacts needs: artifacts
timeout-minutes: 10 timeout-minutes: 10
if: ${{ github.actor != 'dependabot[bot]' }}
steps: steps:
- name: Set up Java - name: Set up Java
uses: actions/setup-java@v4.0.0 uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93
with: with:
distribution: adopt distribution: adopt
java-version: 17 java-version: 17
- name: Download zip - name: Download zip
uses: actions/download-artifact@v3 uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe
with: with:
name: besu-${{ github.ref_name }}.zip pattern: besu-*.zip
merge-multiple: true
- name: test Besu - name: test Besu
run: | run: |
dir
unzip besu-*.zip -d besu-tmp unzip besu-*.zip -d besu-tmp
cd besu-tmp cd besu-tmp
mv besu-* ../besu mv besu-* ../besu
cd .. cd ..
besu\bin\besu.bat --help besu\bin\besu.bat --help
besu\bin\besu.bat --version besu\bin\besu.bat --version
publish:
runs-on: ubuntu-22.04
needs: testWindows
steps:
- name: Download archives
uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe
with:
pattern: besu-*
merge-multiple: true
path: 'build/distributions'
- name: Upload Release assets
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844
with:
append_body: true
files: |
build/distributions/besu*.tar.gz
build/distributions/besu*.zip
body: |
${{steps.hashes.outputs.tarSha}}
${{steps.hashes.outputs.zipSha}}
arifactoryPublish:
runs-on: ubuntu-22.04
needs: artifacts
steps:
- name: Artifactory Publish
env:
ARTIFACTORY_USER: ${{ secrets.BESU_ARTIFACTORY_USER }}
ARTIFACTORY_KEY: ${{ secrets.BESU_ARTIFACTORY_TOKEN }}
run: ./gradlew -Prelease.releaseVersion=${{ github.event.release.name }} -Pversion=${{github.event.release.name}} artifactoryPublish

@ -12,15 +12,12 @@
name: "CodeQL" name: "CodeQL"
on: on:
push: workflow_dispatch:
branches: [ main ] schedule:
pull_request: # * is a special character in YAML so you have to quote this string
branches: [ main ] # expression evaluates to midnight every night
paths-ignore: - cron: '0 0 * * *'
- '**/*.json'
- '**/*.md'
- '**/*.properties'
- '**/*.txt'
jobs: jobs:
analyze: analyze:
name: Analyze name: Analyze
@ -31,15 +28,15 @@ jobs:
security-events: write security-events: write
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- name: Set up Java - name: Set up Java
uses: actions/setup-java@v4.0.0 uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93
with: with:
distribution: 'temurin' distribution: 'temurin'
java-version: 17 java-version: 17
# Initializes the CodeQL tools for scanning. # Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL - name: Initialize CodeQL
uses: github/codeql-action/init@v2 uses: github/codeql-action/init@2f93e4319b2f04a2efc38fa7f78bd681bc3f7b2f
with: with:
languages: ${{ matrix.language }} languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file. # If you wish to specify custom queries, you can do so here or in a config file.
@ -48,9 +45,11 @@ jobs:
queries: security-and-quality,security-extended queries: security-and-quality,security-extended
- name: setup gradle - name: setup gradle
uses: gradle/gradle-build-action@v2.12.0 uses: gradle/actions/setup-gradle@9e899d11ad247ec76be7a60bc1cf9d3abbb9e7f1
with:
cache-disabled: true
- name: compileJava noscan - name: compileJava noscan
run: | run: |
JAVA_OPTS="-Xmx2048M" ./gradlew --no-scan compileJava JAVA_OPTS="-Xmx2048M" ./gradlew --no-scan compileJava
- name: Perform CodeQL Analysis - name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2 uses: github/codeql-action/analyze@2f93e4319b2f04a2efc38fa7f78bd681bc3f7b2f

@ -4,31 +4,25 @@ on:
types: types:
- prereleased - prereleased
env: env:
registry: ghcr.io registry: docker.io
jobs: jobs:
hadolint: hadolint:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
steps: steps:
- name: Checkout Repo - name: Checkout Repo
uses: actions/checkout@v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- name: Set up Java - name: Set up Java
uses: actions/setup-java@v4.0.0 uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93
with: with:
distribution: temurin distribution: temurin
java-version: 17 java-version: 17
- name: setup gradle - name: setup gradle
uses: gradle/gradle-build-action@v2.12.0 uses: gradle/actions/setup-gradle@9e899d11ad247ec76be7a60bc1cf9d3abbb9e7f1
- name: hadoLint_openj9-jdk_17 with:
run: docker run --rm -i hadolint/hadolint < docker/openj9-jdk-17/Dockerfile cache-disabled: true
- name: hadoLint_openjdk_17 - name: hadoLint
run: docker run --rm -i hadolint/hadolint < docker/openjdk-17/Dockerfile run: docker run --rm -i hadolint/hadolint < docker/Dockerfile
- name: hadoLint_openjdk_17_debug
run: docker run --rm -i hadolint/hadolint < docker/openjdk-17-debug/Dockerfile
- name: hadoLint_openjdk_latest
run: docker run --rm -i hadolint/hadolint < docker/openjdk-latest/Dockerfile
- name: hadoLint_graalvm
run: docker run --rm -i hadolint/hadolint < docker/graalvm/Dockerfile
buildDocker: buildDocker:
needs: hadolint needs: hadolint
permissions: permissions:
@ -55,37 +49,40 @@ jobs:
echo "ARCH=arm64" >> $GITHUB_OUTPUT echo "ARCH=arm64" >> $GITHUB_OUTPUT
fi fi
- name: Checkout Repo - name: Checkout Repo
uses: actions/checkout@v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- name: short sha - name: short sha
id: shortSha id: shortSha
run: echo "sha=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT run: echo "sha=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Set up Java - name: Set up Java
uses: actions/setup-java@v4.0.0 uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93
with: with:
distribution: temurin distribution: temurin
java-version: 17 java-version: 17
- name: setup gradle - name: setup gradle
uses: gradle/gradle-build-action@v2.12.0 uses: gradle/actions/setup-gradle@9e899d11ad247ec76be7a60bc1cf9d3abbb9e7f1
with:
cache-disabled: true
- name: install goss - name: install goss
run: | run: |
mkdir -p docker/reports mkdir -p docker/reports
curl -L https://github.com/aelsabbahy/goss/releases/download/v0.4.4/goss-${{ steps.prep.outputs.PLATFORM_PAIR }} -o ./docker/tests/goss-${{ steps.prep.outputs.PLATFORM_PAIR }} curl -L https://github.com/aelsabbahy/goss/releases/download/v0.4.4/goss-${{ steps.prep.outputs.PLATFORM_PAIR }} -o ./docker/tests/goss-${{ steps.prep.outputs.PLATFORM_PAIR }}
- name: login to ${{ env.registry }}
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
with:
registry: ${{ env.registry }}
username: ${{ secrets.DOCKER_USER_RW }}
password: ${{ secrets.DOCKER_PASSWORD_RW }}
- name: build and test docker - name: build and test docker
uses: gradle/gradle-build-action@v2.12.0 uses: gradle/actions/setup-gradle@9e899d11ad247ec76be7a60bc1cf9d3abbb9e7f1
env: env:
architecture: ${{ steps.prep.outputs.ARCH }} architecture: ${{ steps.prep.outputs.ARCH }}
with: with:
arguments: testDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Prelease.releaseVersion=${{ github.ref_name }} cache-disabled: true
- name: login to ghcr arguments: testDocker -PdockerOrgName=${{ env.registry }}/${{ secrets.DOCKER_ORG }} -Pversion=${{github.event.release.name}} -Prelease.releaseVersion=${{ github.event.release.name }}
uses: docker/login-action@v3.0.0
with:
registry: ${{ env.registry }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: publish - name: publish
env: env:
architecture: ${{ steps.prep.outputs.ARCH }} architecture: ${{ steps.prep.outputs.ARCH }}
run: ./gradlew --no-daemon dockerUpload -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Prelease.releaseVersion=${{ github.ref_name }} run: ./gradlew --no-daemon dockerUpload -PdockerOrgName=${{ env.registry }}/${{ secrets.DOCKER_ORG }} -Pversion=${{github.event.release.name}} -Prelease.releaseVersion=${{ github.event.release.name }}
multiArch: multiArch:
needs: buildDocker needs: buildDocker
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
@ -94,20 +91,33 @@ jobs:
packages: write packages: write
steps: steps:
- name: Checkout Repo - name: Checkout Repo
uses: actions/checkout@v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- name: Set up Java - name: Set up Java
uses: actions/setup-java@v4.0.0 uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93
with: with:
distribution: temurin distribution: temurin
java-version: 17 java-version: 17
- name: setup gradle - name: setup gradle
uses: gradle/gradle-build-action@v2.12.0 uses: gradle/actions/setup-gradle@9e899d11ad247ec76be7a60bc1cf9d3abbb9e7f1
- name: login to ghcr with:
uses: docker/login-action@v3.0.0 cache-disabled: true
- name: login to ${{ env.registry }}
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
with: with:
registry: ${{ env.registry }} registry: ${{ env.registry }}
username: ${{ github.actor }} username: ${{ secrets.DOCKER_USER_RW }}
password: ${{ secrets.GITHUB_TOKEN }} password: ${{ secrets.DOCKER_PASSWORD_RW }}
- name: multi-arch docker - name: multi-arch docker
run: ./gradlew manifestDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Prelease.releaseVersion=${{ github.ref_name }} run: ./gradlew manifestDocker -PdockerOrgName=${{ env.registry }}/${{ secrets.DOCKER_ORG }} -Pversion=${{github.event.release.name}} -Prelease.releaseVersion=${{ github.event.release.name }}
amendNotes:
needs: multiArch
runs-on: ubuntu-22.04
permissions:
contents: write
steps:
- name: add pull command to release notes
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844
with:
append_body: true
body: |
`docker pull ${{env.registry}}/${{secrets.DOCKER_ORG}}/besu:${{github.event.release.name}}`

@ -1,73 +1,40 @@
name: integration-tests name: integration-tests
on: on:
workflow_dispatch:
pull_request: pull_request:
pull_request_review: branches:
types: - main
- submitted - release-*
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
env: env:
GRADLE_OPTS: "-Xmx6g -Dorg.gradle.daemon=false" GRADLE_OPTS: "-Xmx6g -Dorg.gradle.daemon=false -Dorg.gradle.parallel=true -Dorg.gradle.caching=true"
jobs: jobs:
shouldRun:
name: checks to ensure we should run
runs-on: ubuntu-22.04
outputs:
shouldRun: ${{steps.shouldRun.outputs.result}}
steps:
- name: required check
id: shouldRun
uses: actions/github-script@v7.0.1
env:
# fun fact, this changes based on incoming event, it will be different when we run this on pushes to main
RELEVANT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
with:
script: |
const { RELEVANT_SHA } = process.env;
const { data: { statuses } } = await github.rest.repos.getCombinedStatusForRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: RELEVANT_SHA,
});
const intTested = statuses && statuses.filter(({ context }) => context === 'integration-tests');
const alreadyRun = intTested && intTested.find(({ state }) => state === 'success') > 0;
const { data: reviews } = await github.rest.pulls.listReviews({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: context.issue.number,
});
const approvingReviews = reviews && reviews.filter(review => review.state === 'APPROVED');
const shouldRun = !alreadyRun && context.actor != 'dependabot[bot]' && (approvingReviews.length > 0);
console.log("tests should be run = %j", shouldRun);
console.log("alreadyRun = %j", alreadyRun);
console.log("approvingReviews = %j", approvingReviews.length);
return shouldRun;
integration-tests: integration-tests:
name: "integration-passed"
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
needs: shouldRun
if: ${{ needs.shouldRun.outputs.shouldRun == 'true' }}
permissions: permissions:
statuses: write statuses: write
checks: write checks: write
steps: steps:
- name: Checkout Repo - name: Checkout Repo
uses: actions/checkout@v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
ref: ${{ github.event.pull_request.head.sha || github.ref }}
- name: Set up Java - name: Set up Java
uses: actions/setup-java@v4.0.0 uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93
with: with:
distribution: temurin distribution: temurin
java-version: 17 java-version: 17
- name: setup gradle - name: setup gradle
uses: gradle/gradle-build-action@v2.12.0 uses: gradle/actions/setup-gradle@9e899d11ad247ec76be7a60bc1cf9d3abbb9e7f1
- name: run integration tests
run: ./gradlew integrationTest compileJmh -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
- name: Publish Test Report
uses: mikepenz/action-junit-report@v4
if: (success() || failure())
with: with:
report_paths: '**/build/test-results/integrationTest/TEST-*.xml' cache-disabled: true
- name: run integration tests
run: ./gradlew integrationTest compileJmh

@ -1,121 +0,0 @@
name: nightly
on:
workflow_dispatch:
schedule:
# * is a special character in YAML so you have to quote this string
# expression evaluates to midnight every night
- cron: '0 0 * * *'
env:
nightly-tag: develop
registry: ghcr.io
jobs:
hadolint:
runs-on: ubuntu-22.04
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: setup gradle
uses: gradle/gradle-build-action@v2.12.0
- name: hadoLint_openj9-jdk_17
run: docker run --rm -i hadolint/hadolint < docker/openj9-jdk-17/Dockerfile
- name: hadoLint_openjdk_17
run: docker run --rm -i hadolint/hadolint < docker/openjdk-17/Dockerfile
- name: hadoLint_openjdk_17_debug
run: docker run --rm -i hadolint/hadolint < docker/openjdk-17-debug/Dockerfile
- name: hadoLint_openjdk_latest
run: docker run --rm -i hadolint/hadolint < docker/openjdk-latest/Dockerfile
- name: hadoLint_graalvm
run: docker run --rm -i hadolint/hadolint < docker/graalvm/Dockerfile
buildDocker:
needs: hadolint
permissions:
contents: read
packages: write
strategy:
fail-fast: false
matrix:
platform:
- ubuntu-22.04
- [self-hosted, ARM64]
runs-on: ${{ matrix.platform }}
steps:
- name: Prepare
id: prep
run: |
platform=${{ matrix.platform }}
if [ "$platform" = 'ubuntu-22.04' ]; then
echo "PLATFORM_PAIR=linux-amd64" >> $GITHUB_OUTPUT
echo "ARCH=amd64" >> $GITHUB_OUTPUT
else
echo "PLATFORM_PAIR=linux-arm64" >> $GITHUB_OUTPUT
echo "ARCH=arm64" >> $GITHUB_OUTPUT
fi
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: short sha
id: shortSha
run: echo "sha=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: setup gradle
uses: gradle/gradle-build-action@v2.12.0
- name: build image
uses: gradle/gradle-build-action@v2.12.0
with:
arguments: distDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Pbranch=main
- name: install goss
run: |
mkdir -p docker/reports
curl -L https://github.com/aelsabbahy/goss/releases/download/v0.4.4/goss-${{ steps.prep.outputs.PLATFORM_PAIR }} -o ./docker/tests/goss-${{ steps.prep.outputs.PLATFORM_PAIR }}
- name: test docker
uses: gradle/gradle-build-action@v2.12.0
env:
architecture: ${{ steps.prep.outputs.ARCH }}
with:
arguments: testDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Pbranch=main
- name: login to ghcr
uses: docker/login-action@v3.0.0
with:
registry: ${{ env.registry }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: publish
env:
architecture: ${{ steps.prep.outputs.ARCH }}
run: ./gradlew --no-daemon dockerUpload -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Pbranch=main
multiArch:
permissions:
contents: read
packages: write
needs: buildDocker
runs-on: ubuntu-22.04
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: setup gradle
uses: gradle/gradle-build-action@v2.12.0
- name: Login to DockerHub
uses: docker/login-action@v3.0.0
with:
registry: ${{ env.registry }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: multi-arch docker
run: ./gradlew manifestDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Pbranch=main

@ -1,49 +0,0 @@
name: parallel-unit-tests
#experimental work in progress - trying to figure out how to split tests across multi-modules by runtime
on:
workflow_dispatch:
env:
GRADLE_OPTS: "-Dorg.gradle.daemon=false"
total-runners: 4
jobs:
junit:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
runner_index:
- 0
- 1
- 2
- 3
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: Split tests
id: split-tests
uses: chaosaffe/split-tests@v1-alpha.1
with:
glob: '**/src/test/java/**/*.java'
split-total: ${{ env.total-runners }}
split-index: ${{ matrix.runner_index }}
line-count: true
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: adopt
java-version: 17
cache: gradle
- name: write out test list
run: echo "${{ steps.split-tests.outputs.test-suite }}" >> testList.txt
- name: debug testfile paths
run: cat testList.txt
- name: format gradle args
# regex means: truncate file paths to align with package name, replacing with tests switch, then drop file extension,
# then swap path delimiter with package delimiter
run: cat testList.txt | sed -e 's/[^ ]*src\/test\/java\//--tests\ /g' -e 's/\.java//g' -e 's/\//\./g' >> gradleArgs.txt
- name: debug test class list
run: cat gradleArgs.txt
- name: run unit tests
run: ./gradlew test `cat gradleArgs.txt`

@ -1,22 +0,0 @@
name: "comment on pr with checklist"
on:
pull_request_target:
types: [ opened ]
branches: [ main ]
jobs:
checklist:
name: "add checklist as a comment on newly opened PRs"
runs-on: ubuntu-22.04
permissions:
pull-requests: write
steps:
- uses: actions/github-script@v7.0.1
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: '- [ ] I thought about documentation and added the `doc-change-required` label to this PR if [updates are required](https://wiki.hyperledger.org/display/BESU/Documentation).\n- [ ] I thought about the changelog and included a [changelog update if required](https://wiki.hyperledger.org/display/BESU/Changelog).\n- [ ] If my PR includes database changes (e.g. KeyValueSegmentIdentifier) I have thought about compatibility and performed forwards and backwards compatibility tests\n- [ ] I thought about running CI.\n- [ ] If I did not run CI, I ran as much locally as possible before pushing.'
})

@ -2,11 +2,16 @@ name: pre-review
on: on:
pull_request: pull_request:
workflow_dispatch: branches:
- main
- release-*
permissions: concurrency:
statuses: write group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
checks: write cancel-in-progress: true
env:
GRADLE_OPTS: "-Xmx6g -Dorg.gradle.daemon=false -Dorg.gradle.parallel=true"
jobs: jobs:
repolint: repolint:
@ -15,46 +20,57 @@ jobs:
container: ghcr.io/todogroup/repolinter:v0.11.2 container: ghcr.io/todogroup/repolinter:v0.11.2
steps: steps:
- name: Checkout Code - name: Checkout Code
uses: actions/checkout@v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
ref: ${{ github.event.pull_request.head.sha || github.ref }}
- name: Lint Repo - name: Lint Repo
run: bundle exec /app/bin/repolinter.js --rulesetUrl https://raw.githubusercontent.com/hyperledger-labs/hyperledger-community-management-tools/main/repo_structure/repolint.json --format markdown run: bundle exec /app/bin/repolinter.js --rulesetUrl https://raw.githubusercontent.com/hyperledger-labs/hyperledger-community-management-tools/main/repo_structure/repolint.json --format markdown
gradle-wrapper: gradle-wrapper:
name: "Gradle Wrapper Validation" name: "Gradle Wrapper Validation"
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
steps: steps:
- uses: actions/checkout@v4.1.1 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: gradle/wrapper-validation-action@v1.1.0 with:
ref: ${{ github.event.pull_request.head.sha || github.ref }}
- uses: gradle/wrapper-validation-action@56b90f209b02bf6d1deae490e9ef18b21a389cd4
spotless: spotless:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
if: ${{ github.actor != 'dependabot[bot]' }}
steps: steps:
- name: Checkout Repo - name: Checkout Repo
uses: actions/checkout@v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
ref: ${{ github.event.pull_request.head.sha || github.ref }}
- name: Set up Java - name: Set up Java
uses: actions/setup-java@v4.0.0 uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93
with: with:
distribution: temurin distribution: temurin
java-version: 17 java-version: 17
- name: Setup Gradle - name: Setup Gradle
uses: gradle/gradle-build-action@v2.12.0 uses: gradle/actions/setup-gradle@9e899d11ad247ec76be7a60bc1cf9d3abbb9e7f1
with:
cache-disabled: true
- name: run spotless - name: run spotless
run: ./gradlew spotlessCheck -Dorg.gradle.parallel=true -Dorg.gradle.caching=true run: ./gradlew spotlessCheck
compile: compile:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
timeout-minutes: 30 timeout-minutes: 30
needs: [spotless, gradle-wrapper, repolint] needs: [spotless, gradle-wrapper, repolint]
steps: steps:
- name: Checkout Repo - name: Checkout Repo
uses: actions/checkout@v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
ref: ${{ github.event.pull_request.head.sha || github.ref }}
- name: Set up Java - name: Set up Java
uses: actions/setup-java@v4.0.0 uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93
with: with:
distribution: temurin distribution: temurin
java-version: 17 java-version: 17
- name: Setup Gradle - name: Setup Gradle
uses: gradle/gradle-build-action@v2.12.0 uses: gradle/actions/setup-gradle@9e899d11ad247ec76be7a60bc1cf9d3abbb9e7f1
with:
cache-disabled: true
- name: Gradle Compile - name: Gradle Compile
run: ./gradlew build -x test -x spotlessCheck -Dorg.gradle.parallel=true -Dorg.gradle.caching=true run: ./gradlew build -x test -x spotlessCheck
unitTests: unitTests:
env: env:
GRADLEW_UNIT_TEST_ARGS: ${{matrix.gradle_args}} GRADLEW_UNIT_TEST_ARGS: ${{matrix.gradle_args}}
@ -72,32 +88,62 @@ jobs:
- "ethereum:api:testBonsai" - "ethereum:api:testBonsai"
- "ethereum:api:testForest" - "ethereum:api:testForest"
- "ethereum:api:testRemainder" - "ethereum:api:testRemainder"
- "ethereum:eth:test"
- "ethereum:core:test" - "ethereum:core:test"
#includes will need exact strings from gradle args above
include:
- gradle_args: "test -x besu:test -x consensus:test -x crypto:test -x ethereum:eth:test -x ethereum:api:test -x ethereum:core:test"
filename: "everythingElse"
- gradle_args: "besu:test consensus:test crypto:test"
filename: "consensusCrypto"
- gradle_args: "ethereum:api:testBonsai"
filename: "apiBonsai"
- gradle_args: "ethereum:api:testForest"
filename: "apiForest"
- gradle_args: "ethereum:api:testRemainder"
filename: "apiRemainder"
- gradle_args: "ethereum:eth:test"
filename: "eth"
- gradle_args: "ethereum:core:test"
filename: "core"
steps: steps:
- name: Checkout Repo - name: Checkout Repo
uses: actions/checkout@v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
ref: ${{ github.event.pull_request.head.sha || github.ref }}
- name: Set up Java - name: Set up Java
uses: actions/setup-java@v4.0.0 uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93
with: with:
distribution: temurin distribution: temurin
java-version: 17 java-version: 17
- name: Setup Gradle - name: Setup Gradle
uses: gradle/gradle-build-action@v2.12.0 uses: gradle/actions/setup-gradle@9e899d11ad247ec76be7a60bc1cf9d3abbb9e7f1
with:
cache-disabled: true
- name: run unit tests - name: run unit tests
id: unitTest id: unitTest
run: ./gradlew $GRADLEW_UNIT_TEST_ARGS -Dorg.gradle.parallel=true -Dorg.gradle.caching=true run: ./gradlew $GRADLEW_UNIT_TEST_ARGS
- name: Publish Test Report - name: Upload Unit Test Results
uses: mikepenz/action-junit-report@v4 uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3
if: success() || failure() # always run even if the build step fails
with: with:
report_paths: '**/test-results/**/TEST-*.xml' name: unit-${{matrix.filename}}-test-results
annotate_only: true path: '**/test-results/**/TEST-*.xml'
pre-review: unittests-passed:
name: "unittests-passed"
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
needs: [unitTests] needs: [unitTests]
permissions: permissions:
checks: write checks: write
statuses: write statuses: write
if: always()
steps: steps:
- name: consolidation # Fail if any `needs` job was not a success.
run: echo "consolidating statuses" # Along with `if: always()`, this allows this job to act as a single required status check for the entire workflow.
- name: Fail on workflow error
run: exit 1
if: >-
${{
contains(needs.*.result, 'failure')
|| contains(needs.*.result, 'cancelled')
|| contains(needs.*.result, 'skipped')
}}

@ -1,147 +1,83 @@
name: reference-tests name: reference-tests
on: on:
workflow_dispatch:
pull_request: pull_request:
pull_request_review: branches:
types: - main
- submitted - release-*
env: env:
GRADLE_OPTS: "-Xmx6g -Dorg.gradle.daemon=false" GRADLE_OPTS: "-Xmx6g -Dorg.gradle.daemon=false -Dorg.gradle.parallel=true -Dorg.gradle.caching=true"
total-runners: 6 total-runners: 10
jobs: concurrency:
shouldRun: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
name: checks to ensure we should run cancel-in-progress: true
# necessary because there is no single PR approved event, need to check all comments/approvals/denials
# might also be a job running, and additional approvals
runs-on: ubuntu-22.04
outputs:
shouldRun: ${{steps.shouldRun.outputs.result}}
steps:
- name: required check
id: shouldRun
uses: actions/github-script@v7.0.1
env:
# fun fact, this changes based on incoming event, it will be different when we run this on pushes to main
RELEVANT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
with:
script: |
const { RELEVANT_SHA } = process.env;
const { data: { statuses } } = await github.rest.repos.getCombinedStatusForRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: RELEVANT_SHA,
});
const refTested = statuses && statuses.filter(({ context }) => context === 'reference-tests');
const alreadyRun = refTested && refTested.some(({ state }) => state === 'success');
const { data: reviews } = await github.rest.pulls.listReviews({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: context.issue.number,
});
const approvingReviews = reviews && reviews.filter(review => review.state === 'APPROVED');
const shouldRun = !alreadyRun && context.actor != 'dependabot[bot]' && (approvingReviews.length > 0);
console.log("tests should be run = %j", shouldRun);
console.log("alreadyRun = %j", alreadyRun);
console.log("approvingReviews = %j", approvingReviews.length);
return shouldRun;
prepareReferenceTestEthereum:
runs-on: ubuntu-22.04
needs: shouldRun
if: ${{ needs.shouldRun.outputs.shouldRun == 'true' }}
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
with:
submodules: recursive
set-safe-directory: true
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: setup gradle
uses: gradle/gradle-build-action@v2.12.0
- name: execute generate reference tests
run: ./gradlew ethereum:referencetests:blockchainReferenceTests ethereum:referencetests:generalstateReferenceTests ethereum:referencetests:generalstateRegressionReferenceTests -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
- name: store generated tests
uses: actions/upload-artifact@v3
with:
name: 'reference-tests'
path: 'ethereum/referencetests/build/generated/sources/reference-test/**/*.java'
jobs:
referenceTestEthereum: referenceTestEthereum:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
permissions: permissions:
statuses: write statuses: write
checks: write checks: write
needs: packages: read
- prepareReferenceTestEthereum
if: ${{ needs.shouldRun.outputs.shouldRun == 'true' }}
strategy: strategy:
fail-fast: true fail-fast: true
matrix: matrix:
runner_index: [0,1,2,3,4,5] runner_index: [1,2,3,4,5,6,7,8,9,10]
steps: steps:
- name: Checkout Repo - name: Checkout Repo
uses: actions/checkout@v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with: with:
ref: ${{ github.event.pull_request.head.sha || github.ref }}
submodules: recursive submodules: recursive
- name: Set up Java - name: Set up Java
uses: actions/setup-java@v4.0.0 uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93
with: with:
distribution: adopt-openj9 distribution: adopt-openj9
java-version: 17 java-version: 17
- name: retrieve generated tests
uses: actions/download-artifact@v3.0.2
with:
name: 'reference-tests'
path: 'ethereum/referencetests/build/generated/sources/reference-test/'
- name: get reference test report
uses: dawidd6/action-download-artifact@v2
with:
branch: main
name_is_regexp: true
name: 'reference-test-node-\d*\d-results'
path: tmp/ref-xml-reports-downloaded
if_no_artifact_found: true
- name: setup gradle - name: setup gradle
uses: gradle/gradle-build-action@v2.12.0 uses: gradle/actions/setup-gradle@9e899d11ad247ec76be7a60bc1cf9d3abbb9e7f1
- name: Split tests
id: split-tests
uses: r7kamura/split-tests-by-timings@v0
with: with:
reports: tmp/ref-xml-reports-downloaded cache-disabled: true
glob: 'ethereum/referencetests/build/generated/sources/reference-test/**/*.java' - name: execute generate reference tests
total: ${{env.total-runners}} run: ./gradlew ethereum:referencetests:blockchainReferenceTests ethereum:referencetests:generalstateReferenceTests ethereum:referencetests:generalstateRegressionReferenceTests -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
index: ${{ matrix.runner_index }} - name: list test files generated
run: find ethereum/referencetests/build/generated/sources/reference-test -name "*.java" | sort >> filenames.txt
- name: Split tests
run: ./.github/workflows/splitList.sh filenames.txt ${{env.total-runners}}
- name: echo test file count
run: cat group_${{matrix.runner_index}}.txt | wc
- name: convert to test suite classnames
run: cat group_${{matrix.runner_index}}.txt | sed -e 's/^.*java\///' -e 's@/@.@g' -e 's/\.java//' -e 's/^/--tests /' > testClasses.txt
- name: compose gradle args - name: compose gradle args
run: echo ${{ steps.split-tests.outputs.paths }} | sed -e 's/^.*java\///' -e 's@/@.@g' -e 's/\.java//' -e 's/^/--tests /' > refTestArgs.txt run: tr '\n' ' ' < testClasses.txt > refTestArgs.txt
- name: refTestArgs.txt
run: cat refTestArgs.txt
- name: run reference tests - name: run reference tests
run: ./gradlew ethereum:referenceTests:referenceTests `cat refTestArgs.txt` -Dorg.gradle.parallel=true -Dorg.gradle.caching=true run: ./gradlew ethereum:referenceTests:referenceTests `cat refTestArgs.txt`
- name: Upload Test Report - name: Upload Test Report
uses: actions/upload-artifact@v3 uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3
if: always() # always run even if the previous step fails if: always() # always run even if the previous step fails
with: with:
name: reference-test-node-${{matrix.runner_index}}-results name: reference-test-node-${{matrix.runner_index}}-results
path: '**/build/test-results/referenceTests/TEST-*.xml' path: '**/build/test-results/referenceTests/TEST-*.xml'
- name: Publish Test Report reftests-passed:
uses: mikepenz/action-junit-report@v4 name: "reftests-passed"
if: success() || failure() # always run even if the build step fails
with:
report_paths: '**/build/test-results/referenceTest/TEST-*.xml'
reference-tests:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
needs: [ referenceTestEthereum ] needs: [ referenceTestEthereum ]
permissions: permissions:
checks: write checks: write
statuses: write statuses: write
if: always()
steps: steps:
- name: consolidation # Fail if any `needs` job was not a success.
run: echo "consolidating statuses" # Along with `if: always()`, this allows this job to act as a single required status check for the entire workflow.
- name: Fail on workflow error
run: exit 1
if: >-
${{
contains(needs.*.result, 'failure')
|| contains(needs.*.result, 'cancelled')
|| contains(needs.*.result, 'skipped')
}}

@ -1,23 +1,30 @@
name: release besu name: release besu
on: on:
workflow_dispatch:
release: release:
types: [released] types: [released]
env:
registry: docker.io
jobs: jobs:
dockerPromoteX64: dockerPromoteX64:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
steps: steps:
- uses: actions/checkout@v4.1.1 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/setup-java@v4.0.0 - uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93
with: with:
distribution: 'temurin' # See 'Supported distributions' for available options distribution: 'temurin' # See 'Supported distributions' for available options
java-version: '17' java-version: '17'
cache: gradle cache: gradle
- name: Login to DockerHub - name: login to ${{ env.registry }}
run: echo '${{ secrets.DOCKER_PASSWORD_RW }}' | docker login -u '${{ secrets.DOCKER_USER_RW }}' --password-stdin uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
with:
registry: ${{ env.registry }}
username: ${{ secrets.DOCKER_USER_RW }}
password: ${{ secrets.DOCKER_PASSWORD_RW }}
- name: Setup Gradle - name: Setup Gradle
uses: gradle/gradle-build-action@v2.12.0 uses: gradle/actions/setup-gradle@9e899d11ad247ec76be7a60bc1cf9d3abbb9e7f1
with:
cache-disabled: true
- name: Docker upload - name: Docker upload
run: ./gradlew "-Prelease.releaseVersion=${{ github.ref_name }}" "-PdockerOrgName=${{ secrets.DOCKER_ORG }}" dockerUploadRelease run: ./gradlew "-Prelease.releaseVersion=${{ github.event.release.name }}" "-PdockerOrgName=${{ env.registry }}/${{ secrets.DOCKER_ORG }}" dockerUploadRelease
- name: Docker manifest - name: Docker manifest
run: ./gradlew "-Prelease.releaseVersion=${{ github.ref_name }}" "-PdockerOrgName=${{ secrets.DOCKER_ORG }}" manifestDockerRelease run: ./gradlew "-Prelease.releaseVersion=${{ github.event.release.name }}" "-PdockerOrgName=${{ env.registry }}/${{ secrets.DOCKER_ORG }}" manifestDockerRelease

@ -16,24 +16,26 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: checkout - name: checkout
uses: actions/checkout@v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- name: Set up JDK 17 - name: Set up JDK 17
uses: actions/setup-java@v4.0.0 uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93
with: with:
distribution: 'temurin' distribution: 'temurin'
java-version: '17' java-version: '17'
- name: Cache SonarCloud packages - name: Cache SonarCloud packages
uses: actions/cache@v3 uses: actions/cache@e12d46a63a90f2fae62d114769bbf2a179198b5c
with: with:
path: ~/.sonar/cache path: ~/.sonar/cache
key: ${{ runner.os }}-sonar key: ${{ runner.os }}-sonar
restore-keys: ${{ runner.os }}-sonar restore-keys: ${{ runner.os }}-sonar
- name: setup gradle - name: setup gradle
uses: gradle/gradle-build-action@v2.12.0 uses: gradle/actions/setup-gradle@9e899d11ad247ec76be7a60bc1cf9d3abbb9e7f1
with:
cache-disabled: true
- name: Build and analyze - name: Build and analyze
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
SONAR_ORGANIZATION: ${{ env.SONAR_ORGANIZATION }} SONAR_ORGANIZATION: ${{ vars.SONAR_ORGANIZATION }}
SONAR_PROJECT_KEY: ${{ env.SONAR_PROJECT_KEY }} SONAR_PROJECT_KEY: ${{ vars.SONAR_PROJECT_KEY }}
run: ./gradlew build sonarqube --continue --info -Dorg.gradle.parallel=true -Dorg.gradle.caching=true run: ./gradlew build sonarqube --continue --info -Dorg.gradle.parallel=true -Dorg.gradle.caching=true

@ -1,16 +1,77 @@
# Changelog # Changelog
## 24.1.2-SNAPSHOT ## Next Release
### Breaking Changes ### Breaking Changes
- Following the OpenMetrics convention, the updated Prometheus client adds the `_total` suffix to every metrics of type counter, with the effect that some existing metrics have been renamed to have this suffix. If you are using the official Besu Grafana dashboard [(available here)](https://grafana.com/grafana/dashboards/16455-besu-full/), just update it to the latest revision, that accepts the old and the new name of the affected metrics. If you have a custom dashboards or use the metrics in other ways, then you need to manually update it to support the new naming. - RocksDB database metadata format has changed to be more expressive, the migration of an existing metadata file to the new format is automatic at startup. Before performing a downgrade to a previous version it is mandatory to revert to the original format using the subcommand `besu --data-path=/path/to/besu/datadir storage revert-metadata v2-to-v1`.
- BFT networks won't start with SNAP or CHECKPOINT sync (previously Besu would start with this config but quietly fail to sync, so it's now more obvious that it won't work) [#6625](https://github.com/hyperledger/besu/pull/6625), [#6667](https://github.com/hyperledger/besu/pull/6667)
- Forest pruning has been removed, it was deprecated since 24.1.0. In case you are still using it you must now remove any of the following options: `pruning-enabled`, `pruning-blocks-retained` and `pruning-block-confirmations`, from your configuration, and you may want to consider switching to Bonsai.
### Upcoming Breaking Changes
- Receipt compaction will be enabled by default in a future version of Besu. After this change it will not be possible to downgrade to the previous Besu version.
### Deprecations
### Additions and Improvements
- Update "host allow list" logic to transition from deprecated `host()` method to suggested `authority()` method.[#6878](https://github.com/hyperledger/besu/issues/6878)
- `txpool_besuPendingTransactions`change parameter `numResults` to optional parameter [#6708](https://github.com/hyperledger/besu/pull/6708)
- Extend `Blockchain` service [#6592](https://github.com/hyperledger/besu/pull/6592)
- Add bft-style `blockperiodseconds` transitions to Clique [#6596](https://github.com/hyperledger/besu/pull/6596)
- Add `createemptyblocks` transitions to Clique [#6608](https://github.com/hyperledger/besu/pull/6608)
- RocksDB database metadata refactoring [#6555](https://github.com/hyperledger/besu/pull/6555)
- Make layered txpool aware of `minGasPrice` and `minPriorityFeePerGas` dynamic options [#6611](https://github.com/hyperledger/besu/pull/6611)
- Update commons-compress to 1.26.0 [#6648](https://github.com/hyperledger/besu/pull/6648)
- Update Vert.x to 4.5.4 [#6666](https://github.com/hyperledger/besu/pull/6666)
- Refactor and extend `TransactionPoolValidatorService` [#6636](https://github.com/hyperledger/besu/pull/6636)
- Introduce `TransactionSimulationService` [#6686](https://github.com/hyperledger/besu/pull/6686)
- Transaction call object to accept both `input` and `data` field simultaneously if they are set to equal values [#6702](https://github.com/hyperledger/besu/pull/6702)
- `eth_call` for blob tx allows for empty `maxFeePerBlobGas` [#6731](https://github.com/hyperledger/besu/pull/6731)
- Extend error handling of plugin RPC methods [#6759](https://github.com/hyperledger/besu/pull/6759)
- Added engine_newPayloadV4 and engine_getPayloadV4 methods [#6783](https://github.com/hyperledger/besu/pull/6783)
- Reduce storage size of receipts [#6602](https://github.com/hyperledger/besu/pull/6602)
- Dedicated log marker for invalid txs removed from the txpool [#6826](https://github.com/hyperledger/besu/pull/6826)
- Prevent startup with BONSAI and privacy enabled [#6809](https://github.com/hyperledger/besu/pull/6809)
- Remove deprecated Forest pruning [#6810](https://github.com/hyperledger/besu/pull/6810)
- Experimental Snap Sync Server [#6640](https://github.com/hyperledger/besu/pull/6640)
- Upgrade Reference Tests to 13.2 [#6854](https://github.com/hyperledger/besu/pull/6854)
- Update Web3j dependencies [#6811](https://github.com/hyperledger/besu/pull/6811)
- Add `tx-pool-blob-price-bump` option to configure the price bump percentage required to replace blob transactions (by default 100%) [#6874](https://github.com/hyperledger/besu/pull/6874)
### Bug fixes
- Fix txpool dump/restore race condition [#6665](https://github.com/hyperledger/besu/pull/6665)
- Make block transaction selection max time aware of PoA transitions [#6676](https://github.com/hyperledger/besu/pull/6676)
- Don't enable the BFT mining coordinator when running sub commands such as `blocks export` [#6675](https://github.com/hyperledger/besu/pull/6675)
- In JSON-RPC return optional `v` fields for type 1 and type 2 transactions [#6762](https://github.com/hyperledger/besu/pull/6762)
- Fix Shanghai/QBFT block import bug when syncing new nodes [#6765](https://github.com/hyperledger/besu/pull/6765)
- Fix to avoid broadcasting full blob txs, instead of only the tx announcement, to a subset of nodes [#6835](https://github.com/hyperledger/besu/pull/6835)
- Snap client fixes discovered during snap server testing [#6847](https://github.com/hyperledger/besu/pull/6847)
- Correctly initialize the txpool as disabled on creation [#6890](https://github.com/hyperledger/besu/pull/6890)
### Download Links
## 24.3.0
### Breaking Changes
- SNAP - Snap sync is now the default for named networks [#6530](https://github.com/hyperledger/besu/pull/6530)
- if you want to use the previous default behavior, you'll need to specify `--sync-mode=FAST`
- BONSAI - Default data storage format is now Bonsai [#6536](https://github.com/hyperledger/besu/pull/6536)
- if you had previously used the default (FOREST), at startup you will get an error indicating the mismatch
`Mismatch: DB at '/your-path' is FOREST (Version 1) but config expects BONSAI (Version 2). Please check your config.`
- to fix this mismatch, specify the format explicitly using `--data-storage-format=FOREST`
- Following the OpenMetrics convention, the updated Prometheus client adds the `_total` suffix to every metrics of type counter, with the effect that some existing metrics have been renamed to have this suffix. If you are using the official Besu Grafana dashboard [(available here)](https://grafana.com/grafana/dashboards/16455-besu-full/), just update it to the latest revision, that accepts the old and the new name of the affected metrics. If you have a custom dashboard or use the metrics in other ways, then you need to manually update it to support the new naming.
- The `trace-filter` method in JSON-RPC API now has a default block range limit of 1000, adjustable with `--rpc-max-trace-filter-range` (thanks @alyokaz) [#6446](https://github.com/hyperledger/besu/pull/6446) - The `trace-filter` method in JSON-RPC API now has a default block range limit of 1000, adjustable with `--rpc-max-trace-filter-range` (thanks @alyokaz) [#6446](https://github.com/hyperledger/besu/pull/6446)
- Requesting the Ethereum Node Record (ENR) to acquire the fork id from bonded peers is now enabled by default, so the following change has been made [#5628](https://github.com/hyperledger/besu/pull/5628): - Requesting the Ethereum Node Record (ENR) to acquire the fork id from bonded peers is now enabled by default, so the following change has been made [#5628](https://github.com/hyperledger/besu/pull/5628):
- `--Xfilter-on-enr-fork-id` has been removed. To disable the feature use `--filter-on-enr-fork-id=false`. - `--Xfilter-on-enr-fork-id` has been removed. To disable the feature use `--filter-on-enr-fork-id=false`.
- `--engine-jwt-enabled` has been removed. Use `--engine-jwt-disabled` instead. [#6491](https://github.com/hyperledger/besu/pull/6491) - `--engine-jwt-enabled` has been removed. Use `--engine-jwt-disabled` instead. [#6491](https://github.com/hyperledger/besu/pull/6491)
- Release docker images now provided at ghcr.io instead of dockerhub
### Deprecations ### Deprecations
- X_SNAP and X_CHECKPOINT are marked for deprecation and will be removed in 24.6.0 in favor of SNAP and CHECKPOINT [#6405](https://github.com/hyperledger/besu/pull/6405)
- `--Xp2p-peer-lower-bound` is deprecated. [#6501](https://github.com/hyperledger/besu/pull/6501)
### Upcoming Breaking Changes
- `--Xbonsai-limit-trie-logs-enabled` will be removed. You will need to use `--bonsai-limit-trie-logs-enabled` instead. Additionally, this limit will change to be enabled by default.
- If you do not want the limit enabled (eg you have `--bonsai-historical-block-limit` set < 512), you need to explicitly disable it using `--bonsai-limit-trie-logs-enabled=false` or increase the limit. [#6561](https://github.com/hyperledger/besu/pull/6561)
### Additions and Improvements ### Additions and Improvements
- Upgrade Prometheus and Opentelemetry dependencies [#6422](https://github.com/hyperledger/besu/pull/6422) - Upgrade Prometheus and Opentelemetry dependencies [#6422](https://github.com/hyperledger/besu/pull/6422)
@ -19,16 +80,50 @@
- Log blob count when importing a block via Engine API [#6466](https://github.com/hyperledger/besu/pull/6466) - Log blob count when importing a block via Engine API [#6466](https://github.com/hyperledger/besu/pull/6466)
- Introduce `--Xbonsai-limit-trie-logs-enabled` experimental feature which by default will only retain the latest 512 trie logs, saving about 3GB per week in database growth [#5390](https://github.com/hyperledger/besu/issues/5390) - Introduce `--Xbonsai-limit-trie-logs-enabled` experimental feature which by default will only retain the latest 512 trie logs, saving about 3GB per week in database growth [#5390](https://github.com/hyperledger/besu/issues/5390)
- Introduce `besu storage x-trie-log prune` experimental offline subcommand which will prune all redundant trie logs except the latest 512 [#6303](https://github.com/hyperledger/besu/pull/6303) - Introduce `besu storage x-trie-log prune` experimental offline subcommand which will prune all redundant trie logs except the latest 512 [#6303](https://github.com/hyperledger/besu/pull/6303)
- Improve flat trace generation performance [#6472](https://github.com/hyperledger/besu/pull/6472)
- SNAP and CHECKPOINT sync - early access flag removed so now simply SNAP and CHECKPOINT [#6405](https://github.com/hyperledger/besu/pull/6405)
- X_SNAP and X_CHECKPOINT are marked for deprecation and will be removed in 24.4.0
- Github Actions based build. - Github Actions based build.
- Introduce caching mechanism to optimize Keccak hash calculations for account storage slots during block processing [#6452](https://github.com/hyperledger/besu/pull/6452) - Introduce caching mechanism to optimize Keccak hash calculations for account storage slots during block processing [#6452](https://github.com/hyperledger/besu/pull/6452)
- Added configuration options for `pragueTime` to genesis file for Prague fork development [#6473](https://github.com/hyperledger/besu/pull/6473) - Added configuration options for `pragueTime` to genesis file for Prague fork development [#6473](https://github.com/hyperledger/besu/pull/6473)
- Moving trielog storage to RocksDB's blobdb to improve write amplification [#6289](https://github.com/hyperledger/besu/pull/6289) - Moving trielog storage to RocksDB's blobdb to improve write amplification [#6289](https://github.com/hyperledger/besu/pull/6289)
- Support for `shanghaiTime` fork and Shanghai EVM smart contracts in QBFT/IBFT chains [#6353](https://github.com/hyperledger/besu/pull/6353)
- Change ExecutionHaltReason for contract creation collision case to return ILLEGAL_STATE_CHANGE [#6518](https://github.com/hyperledger/besu/pull/6518)
- Experimental feature `--Xbonsai-code-using-code-hash-enabled` for storing Bonsai code storage by code hash [#6505](https://github.com/hyperledger/besu/pull/6505)
- More accurate column size `storage rocksdb usage` subcommand [#6540](https://github.com/hyperledger/besu/pull/6540)
- Adds `storage rocksdb x-stats` subcommand [#6540](https://github.com/hyperledger/besu/pull/6540)
- New `eth_blobBaseFee` JSON-RPC method [#6581](https://github.com/hyperledger/besu/pull/6581)
- Add blob transaction support to `eth_call` [#6661](https://github.com/hyperledger/besu/pull/6661)
- Add blobs to `eth_feeHistory` [#6679](https://github.com/hyperledger/besu/pull/6679)
- Upgrade reference tests to version 13.1 [#6574](https://github.com/hyperledger/besu/pull/6574)
- Extend `BesuConfiguration` service [#6584](https://github.com/hyperledger/besu/pull/6584)
- Add `ethereum_min_gas_price` and `ethereum_min_priority_fee` metrics to track runtime values of `min-gas-price` and `min-priority-fee` [#6587](https://github.com/hyperledger/besu/pull/6587)
- Option to perform version incompatibility checks when starting Besu. In this first release of the feature, if `--version-compatibility-protection` is set to true it checks that the version of Besu being started is the same or higher than the previous version. [#6307](https://github.com/hyperledger/besu/pull/6307)
- Moved account frame warming from GasCalculator into the Call operations [#6557](https://github.com/hyperledger/besu/pull/6557)
### Bug fixes ### Bug fixes
- Fix the way an advertised host configured with `--p2p-host` is treated when communicating with the originator of a PING packet [#6225](https://github.com/hyperledger/besu/pull/6225) - Fix the way an advertised host configured with `--p2p-host` is treated when communicating with the originator of a PING packet [#6225](https://github.com/hyperledger/besu/pull/6225)
- Fix `poa-block-txs-selection-max-time` option that was inadvertently reset to its default after being configured [#6444](https://github.com/hyperledger/besu/pull/6444) - Fix `poa-block-txs-selection-max-time` option that was inadvertently reset to its default after being configured [#6444](https://github.com/hyperledger/besu/pull/6444)
- Fix for tx incorrectly discarded when there is a timeout during block creation [#6563](https://github.com/hyperledger/besu/pull/6563)
- Fix traces so that call gas costing in traces matches other clients traces [#6525](https://github.com/hyperledger/besu/pull/6525)
### Download Links
https://github.com/hyperledger/besu/releases/tag/24.3.0
https://github.com/hyperledger/besu/releases/download/24.3.0/besu-24.3.0.tar.gz / sha256 8037ce51bb5bb396d29717a812ea7ff577b0d6aa341d67d1e5b77cbc55b15f84
https://github.com/hyperledger/besu/releases/download/24.3.0/besu-24.3.0.zip / sha256 41ea2ca734a3b377f43ee178166b5b809827084789378dbbe4e5b52bbd8e0674
## 24.1.2
### Bug fixes
- Fix ETC Spiral upgrade breach of consensus [#6524](https://github.com/hyperledger/besu/pull/6524)
### Additions and Improvements
- Adds timestamp to enable Cancun upgrade on mainnet [#6545](https://github.com/hyperledger/besu/pull/6545)
- Github Actions based build. [#6427](https://github.com/hyperledger/besu/pull/6427)
### Download Links ### Download Links
https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/24.1.2/besu-24.1.2.zip / sha256 9033f300edd81c770d3aff27a29f59dd4b6142a113936886a8f170718e412971
https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/24.1.2/besu-24.1.2.tar.gz / sha256 082db8cf4fb67527aa0dd757e5d254b3b497f5027c23287f9c0a74a6a743bf08
## 24.1.1 ## 24.1.1
@ -36,7 +131,7 @@
- New `EXECUTION_HALTED` error returned if there is an error executing or simulating a transaction, with the reason for execution being halted. Replaces the generic `INTERNAL_ERROR` return code in certain cases which some applications may be checking for [#6343](https://github.com/hyperledger/besu/pull/6343) - New `EXECUTION_HALTED` error returned if there is an error executing or simulating a transaction, with the reason for execution being halted. Replaces the generic `INTERNAL_ERROR` return code in certain cases which some applications may be checking for [#6343](https://github.com/hyperledger/besu/pull/6343)
- The Besu Docker images with `openjdk-latest` tags since 23.10.3 were incorrectly using UID 1001 instead of 1000 for the container's `besu` user. The user now uses 1000 again. Containers created from or migrated to images using UID 1001 will need to chown their persistent database files to UID 1000 (thanks @h4l) [#6360](https://github.com/hyperledger/besu/pull/6360) - The Besu Docker images with `openjdk-latest` tags since 23.10.3 were incorrectly using UID 1001 instead of 1000 for the container's `besu` user. The user now uses 1000 again. Containers created from or migrated to images using UID 1001 will need to chown their persistent database files to UID 1000 (thanks @h4l) [#6360](https://github.com/hyperledger/besu/pull/6360)
- The deprecated `--privacy-onchain-groups-enabled` option has now been removed. Use the `--privacy-flexible-groups-enabled` option instead. [#6411](https://github.com/hyperledger/besu/pull/6411) - The deprecated `--privacy-onchain-groups-enabled` option has now been removed. Use the `--privacy-flexible-groups-enabled` option instead. [#6411](https://github.com/hyperledger/besu/pull/6411)
- The time that can be spent selecting transactions during block creation is not capped at 5 seconds for PoS and PoW networks, and for PoA networks, at 75% of the block period specified in the genesis. This is to prevent possible DoS attacks in case a single transaction is taking too long to execute, and to have a stable block production rate. This could be a breaking change if an existing network needs to accept transactions that take more time to executed than the newly introduced limit. If it is mandatory for these networks to keep processing these long processing transaction, then the default value of `block-txs-selection-max-time` or `poa-block-txs-selection-max-time` needs to be tuned accordingly. [#6423](https://github.com/hyperledger/besu/pull/6423) - The time that can be spent selecting transactions during block creation is not capped at 5 seconds for PoS and PoW networks, and for PoA networks, at 75% of the block period specified in the genesis. This is to prevent possible DoS attacks in case a single transaction is taking too long to execute, and to have a stable block production rate. This could be a breaking change if an existing network needs to accept transactions that take more time to execute than the newly introduced limit. If it is mandatory for these networks to keep processing these long processing transaction, then the default value of `block-txs-selection-max-time` or `poa-block-txs-selection-max-time` needs to be tuned accordingly. [#6423](https://github.com/hyperledger/besu/pull/6423)
### Deprecations ### Deprecations
@ -85,6 +180,7 @@ https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/24.1.1/besu-24.1.1.t
- Add custom genesis file name to config overview if specified [#6297](https://github.com/hyperledger/besu/pull/6297) - Add custom genesis file name to config overview if specified [#6297](https://github.com/hyperledger/besu/pull/6297)
- Update Gradle plugins and replace unmaintained License Gradle Plugin with the actively maintained Gradle License Report [#6275](https://github.com/hyperledger/besu/pull/6275) - Update Gradle plugins and replace unmaintained License Gradle Plugin with the actively maintained Gradle License Report [#6275](https://github.com/hyperledger/besu/pull/6275)
- Optimize RocksDB WAL files, allows for faster restart and a more linear disk space utilization [#6328](https://github.com/hyperledger/besu/pull/6328) - Optimize RocksDB WAL files, allows for faster restart and a more linear disk space utilization [#6328](https://github.com/hyperledger/besu/pull/6328)
- Add a cache on senders by transaction hash [#6375](https://github.com/hyperledger/besu/pull/6375)
### Bug fixes ### Bug fixes
- Hotfix for selfdestruct preimages on bonsai [#6359](https://github.com/hyperledger/besu/pull/6359) - Hotfix for selfdestruct preimages on bonsai [#6359](https://github.com/hyperledger/besu/pull/6359)
@ -2249,7 +2345,7 @@ Workaround - Limit the number of blocks queried by each `eth_getLogs` call.
- Implemented private contract log filters including JSON-RPC methods to interact with private filters. [\#735](https://github.com/hyperledger/besu/pull/735) - Implemented private contract log filters including JSON-RPC methods to interact with private filters. [\#735](https://github.com/hyperledger/besu/pull/735)
- Implemented EIP-2315: Simple Subroutines for the EVM [\#717](https://github.com/hyperledger/besu/pull/717) - Implemented EIP-2315: Simple Subroutines for the EVM [\#717](https://github.com/hyperledger/besu/pull/717)
- Implemented Splunk logging. [\#725](https://github.com/hyperledger/besu/pull/725) - Implemented Splunk logging. [\#725](https://github.com/hyperledger/besu/pull/725)
- Implemented optional native library encryption. [\#675](https://github.com/hyperledger/besu/pull/675). To enable add `--Xsecp256k1-native-enabled` (for transaciton signatures) and/or `--Xaltbn128-native-enabled` (for altbn128 precompiled contracts) as command line options. - Implemented optional native library encryption. [\#675](https://github.com/hyperledger/besu/pull/675). To enable add `--Xsecp256k1-native-enabled` (for transaction signatures) and/or `--Xaltbn128-native-enabled` (for altbn128 precompiled contracts) as command line options.
### Bug Fixes ### Bug Fixes
@ -2431,7 +2527,7 @@ Early access features are available features that are not recommended for produc
have unstable interfaces. have unstable interfaces.
* [Onchain privacy groups](https://besu.hyperledger.org/en/latest/Concepts/Privacy/Onchain-PrivacyGroups/) with add and remove members. * [Onchain privacy groups](https://besu.hyperledger.org/en/latest/Concepts/Privacy/Onchain-PrivacyGroups/) with add and remove members.
Not being able to to re-add a member to an onchain privacy group is a [known issue](https://github.com/hyperledger/besu/issues/455) Not being able to re-add a member to an onchain privacy group is a [known issue](https://github.com/hyperledger/besu/issues/455)
with the add and remove functionality. with the add and remove functionality.
### Known Issues ### Known Issues

@ -56,15 +56,12 @@ import java.math.BigInteger;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
import org.junit.After; import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.extension.ExtendWith;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
/** /** Superclass for acceptance tests. */
* Superclass for acceptance tests. For now (transition to junit5 is ongoing) this class supports
* junit4 format.
*/
@ExtendWith(AcceptanceTestBaseTestWatcher.class) @ExtendWith(AcceptanceTestBaseTestWatcher.class)
public class AcceptanceTestBase { public class AcceptanceTestBase {
@ -131,7 +128,7 @@ public class AcceptanceTestBase {
exitedSuccessfully = new ExitedWithCode(0); exitedSuccessfully = new ExitedWithCode(0);
} }
@After @AfterEach
public void tearDownAcceptanceTestBase() { public void tearDownAcceptanceTestBase() {
reportMemory(); reportMemory();
cluster.close(); cluster.close();

@ -58,6 +58,7 @@ public class BlockUtils {
null, null,
null, null,
null, null,
null,
blockHeaderFunctions); blockHeaderFunctions);
} }
} }

@ -33,6 +33,7 @@ import org.hyperledger.besu.ethereum.eth.transactions.TransactionPoolConfigurati
import org.hyperledger.besu.ethereum.p2p.config.NetworkingConfiguration; import org.hyperledger.besu.ethereum.p2p.config.NetworkingConfiguration;
import org.hyperledger.besu.ethereum.p2p.rlpx.connections.netty.TLSConfiguration; import org.hyperledger.besu.ethereum.p2p.rlpx.connections.netty.TLSConfiguration;
import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration; import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.metrics.prometheus.MetricsConfiguration; import org.hyperledger.besu.metrics.prometheus.MetricsConfiguration;
import org.hyperledger.besu.pki.config.PkiKeyStoreConfiguration; import org.hyperledger.besu.pki.config.PkiKeyStoreConfiguration;
import org.hyperledger.besu.tests.acceptance.dsl.condition.Condition; import org.hyperledger.besu.tests.acceptance.dsl.condition.Condition;
@ -61,6 +62,7 @@ import java.nio.file.Path;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Locale;
import java.util.Map; import java.util.Map;
import java.util.Optional; import java.util.Optional;
import java.util.Properties; import java.util.Properties;
@ -109,6 +111,7 @@ public class BesuNode implements NodeConfiguration, RunnableNode, AutoCloseable
private final WebSocketConfiguration webSocketConfiguration; private final WebSocketConfiguration webSocketConfiguration;
private final JsonRpcIpcConfiguration jsonRpcIpcConfiguration; private final JsonRpcIpcConfiguration jsonRpcIpcConfiguration;
private final MetricsConfiguration metricsConfiguration; private final MetricsConfiguration metricsConfiguration;
private final DataStorageConfiguration dataStorageConfiguration;
private Optional<PermissioningConfiguration> permissioningConfiguration; private Optional<PermissioningConfiguration> permissioningConfiguration;
private final ApiConfiguration apiConfiguration; private final ApiConfiguration apiConfiguration;
private final GenesisConfigurationProvider genesisConfigProvider; private final GenesisConfigurationProvider genesisConfigProvider;
@ -145,6 +148,7 @@ public class BesuNode implements NodeConfiguration, RunnableNode, AutoCloseable
final MetricsConfiguration metricsConfiguration, final MetricsConfiguration metricsConfiguration,
final Optional<PermissioningConfiguration> permissioningConfiguration, final Optional<PermissioningConfiguration> permissioningConfiguration,
final ApiConfiguration apiConfiguration, final ApiConfiguration apiConfiguration,
final DataStorageConfiguration dataStorageConfiguration,
final Optional<String> keyfilePath, final Optional<String> keyfilePath,
final boolean devMode, final boolean devMode,
final NetworkName network, final NetworkName network,
@ -195,6 +199,7 @@ public class BesuNode implements NodeConfiguration, RunnableNode, AutoCloseable
this.metricsConfiguration = metricsConfiguration; this.metricsConfiguration = metricsConfiguration;
this.permissioningConfiguration = permissioningConfiguration; this.permissioningConfiguration = permissioningConfiguration;
this.apiConfiguration = apiConfiguration; this.apiConfiguration = apiConfiguration;
this.dataStorageConfiguration = dataStorageConfiguration;
this.genesisConfigProvider = genesisConfigProvider; this.genesisConfigProvider = genesisConfigProvider;
this.devMode = devMode; this.devMode = devMode;
this.network = network; this.network = network;
@ -427,11 +432,14 @@ public class BesuNode implements NodeConfiguration, RunnableNode, AutoCloseable
getGenesisConfig() getGenesisConfig()
.map( .map(
gc -> gc ->
gc.toLowerCase().contains("ibft") ? ConsensusType.IBFT2 : ConsensusType.QBFT) gc.toLowerCase(Locale.ROOT).contains("ibft")
? ConsensusType.IBFT2
: ConsensusType.QBFT)
.orElse(ConsensusType.IBFT2); .orElse(ConsensusType.IBFT2);
nodeRequests = nodeRequests =
new NodeRequests( new NodeRequests(
web3jService,
new JsonRpc2_0Web3j(web3jService, 2000, Async.defaultExecutorService()), new JsonRpc2_0Web3j(web3jService, 2000, Async.defaultExecutorService()),
new CliqueRequestFactory(web3jService), new CliqueRequestFactory(web3jService),
new BftRequestFactory(web3jService, bftType), new BftRequestFactory(web3jService, bftType),
@ -690,6 +698,10 @@ public class BesuNode implements NodeConfiguration, RunnableNode, AutoCloseable
this.privacyParameters = privacyParameters; this.privacyParameters = privacyParameters;
} }
public DataStorageConfiguration getDataStorageConfiguration() {
return dataStorageConfiguration;
}
public boolean isDevMode() { public boolean isDevMode() {
return devMode; return devMode;
} }
@ -777,6 +789,21 @@ public class BesuNode implements NodeConfiguration, RunnableNode, AutoCloseable
nodeRequests.shutdown(); nodeRequests.shutdown();
nodeRequests = null; nodeRequests = null;
} }
deleteRuntimeFiles();
}
private void deleteRuntimeFiles() {
try {
Files.deleteIfExists(homeDirectory.resolve("besu.networks"));
} catch (IOException e) {
LOG.error("Failed to clean up besu.networks file in {}", homeDirectory, e);
}
try {
Files.deleteIfExists(homeDirectory.resolve("besu.ports"));
} catch (IOException e) {
LOG.error("Failed to clean up besu.ports file in {}", homeDirectory, e);
}
} }
@Override @Override

@ -18,11 +18,14 @@ import static com.google.common.base.Preconditions.checkState;
import static java.nio.charset.StandardCharsets.UTF_8; import static java.nio.charset.StandardCharsets.UTF_8;
import org.hyperledger.besu.cli.options.TransactionPoolOptions; import org.hyperledger.besu.cli.options.TransactionPoolOptions;
import org.hyperledger.besu.cli.options.stable.DataStorageOptions;
import org.hyperledger.besu.cli.options.unstable.NetworkingOptions; import org.hyperledger.besu.cli.options.unstable.NetworkingOptions;
import org.hyperledger.besu.ethereum.api.jsonrpc.ipc.JsonRpcIpcConfiguration; import org.hyperledger.besu.ethereum.api.jsonrpc.ipc.JsonRpcIpcConfiguration;
import org.hyperledger.besu.ethereum.eth.transactions.ImmutableTransactionPoolConfiguration; import org.hyperledger.besu.ethereum.eth.transactions.ImmutableTransactionPoolConfiguration;
import org.hyperledger.besu.ethereum.p2p.rlpx.connections.netty.TLSConfiguration; import org.hyperledger.besu.ethereum.p2p.rlpx.connections.netty.TLSConfiguration;
import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration; import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.ethereum.worldstate.ImmutableDataStorageConfiguration;
import org.hyperledger.besu.metrics.prometheus.MetricsConfiguration; import org.hyperledger.besu.metrics.prometheus.MetricsConfiguration;
import org.hyperledger.besu.plugin.services.metrics.MetricCategory; import org.hyperledger.besu.plugin.services.metrics.MetricCategory;
import org.hyperledger.besu.tests.acceptance.dsl.StaticNodesUtils; import org.hyperledger.besu.tests.acceptance.dsl.StaticNodesUtils;
@ -49,6 +52,7 @@ import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.apache.commons.lang3.SystemUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.slf4j.MDC; import org.slf4j.MDC;
@ -74,8 +78,15 @@ public class ProcessBesuNodeRunner implements BesuNodeRunner {
final Path dataDir = node.homeDirectory(); final Path dataDir = node.homeDirectory();
final var workingDir =
new File(System.getProperty("user.dir")).getParentFile().getParentFile().toPath();
final List<String> params = new ArrayList<>(); final List<String> params = new ArrayList<>();
params.add("build/install/besu/bin/besu"); if (SystemUtils.IS_OS_WINDOWS) {
params.add(workingDir.resolve("build\\install\\besu\\bin\\besu.bat").toString());
} else {
params.add("build/install/besu/bin/besu");
}
params.add("--data-path"); params.add("--data-path");
params.add(dataDir.toAbsolutePath().toString()); params.add(dataDir.toAbsolutePath().toString());
@ -109,6 +120,13 @@ public class ProcessBesuNodeRunner implements BesuNodeRunner {
.build()) .build())
.getCLIOptions()); .getCLIOptions());
params.addAll(
DataStorageOptions.fromConfig(
ImmutableDataStorageConfiguration.builder()
.from(DataStorageConfiguration.DEFAULT_FOREST_CONFIG)
.build())
.getCLIOptions());
if (node.getMiningParameters().isMiningEnabled()) { if (node.getMiningParameters().isMiningEnabled()) {
params.add("--miner-enabled"); params.add("--miner-enabled");
params.add("--miner-coinbase"); params.add("--miner-coinbase");
@ -412,15 +430,13 @@ public class ProcessBesuNodeRunner implements BesuNodeRunner {
LOG.info("Creating besu process with params {}", params); LOG.info("Creating besu process with params {}", params);
final ProcessBuilder processBuilder = final ProcessBuilder processBuilder =
new ProcessBuilder(params) new ProcessBuilder(params)
.directory(new File(System.getProperty("user.dir")).getParentFile().getParentFile()) .directory(workingDir.toFile())
.redirectErrorStream(true) .redirectErrorStream(true)
.redirectInput(Redirect.INHERIT); .redirectInput(Redirect.INHERIT);
if (!node.getPlugins().isEmpty()) { if (!node.getPlugins().isEmpty()) {
processBuilder processBuilder
.environment() .environment()
.put( .put("BESU_OPTS", "-Dbesu.plugins.dir=" + dataDir.resolve("plugins").toAbsolutePath());
"BESU_OPTS",
"-Dbesu.plugins.dir=" + dataDir.resolve("plugins").toAbsolutePath().toString());
} }
// Use non-blocking randomness for acceptance tests // Use non-blocking randomness for acceptance tests
processBuilder processBuilder
@ -562,7 +578,7 @@ public class ProcessBesuNodeRunner implements BesuNodeRunner {
LOG.info("Killing {} process, pid {}", name, process.pid()); LOG.info("Killing {} process, pid {}", name, process.pid());
process.destroy(); process.descendants().forEach(ProcessHandle::destroy);
try { try {
process.waitFor(30, TimeUnit.SECONDS); process.waitFor(30, TimeUnit.SECONDS);
} catch (final InterruptedException e) { } catch (final InterruptedException e) {

@ -28,7 +28,9 @@ import org.hyperledger.besu.crypto.KeyPairUtil;
import org.hyperledger.besu.cryptoservices.KeyPairSecurityModule; import org.hyperledger.besu.cryptoservices.KeyPairSecurityModule;
import org.hyperledger.besu.cryptoservices.NodeKey; import org.hyperledger.besu.cryptoservices.NodeKey;
import org.hyperledger.besu.ethereum.GasLimitCalculator; import org.hyperledger.besu.ethereum.GasLimitCalculator;
import org.hyperledger.besu.ethereum.api.ApiConfiguration;
import org.hyperledger.besu.ethereum.api.graphql.GraphQLConfiguration; import org.hyperledger.besu.ethereum.api.graphql.GraphQLConfiguration;
import org.hyperledger.besu.ethereum.core.ImmutableMiningParameters;
import org.hyperledger.besu.ethereum.eth.EthProtocolConfiguration; import org.hyperledger.besu.ethereum.eth.EthProtocolConfiguration;
import org.hyperledger.besu.ethereum.eth.sync.SynchronizerConfiguration; import org.hyperledger.besu.ethereum.eth.sync.SynchronizerConfiguration;
import org.hyperledger.besu.ethereum.eth.transactions.ImmutableTransactionPoolConfiguration; import org.hyperledger.besu.ethereum.eth.transactions.ImmutableTransactionPoolConfiguration;
@ -36,31 +38,38 @@ import org.hyperledger.besu.ethereum.eth.transactions.TransactionPoolConfigurati
import org.hyperledger.besu.ethereum.p2p.peers.EnodeURLImpl; import org.hyperledger.besu.ethereum.p2p.peers.EnodeURLImpl;
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueStorageProvider; import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueStorageProvider;
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueStorageProviderBuilder; import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueStorageProviderBuilder;
import org.hyperledger.besu.ethereum.transaction.TransactionSimulator;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.evm.internal.EvmConfiguration; import org.hyperledger.besu.evm.internal.EvmConfiguration;
import org.hyperledger.besu.metrics.MetricsSystemFactory; import org.hyperledger.besu.metrics.MetricsSystemFactory;
import org.hyperledger.besu.metrics.ObservableMetricsSystem; import org.hyperledger.besu.metrics.ObservableMetricsSystem;
import org.hyperledger.besu.plugin.data.EnodeURL; import org.hyperledger.besu.plugin.data.EnodeURL;
import org.hyperledger.besu.plugin.services.BesuConfiguration; import org.hyperledger.besu.plugin.services.BesuConfiguration;
import org.hyperledger.besu.plugin.services.BesuEvents; import org.hyperledger.besu.plugin.services.BesuEvents;
import org.hyperledger.besu.plugin.services.BlockchainService;
import org.hyperledger.besu.plugin.services.PermissioningService;
import org.hyperledger.besu.plugin.services.PicoCLIOptions; import org.hyperledger.besu.plugin.services.PicoCLIOptions;
import org.hyperledger.besu.plugin.services.PluginTransactionValidatorService; import org.hyperledger.besu.plugin.services.PrivacyPluginService;
import org.hyperledger.besu.plugin.services.RpcEndpointService; import org.hyperledger.besu.plugin.services.RpcEndpointService;
import org.hyperledger.besu.plugin.services.SecurityModuleService; import org.hyperledger.besu.plugin.services.SecurityModuleService;
import org.hyperledger.besu.plugin.services.StorageService; import org.hyperledger.besu.plugin.services.StorageService;
import org.hyperledger.besu.plugin.services.TransactionPoolValidatorService;
import org.hyperledger.besu.plugin.services.TransactionSelectionService; import org.hyperledger.besu.plugin.services.TransactionSelectionService;
import org.hyperledger.besu.plugin.services.TransactionSimulationService;
import org.hyperledger.besu.plugin.services.storage.rocksdb.RocksDBPlugin; import org.hyperledger.besu.plugin.services.storage.rocksdb.RocksDBPlugin;
import org.hyperledger.besu.plugin.services.txselection.PluginTransactionSelectorFactory;
import org.hyperledger.besu.plugin.services.txvalidator.PluginTransactionValidatorFactory;
import org.hyperledger.besu.services.BesuConfigurationImpl; import org.hyperledger.besu.services.BesuConfigurationImpl;
import org.hyperledger.besu.services.BesuEventsImpl; import org.hyperledger.besu.services.BesuEventsImpl;
import org.hyperledger.besu.services.BesuPluginContextImpl; import org.hyperledger.besu.services.BesuPluginContextImpl;
import org.hyperledger.besu.services.BlockchainServiceImpl;
import org.hyperledger.besu.services.PermissioningServiceImpl; import org.hyperledger.besu.services.PermissioningServiceImpl;
import org.hyperledger.besu.services.PicoCLIOptionsImpl; import org.hyperledger.besu.services.PicoCLIOptionsImpl;
import org.hyperledger.besu.services.PluginTransactionValidatorServiceImpl; import org.hyperledger.besu.services.PrivacyPluginServiceImpl;
import org.hyperledger.besu.services.RpcEndpointServiceImpl; import org.hyperledger.besu.services.RpcEndpointServiceImpl;
import org.hyperledger.besu.services.SecurityModuleServiceImpl; import org.hyperledger.besu.services.SecurityModuleServiceImpl;
import org.hyperledger.besu.services.StorageServiceImpl; import org.hyperledger.besu.services.StorageServiceImpl;
import org.hyperledger.besu.services.TransactionPoolValidatorServiceImpl;
import org.hyperledger.besu.services.TransactionSelectionServiceImpl; import org.hyperledger.besu.services.TransactionSelectionServiceImpl;
import org.hyperledger.besu.services.TransactionSimulationServiceImpl;
import java.io.File; import java.io.File;
import java.nio.file.Path; import java.nio.file.Path;
@ -70,7 +79,6 @@ import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@ -93,17 +101,28 @@ public class ThreadBesuNodeRunner implements BesuNodeRunner {
final BesuNode node, final BesuNode node,
final StorageServiceImpl storageService, final StorageServiceImpl storageService,
final SecurityModuleServiceImpl securityModuleService, final SecurityModuleServiceImpl securityModuleService,
final BesuConfiguration commonPluginConfiguration) { final TransactionSimulationServiceImpl transactionSimulationServiceImpl,
final TransactionSelectionServiceImpl transactionSelectionServiceImpl,
final TransactionPoolValidatorServiceImpl transactionPoolValidatorServiceImpl,
final BlockchainServiceImpl blockchainServiceImpl,
final RpcEndpointServiceImpl rpcEndpointServiceImpl,
final BesuConfiguration commonPluginConfiguration,
final PermissioningServiceImpl permissioningService) {
final CommandLine commandLine = new CommandLine(CommandSpec.create()); final CommandLine commandLine = new CommandLine(CommandSpec.create());
final BesuPluginContextImpl besuPluginContext = new BesuPluginContextImpl(); final BesuPluginContextImpl besuPluginContext = new BesuPluginContextImpl();
besuPluginContext.addService(StorageService.class, storageService); besuPluginContext.addService(StorageService.class, storageService);
besuPluginContext.addService(SecurityModuleService.class, securityModuleService); besuPluginContext.addService(SecurityModuleService.class, securityModuleService);
besuPluginContext.addService(PicoCLIOptions.class, new PicoCLIOptionsImpl(commandLine)); besuPluginContext.addService(PicoCLIOptions.class, new PicoCLIOptionsImpl(commandLine));
besuPluginContext.addService(RpcEndpointService.class, new RpcEndpointServiceImpl()); besuPluginContext.addService(RpcEndpointService.class, rpcEndpointServiceImpl);
besuPluginContext.addService( besuPluginContext.addService(
TransactionSelectionService.class, new TransactionSelectionServiceImpl()); TransactionSelectionService.class, transactionSelectionServiceImpl);
besuPluginContext.addService( besuPluginContext.addService(
PluginTransactionValidatorService.class, new PluginTransactionValidatorServiceImpl()); TransactionPoolValidatorService.class, transactionPoolValidatorServiceImpl);
besuPluginContext.addService(
TransactionSimulationService.class, transactionSimulationServiceImpl);
besuPluginContext.addService(BlockchainService.class, blockchainServiceImpl);
besuPluginContext.addService(BesuConfiguration.class, commonPluginConfiguration);
final Path pluginsPath; final Path pluginsPath;
final String pluginDir = System.getProperty("besu.plugins.dir"); final String pluginDir = System.getProperty("besu.plugins.dir");
if (pluginDir == null || pluginDir.isEmpty()) { if (pluginDir == null || pluginDir.isEmpty()) {
@ -117,15 +136,16 @@ public class ThreadBesuNodeRunner implements BesuNodeRunner {
} else { } else {
pluginsPath = Path.of(pluginDir); pluginsPath = Path.of(pluginDir);
} }
besuPluginContext.registerPlugins(pluginsPath);
commandLine.parseArgs(node.getConfiguration().getExtraCLIOptions().toArray(new String[0]));
besuPluginContext.addService(BesuConfiguration.class, commonPluginConfiguration); besuPluginContext.addService(BesuConfiguration.class, commonPluginConfiguration);
besuPluginContext.addService(PermissioningService.class, permissioningService);
besuPluginContext.addService(PrivacyPluginService.class, new PrivacyPluginServiceImpl());
besuPluginContext.registerPlugins(pluginsPath);
commandLine.parseArgs(node.getConfiguration().getExtraCLIOptions().toArray(new String[0]));
// register built-in plugins // register built-in plugins
new RocksDBPlugin().register(besuPluginContext); new RocksDBPlugin().register(besuPluginContext);
return besuPluginContext; return besuPluginContext;
} }
@ -143,15 +163,43 @@ public class ThreadBesuNodeRunner implements BesuNodeRunner {
final StorageServiceImpl storageService = new StorageServiceImpl(); final StorageServiceImpl storageService = new StorageServiceImpl();
final SecurityModuleServiceImpl securityModuleService = new SecurityModuleServiceImpl(); final SecurityModuleServiceImpl securityModuleService = new SecurityModuleServiceImpl();
final TransactionSimulationServiceImpl transactionSimulationServiceImpl =
new TransactionSimulationServiceImpl();
final TransactionSelectionServiceImpl transactionSelectionServiceImpl =
new TransactionSelectionServiceImpl();
final TransactionPoolValidatorServiceImpl transactionPoolValidatorServiceImpl =
new TransactionPoolValidatorServiceImpl();
final BlockchainServiceImpl blockchainServiceImpl = new BlockchainServiceImpl();
final RpcEndpointServiceImpl rpcEndpointServiceImpl = new RpcEndpointServiceImpl();
final Path dataDir = node.homeDirectory(); final Path dataDir = node.homeDirectory();
final BesuConfiguration commonPluginConfiguration = final BesuConfigurationImpl commonPluginConfiguration = new BesuConfigurationImpl();
new BesuConfigurationImpl(dataDir, dataDir.resolve(DATABASE_PATH)); final PermissioningServiceImpl permissioningService = new PermissioningServiceImpl();
final var miningParameters =
ImmutableMiningParameters.builder()
.from(node.getMiningParameters())
.transactionSelectionService(transactionSelectionServiceImpl)
.build();
commonPluginConfiguration.init(
dataDir,
dataDir.resolve(DATABASE_PATH),
node.getDataStorageConfiguration(),
miningParameters);
final BesuPluginContextImpl besuPluginContext = final BesuPluginContextImpl besuPluginContext =
besuPluginContextMap.computeIfAbsent( besuPluginContextMap.computeIfAbsent(
node, node,
n -> n ->
buildPluginContext( buildPluginContext(
node, storageService, securityModuleService, commonPluginConfiguration)); node,
storageService,
securityModuleService,
transactionSimulationServiceImpl,
transactionSelectionServiceImpl,
transactionPoolValidatorServiceImpl,
blockchainServiceImpl,
rpcEndpointServiceImpl,
commonPluginConfiguration,
permissioningService));
GlobalOpenTelemetry.resetForTest(); GlobalOpenTelemetry.resetForTest();
final ObservableMetricsSystem metricsSystem = final ObservableMetricsSystem metricsSystem =
@ -184,23 +232,20 @@ public class ThreadBesuNodeRunner implements BesuNodeRunner {
ImmutableTransactionPoolConfiguration.builder() ImmutableTransactionPoolConfiguration.builder()
.from(node.getTransactionPoolConfiguration()) .from(node.getTransactionPoolConfiguration())
.strictTransactionReplayProtectionEnabled(node.isStrictTxReplayProtectionEnabled()) .strictTransactionReplayProtectionEnabled(node.isStrictTxReplayProtectionEnabled())
.transactionPoolValidatorService(transactionPoolValidatorServiceImpl)
.build(); .build();
final int maxPeers = 25; final int maxPeers = 25;
final Optional<PluginTransactionSelectorFactory> transactionSelectorFactory =
getTransactionSelectorFactory(besuPluginContext);
final PluginTransactionValidatorFactory pluginTransactionValidatorFactory =
getPluginTransactionValidatorFactory(besuPluginContext);
builder builder
.synchronizerConfiguration(new SynchronizerConfiguration.Builder().build()) .synchronizerConfiguration(new SynchronizerConfiguration.Builder().build())
.dataDirectory(node.homeDirectory()) .dataDirectory(node.homeDirectory())
.miningParameters(node.getMiningParameters()) .miningParameters(miningParameters)
.privacyParameters(node.getPrivacyParameters()) .privacyParameters(node.getPrivacyParameters())
.nodeKey(new NodeKey(new KeyPairSecurityModule(KeyPairUtil.loadKeyPair(dataDir)))) .nodeKey(new NodeKey(new KeyPairSecurityModule(KeyPairUtil.loadKeyPair(dataDir))))
.metricsSystem(metricsSystem) .metricsSystem(metricsSystem)
.transactionPoolConfiguration(txPoolConfig) .transactionPoolConfiguration(txPoolConfig)
.dataStorageConfiguration(DataStorageConfiguration.DEFAULT_FOREST_CONFIG)
.ethProtocolConfiguration(EthProtocolConfiguration.defaultConfig()) .ethProtocolConfiguration(EthProtocolConfiguration.defaultConfig())
.clock(Clock.systemUTC()) .clock(Clock.systemUTC())
.isRevertReasonEnabled(node.isRevertReasonEnabled()) .isRevertReasonEnabled(node.isRevertReasonEnabled())
@ -211,12 +256,9 @@ public class ThreadBesuNodeRunner implements BesuNodeRunner {
.map(pkiConfig -> new PkiBlockCreationConfigurationProvider().load(pkiConfig))) .map(pkiConfig -> new PkiBlockCreationConfigurationProvider().load(pkiConfig)))
.evmConfiguration(EvmConfiguration.DEFAULT) .evmConfiguration(EvmConfiguration.DEFAULT)
.maxPeers(maxPeers) .maxPeers(maxPeers)
.lowerBoundPeers(maxPeers)
.maxRemotelyInitiatedPeers(15) .maxRemotelyInitiatedPeers(15)
.networkConfiguration(node.getNetworkingConfiguration()) .networkConfiguration(node.getNetworkingConfiguration())
.randomPeerPriority(false) .randomPeerPriority(false);
.transactionSelectorFactory(transactionSelectorFactory)
.pluginTransactionValidatorFactory(pluginTransactionValidatorFactory);
node.getGenesisConfig() node.getGenesisConfig()
.map(GenesisConfigFile::fromConfig) .map(GenesisConfigFile::fromConfig)
@ -224,6 +266,10 @@ public class ThreadBesuNodeRunner implements BesuNodeRunner {
final BesuController besuController = builder.build(); final BesuController besuController = builder.build();
initTransactionSimulationService(
transactionSimulationServiceImpl, besuController, node.getApiConfiguration());
initBlockchainService(blockchainServiceImpl, besuController);
final RunnerBuilder runnerBuilder = new RunnerBuilder(); final RunnerBuilder runnerBuilder = new RunnerBuilder();
runnerBuilder.permissioningConfiguration(node.getPermissioningConfiguration()); runnerBuilder.permissioningConfiguration(node.getPermissioningConfiguration());
runnerBuilder.apiConfiguration(node.getApiConfiguration()); runnerBuilder.apiConfiguration(node.getApiConfiguration());
@ -241,7 +287,7 @@ public class ThreadBesuNodeRunner implements BesuNodeRunner {
.jsonRpcIpcConfiguration(node.jsonRpcIpcConfiguration()) .jsonRpcIpcConfiguration(node.jsonRpcIpcConfiguration())
.dataDir(node.homeDirectory()) .dataDir(node.homeDirectory())
.metricsSystem(metricsSystem) .metricsSystem(metricsSystem)
.permissioningService(new PermissioningServiceImpl()) .permissioningService(permissioningService)
.metricsConfiguration(node.getMetricsConfiguration()) .metricsConfiguration(node.getMetricsConfiguration())
.p2pEnabled(node.isP2pEnabled()) .p2pEnabled(node.isP2pEnabled())
.p2pTLSConfiguration(node.getTLSConfiguration()) .p2pTLSConfiguration(node.getTLSConfiguration())
@ -250,15 +296,14 @@ public class ThreadBesuNodeRunner implements BesuNodeRunner {
node.getStaticNodes().stream() node.getStaticNodes().stream()
.map(EnodeURLImpl::fromString) .map(EnodeURLImpl::fromString)
.collect(Collectors.toList())) .collect(Collectors.toList()))
.besuPluginContext(new BesuPluginContextImpl()) .besuPluginContext(besuPluginContext)
.autoLogBloomCaching(false) .autoLogBloomCaching(false)
.storageProvider(storageProvider) .storageProvider(storageProvider)
.rpcEndpointService(new RpcEndpointServiceImpl()); .rpcEndpointService(rpcEndpointServiceImpl);
node.engineRpcConfiguration().ifPresent(runnerBuilder::engineJsonRpcConfiguration); node.engineRpcConfiguration().ifPresent(runnerBuilder::engineJsonRpcConfiguration);
final Runner runner = runnerBuilder.build();
besuPluginContext.beforeExternalServices(); besuPluginContext.beforeExternalServices();
final Runner runner = runnerBuilder.build();
runner.startExternalServices(); runner.startExternalServices();
@ -277,6 +322,25 @@ public class ThreadBesuNodeRunner implements BesuNodeRunner {
MDC.remove("node"); MDC.remove("node");
} }
private void initBlockchainService(
final BlockchainServiceImpl blockchainServiceImpl, final BesuController besuController) {
blockchainServiceImpl.init(
besuController.getProtocolContext(), besuController.getProtocolSchedule());
}
private void initTransactionSimulationService(
final TransactionSimulationServiceImpl transactionSimulationService,
final BesuController besuController,
final ApiConfiguration apiConfiguration) {
transactionSimulationService.init(
besuController.getProtocolContext().getBlockchain(),
new TransactionSimulator(
besuController.getProtocolContext().getBlockchain(),
besuController.getProtocolContext().getWorldStateArchive(),
besuController.getProtocolSchedule(),
apiConfiguration.getGasCap()));
}
@Override @Override
public void stopNode(final BesuNode node) { public void stopNode(final BesuNode node) {
final BesuPluginContextImpl pluginContext = besuPluginContextMap.remove(node); final BesuPluginContextImpl pluginContext = besuPluginContextMap.remove(node);
@ -326,18 +390,4 @@ public class ThreadBesuNodeRunner implements BesuNodeRunner {
public String getConsoleContents() { public String getConsoleContents() {
throw new RuntimeException("Console contents can only be captured in process execution"); throw new RuntimeException("Console contents can only be captured in process execution");
} }
private Optional<PluginTransactionSelectorFactory> getTransactionSelectorFactory(
final BesuPluginContextImpl besuPluginContext) {
final Optional<TransactionSelectionService> txSelectionService =
besuPluginContext.getService(TransactionSelectionService.class);
return txSelectionService.isPresent() ? txSelectionService.get().get() : Optional.empty();
}
private PluginTransactionValidatorFactory getPluginTransactionValidatorFactory(
final BesuPluginContextImpl besuPluginContext) {
final Optional<PluginTransactionValidatorService> txValidatorService =
besuPluginContext.getService(PluginTransactionValidatorService.class);
return txValidatorService.map(PluginTransactionValidatorService::get).orElse(null);
}
} }

@ -26,6 +26,7 @@ import org.hyperledger.besu.ethereum.eth.transactions.TransactionPoolConfigurati
import org.hyperledger.besu.ethereum.p2p.config.NetworkingConfiguration; import org.hyperledger.besu.ethereum.p2p.config.NetworkingConfiguration;
import org.hyperledger.besu.ethereum.p2p.rlpx.connections.netty.TLSConfiguration; import org.hyperledger.besu.ethereum.p2p.rlpx.connections.netty.TLSConfiguration;
import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration; import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.metrics.prometheus.MetricsConfiguration; import org.hyperledger.besu.metrics.prometheus.MetricsConfiguration;
import org.hyperledger.besu.pki.config.PkiKeyStoreConfiguration; import org.hyperledger.besu.pki.config.PkiKeyStoreConfiguration;
import org.hyperledger.besu.tests.acceptance.dsl.node.configuration.genesis.GenesisConfigurationProvider; import org.hyperledger.besu.tests.acceptance.dsl.node.configuration.genesis.GenesisConfigurationProvider;
@ -48,6 +49,7 @@ public class BesuNodeConfiguration {
private final MetricsConfiguration metricsConfiguration; private final MetricsConfiguration metricsConfiguration;
private final Optional<PermissioningConfiguration> permissioningConfiguration; private final Optional<PermissioningConfiguration> permissioningConfiguration;
private final ApiConfiguration apiConfiguration; private final ApiConfiguration apiConfiguration;
private final DataStorageConfiguration dataStorageConfiguration;
private final Optional<String> keyFilePath; private final Optional<String> keyFilePath;
private final boolean devMode; private final boolean devMode;
private final GenesisConfigurationProvider genesisConfigProvider; private final GenesisConfigurationProvider genesisConfigProvider;
@ -84,6 +86,7 @@ public class BesuNodeConfiguration {
final MetricsConfiguration metricsConfiguration, final MetricsConfiguration metricsConfiguration,
final Optional<PermissioningConfiguration> permissioningConfiguration, final Optional<PermissioningConfiguration> permissioningConfiguration,
final ApiConfiguration apiConfiguration, final ApiConfiguration apiConfiguration,
final DataStorageConfiguration dataStorageConfiguration,
final Optional<String> keyFilePath, final Optional<String> keyFilePath,
final boolean devMode, final boolean devMode,
final NetworkName network, final NetworkName network,
@ -117,6 +120,7 @@ public class BesuNodeConfiguration {
this.metricsConfiguration = metricsConfiguration; this.metricsConfiguration = metricsConfiguration;
this.permissioningConfiguration = permissioningConfiguration; this.permissioningConfiguration = permissioningConfiguration;
this.apiConfiguration = apiConfiguration; this.apiConfiguration = apiConfiguration;
this.dataStorageConfiguration = dataStorageConfiguration;
this.keyFilePath = keyFilePath; this.keyFilePath = keyFilePath;
this.dataPath = dataPath; this.dataPath = dataPath;
this.devMode = devMode; this.devMode = devMode;
@ -183,6 +187,10 @@ public class BesuNodeConfiguration {
return apiConfiguration; return apiConfiguration;
} }
public DataStorageConfiguration getDataStorageConfiguration() {
return dataStorageConfiguration;
}
public Optional<String> getKeyFilePath() { public Optional<String> getKeyFilePath() {
return keyFilePath; return keyFilePath;
} }

@ -39,6 +39,7 @@ import org.hyperledger.besu.ethereum.eth.transactions.TransactionPoolConfigurati
import org.hyperledger.besu.ethereum.p2p.config.NetworkingConfiguration; import org.hyperledger.besu.ethereum.p2p.config.NetworkingConfiguration;
import org.hyperledger.besu.ethereum.p2p.rlpx.connections.netty.TLSConfiguration; import org.hyperledger.besu.ethereum.p2p.rlpx.connections.netty.TLSConfiguration;
import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration; import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.metrics.prometheus.MetricsConfiguration; import org.hyperledger.besu.metrics.prometheus.MetricsConfiguration;
import org.hyperledger.besu.pki.config.PkiKeyStoreConfiguration; import org.hyperledger.besu.pki.config.PkiKeyStoreConfiguration;
import org.hyperledger.besu.tests.acceptance.dsl.node.configuration.genesis.GenesisConfigurationProvider; import org.hyperledger.besu.tests.acceptance.dsl.node.configuration.genesis.GenesisConfigurationProvider;
@ -73,6 +74,8 @@ public class BesuNodeConfigurationBuilder {
private MetricsConfiguration metricsConfiguration = MetricsConfiguration.builder().build(); private MetricsConfiguration metricsConfiguration = MetricsConfiguration.builder().build();
private Optional<PermissioningConfiguration> permissioningConfiguration = Optional.empty(); private Optional<PermissioningConfiguration> permissioningConfiguration = Optional.empty();
private ApiConfiguration apiConfiguration = ImmutableApiConfiguration.builder().build(); private ApiConfiguration apiConfiguration = ImmutableApiConfiguration.builder().build();
private DataStorageConfiguration dataStorageConfiguration =
DataStorageConfiguration.DEFAULT_FOREST_CONFIG;
private String keyFilePath = null; private String keyFilePath = null;
private boolean devMode = true; private boolean devMode = true;
private GenesisConfigurationProvider genesisConfigProvider = ignore -> Optional.empty(); private GenesisConfigurationProvider genesisConfigProvider = ignore -> Optional.empty();
@ -506,6 +509,12 @@ public class BesuNodeConfigurationBuilder {
return this; return this;
} }
public BesuNodeConfigurationBuilder dataStorageConfiguration(
final DataStorageConfiguration dataStorageConfiguration) {
this.dataStorageConfiguration = dataStorageConfiguration;
return this;
}
public BesuNodeConfiguration build() { public BesuNodeConfiguration build() {
return new BesuNodeConfiguration( return new BesuNodeConfiguration(
name, name,
@ -519,6 +528,7 @@ public class BesuNodeConfigurationBuilder {
metricsConfiguration, metricsConfiguration,
permissioningConfiguration, permissioningConfiguration,
apiConfiguration, apiConfiguration,
dataStorageConfiguration,
Optional.ofNullable(keyFilePath), Optional.ofNullable(keyFilePath),
devMode, devMode,
network, network,

@ -49,6 +49,7 @@ import java.nio.file.Paths;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.Set;
import java.util.function.UnaryOperator; import java.util.function.UnaryOperator;
import io.vertx.core.Vertx; import io.vertx.core.Vertx;
@ -72,6 +73,7 @@ public class BesuNodeFactory {
config.getMetricsConfiguration(), config.getMetricsConfiguration(),
config.getPermissioningConfiguration(), config.getPermissioningConfiguration(),
config.getApiConfiguration(), config.getApiConfiguration(),
config.getDataStorageConfiguration(),
config.getKeyFilePath(), config.getKeyFilePath(),
config.isDevMode(), config.isDevMode(),
config.getNetwork(), config.getNetwork(),
@ -375,17 +377,27 @@ public class BesuNodeFactory {
public BesuNode createCliqueNode(final String name, final CliqueOptions cliqueOptions) public BesuNode createCliqueNode(final String name, final CliqueOptions cliqueOptions)
throws IOException { throws IOException {
return createCliqueNodeWithExtraCliOptions(name, cliqueOptions, List.of()); return createCliqueNodeWithExtraCliOptionsAndRpcApis(name, cliqueOptions, List.of());
} }
public BesuNode createCliqueNodeWithExtraCliOptions( public BesuNode createCliqueNodeWithExtraCliOptionsAndRpcApis(
final String name, final CliqueOptions cliqueOptions, final List<String> extraCliOptions) final String name, final CliqueOptions cliqueOptions, final List<String> extraCliOptions)
throws IOException { throws IOException {
return createCliqueNodeWithExtraCliOptionsAndRpcApis(
name, cliqueOptions, extraCliOptions, Set.of());
}
public BesuNode createCliqueNodeWithExtraCliOptionsAndRpcApis(
final String name,
final CliqueOptions cliqueOptions,
final List<String> extraCliOptions,
final Set<String> extraRpcApis)
throws IOException {
return create( return create(
new BesuNodeConfigurationBuilder() new BesuNodeConfigurationBuilder()
.name(name) .name(name)
.miningEnabled() .miningEnabled()
.jsonRpcConfiguration(node.createJsonRpcWithCliqueEnabledConfig()) .jsonRpcConfiguration(node.createJsonRpcWithCliqueEnabledConfig(extraRpcApis))
.webSocketConfiguration(node.createWebSocketEnabledConfig()) .webSocketConfiguration(node.createWebSocketEnabledConfig())
.devMode(false) .devMode(false)
.jsonRpcTxPool() .jsonRpcTxPool()
@ -583,7 +595,7 @@ public class BesuNodeFactory {
new BesuNodeConfigurationBuilder() new BesuNodeConfigurationBuilder()
.name(name) .name(name)
.miningEnabled() .miningEnabled()
.jsonRpcConfiguration(node.createJsonRpcWithCliqueEnabledConfig()) .jsonRpcConfiguration(node.createJsonRpcWithCliqueEnabledConfig(Set.of()))
.webSocketConfiguration(node.createWebSocketEnabledConfig()) .webSocketConfiguration(node.createWebSocketEnabledConfig())
.jsonRpcTxPool() .jsonRpcTxPool()
.devMode(false) .devMode(false)

@ -30,8 +30,10 @@ import org.hyperledger.besu.tests.acceptance.dsl.node.configuration.genesis.Gene
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.Set;
public class NodeConfigurationFactory { public class NodeConfigurationFactory {
@ -44,8 +46,10 @@ public class NodeConfigurationFactory {
return genesisConfigProvider.create(nodes); return genesisConfigProvider.create(nodes);
} }
public JsonRpcConfiguration createJsonRpcWithCliqueEnabledConfig() { public JsonRpcConfiguration createJsonRpcWithCliqueEnabledConfig(final Set<String> extraRpcApis) {
return createJsonRpcWithRpcApiEnabledConfig(CLIQUE.name()); final var enabledApis = new HashSet<>(extraRpcApis);
enabledApis.add(CLIQUE.name());
return createJsonRpcWithRpcApiEnabledConfig(enabledApis.toArray(String[]::new));
} }
public JsonRpcConfiguration createJsonRpcWithIbft2EnabledConfig(final boolean minerEnabled) { public JsonRpcConfiguration createJsonRpcWithIbft2EnabledConfig(final boolean minerEnabled) {

@ -76,6 +76,7 @@ public class PrivacyNode implements AutoCloseable {
private final boolean isFlexiblePrivacyEnabled; private final boolean isFlexiblePrivacyEnabled;
private final boolean isMultitenancyEnabled; private final boolean isMultitenancyEnabled;
private final boolean isPrivacyPluginEnabled; private final boolean isPrivacyPluginEnabled;
private final BesuNodeConfiguration besuConfig;
public PrivacyNode( public PrivacyNode(
final PrivacyNodeConfiguration privacyConfiguration, final PrivacyNodeConfiguration privacyConfiguration,
@ -89,7 +90,7 @@ public class PrivacyNode implements AutoCloseable {
selectEnclave(enclaveType, enclaveDir, config, privacyConfiguration, containerNetwork); selectEnclave(enclaveType, enclaveDir, config, privacyConfiguration, containerNetwork);
this.vertx = vertx; this.vertx = vertx;
final BesuNodeConfiguration besuConfig = config; this.besuConfig = config;
isFlexiblePrivacyEnabled = privacyConfiguration.isFlexiblePrivacyGroupEnabled(); isFlexiblePrivacyEnabled = privacyConfiguration.isFlexiblePrivacyGroupEnabled();
isMultitenancyEnabled = privacyConfiguration.isMultitenancyEnabled(); isMultitenancyEnabled = privacyConfiguration.isMultitenancyEnabled();
@ -108,6 +109,7 @@ public class PrivacyNode implements AutoCloseable {
besuConfig.getMetricsConfiguration(), besuConfig.getMetricsConfiguration(),
besuConfig.getPermissioningConfiguration(), besuConfig.getPermissioningConfiguration(),
besuConfig.getApiConfiguration(), besuConfig.getApiConfiguration(),
besuConfig.getDataStorageConfiguration(),
besuConfig.getKeyFilePath(), besuConfig.getKeyFilePath(),
besuConfig.isDevMode(), besuConfig.isDevMode(),
besuConfig.getNetwork(), besuConfig.getNetwork(),
@ -272,6 +274,8 @@ public class PrivacyNode implements AutoCloseable {
private PrivacyStorageProvider createKeyValueStorageProvider( private PrivacyStorageProvider createKeyValueStorageProvider(
final Path dataLocation, final Path dbLocation) { final Path dataLocation, final Path dbLocation) {
final var besuConfiguration = new BesuConfigurationImpl();
besuConfiguration.init(dataLocation, dbLocation, null, besuConfig.getMiningParameters());
return new PrivacyKeyValueStorageProviderBuilder() return new PrivacyKeyValueStorageProviderBuilder()
.withStorageFactory( .withStorageFactory(
new RocksDBKeyValuePrivacyStorageFactory( new RocksDBKeyValuePrivacyStorageFactory(
@ -284,7 +288,7 @@ public class PrivacyNode implements AutoCloseable {
DEFAULT_IS_HIGH_SPEC), DEFAULT_IS_HIGH_SPEC),
Arrays.asList(KeyValueSegmentIdentifier.values()), Arrays.asList(KeyValueSegmentIdentifier.values()),
RocksDBMetricsFactory.PRIVATE_ROCKS_DB_METRICS))) RocksDBMetricsFactory.PRIVATE_ROCKS_DB_METRICS)))
.withCommonConfiguration(new BesuConfigurationImpl(dataLocation, dbLocation)) .withCommonConfiguration(besuConfiguration)
.withMetricsSystem(new NoOpMetricsSystem()) .withMetricsSystem(new NoOpMetricsSystem())
.build(); .build();
} }

@ -53,6 +53,6 @@ public class WebSocket {
public void verifyTotalEventsReceived(final int expectedTotalEventCount) { public void verifyTotalEventsReceived(final int expectedTotalEventCount) {
WaitUtils.waitFor( WaitUtils.waitFor(
() -> assertThat(connection.getSubscriptionEvents()).hasSize(expectedTotalEventCount)); 60, () -> assertThat(connection.getSubscriptionEvents()).hasSize(expectedTotalEventCount));
} }
} }

@ -20,6 +20,7 @@ import java.lang.reflect.Method;
import java.math.BigInteger; import java.math.BigInteger;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.web3j.crypto.Credentials; import org.web3j.crypto.Credentials;
@ -83,7 +84,7 @@ public class DeploySmartContractTransaction<T extends Contract> implements Trans
@SuppressWarnings("rawtypes") @SuppressWarnings("rawtypes")
private boolean parameterTypesAreEqual( private boolean parameterTypesAreEqual(
final Class<?>[] expectedTypes, final ArrayList<Object> actualObjects) { final Class<?>[] expectedTypes, final List<Object> actualObjects) {
if (expectedTypes.length != actualObjects.size()) { if (expectedTypes.length != actualObjects.size()) {
return false; return false;
} }

@ -27,10 +27,11 @@ import org.hyperledger.besu.tests.acceptance.dsl.transaction.txpool.TxPoolReques
import java.util.Optional; import java.util.Optional;
import org.web3j.protocol.Web3j; import org.web3j.protocol.Web3j;
import org.web3j.protocol.Web3jService;
import org.web3j.protocol.websocket.WebSocketService; import org.web3j.protocol.websocket.WebSocketService;
public class NodeRequests { public class NodeRequests {
private final Web3jService web3jService;
private final Web3j netEth; private final Web3j netEth;
private final CliqueRequestFactory clique; private final CliqueRequestFactory clique;
private final BftRequestFactory bft; private final BftRequestFactory bft;
@ -44,6 +45,7 @@ public class NodeRequests {
private final TxPoolRequestFactory txPool; private final TxPoolRequestFactory txPool;
public NodeRequests( public NodeRequests(
final Web3jService web3jService,
final Web3j netEth, final Web3j netEth,
final CliqueRequestFactory clique, final CliqueRequestFactory clique,
final BftRequestFactory bft, final BftRequestFactory bft,
@ -55,6 +57,7 @@ public class NodeRequests {
final TxPoolRequestFactory txPool, final TxPoolRequestFactory txPool,
final Optional<WebSocketService> websocketService, final Optional<WebSocketService> websocketService,
final LoginRequestFactory login) { final LoginRequestFactory login) {
this.web3jService = web3jService;
this.netEth = netEth; this.netEth = netEth;
this.clique = clique; this.clique = clique;
this.bft = bft; this.bft = bft;
@ -116,4 +119,8 @@ public class NodeRequests {
netEth.shutdown(); netEth.shutdown();
websocketService.ifPresent(WebSocketService::close); websocketService.ifPresent(WebSocketService::close);
} }
public Web3jService getWeb3jService() {
return web3jService;
}
} }

@ -12,24 +12,19 @@
*/ */
plugins { plugins {
id 'org.web3j' version '4.9.2' id 'org.web3j' version '4.11.1'
id 'org.web3j.solidity' version '0.3.5' id 'org.web3j.solidity' version '0.4.0'
}
configurations.all {
resolutionStrategy.eachDependency { DependencyResolveDetails details ->
if (details.requested.group == 'org.web3j' && details.requested.version == '4.9.2') {
details.useVersion '4.9.4'
details.because 'Plugin version is 4.9.2 (latest), but we want it to use web3j libs version 4.9.4'
}
}
} }
web3j { generatedPackageName = 'org.hyperledger.besu.tests.web3j.generated' } web3j { generatedPackageName = 'org.hyperledger.besu.tests.web3j.generated' }
sourceSets.main.solidity.srcDirs = ["$projectDir/contracts"] sourceSets.main.solidity.srcDirs = ["$projectDir/contracts"]
solidity { resolvePackages = false } solidity {
resolvePackages = false
// TODO: remove the forced version, when DEV network is upgraded to support latest forks
version '0.8.19'
}
dependencies { dependencies {
api 'org.slf4j:slf4j-api' api 'org.slf4j:slf4j-api'
@ -155,6 +150,33 @@ task acceptanceTestMainnet(type: Test) {
doFirst { mkdir "${buildDir}/jvmErrorLogs" } doFirst { mkdir "${buildDir}/jvmErrorLogs" }
} }
task acceptanceTestNotPrivacy(type: Test) {
inputs.property "integration.date", LocalTime.now() // so it runs at every invocation
exclude '**/privacy/**'
useJUnitPlatform {}
dependsOn(rootProject.installDist)
setSystemProperties(test.getSystemProperties())
systemProperty 'acctests.runBesuAsProcess', 'true'
systemProperty 'java.security.properties', "${buildDir}/resources/test/acceptanceTesting.security"
mustRunAfter rootProject.subprojects*.test
description = 'Runs MAINNET Besu acceptance tests (excluding privacy since they run nightly, and are being refactored).'
group = 'verification'
jvmArgs "-XX:ErrorFile=${buildDir}/jvmErrorLogs/java_err_pid%p.log"
testLogging {
exceptionFormat = 'full'
showStackTraces = true
showStandardStreams = Boolean.getBoolean('acctests.showStandardStreams')
showExceptions = true
showCauses = true
}
doFirst { mkdir "${buildDir}/jvmErrorLogs" }
}
task acceptanceTestCliqueBft(type: Test) { task acceptanceTestCliqueBft(type: Test) {
inputs.property "integration.date", LocalTime.now() // so it runs at every invocation inputs.property "integration.date", LocalTime.now() // so it runs at every invocation
include '**/bft/**' include '**/bft/**'

@ -12,7 +12,7 @@
* *
* SPDX-License-Identifier: Apache-2.0 * SPDX-License-Identifier: Apache-2.0
*/ */
pragma solidity >=0.7.0 <0.9.0; pragma solidity >=0.7.0 <0.8.20;
// compile with: // compile with:
// solc SimpleStorage.sol --bin --abi --optimize --overwrite -o . // solc SimpleStorage.sol --bin --abi --optimize --overwrite -o .

@ -150,6 +150,36 @@ public class BftMiningAcceptanceTest extends ParameterizedBftTestBase {
cluster.verify(receiver.balanceEquals(3)); cluster.verify(receiver.balanceEquals(3));
} }
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("factoryFunctions")
public void shouldMineOnSingleNodeWithFreeGas_Shanghai(
final String testName, final BftAcceptanceTestParameterization nodeFactory) throws Exception {
setUp(testName, nodeFactory);
final BesuNode minerNode = nodeFactory.createNode(besu, "miner1");
updateGenesisConfigToShanghai(minerNode, true);
cluster.start(minerNode);
cluster.verify(blockchain.reachesHeight(minerNode, 1));
final Account sender = accounts.createAccount("account1");
final Account receiver = accounts.createAccount("account2");
minerNode.execute(accountTransactions.createTransfer(sender, 50, Amount.ZERO));
cluster.verify(sender.balanceEquals(50));
minerNode.execute(accountTransactions.create1559Transfer(sender, 50, 4, Amount.ZERO));
cluster.verify(sender.balanceEquals(100));
minerNode.execute(
accountTransactions.createIncrementalTransfers(sender, receiver, 1, Amount.ZERO));
cluster.verify(receiver.balanceEquals(1));
minerNode.execute(
accountTransactions.create1559IncrementalTransfers(sender, receiver, 2, 4, Amount.ZERO));
cluster.verify(receiver.balanceEquals(3));
}
@ParameterizedTest(name = "{index}: {0}") @ParameterizedTest(name = "{index}: {0}")
@MethodSource("factoryFunctions") @MethodSource("factoryFunctions")
public void shouldMineOnMultipleNodes( public void shouldMineOnMultipleNodes(
@ -245,4 +275,16 @@ public class BftMiningAcceptanceTest extends ParameterizedBftTestBase {
config.put("zeroBaseFee", zeroBaseFeeEnabled); config.put("zeroBaseFee", zeroBaseFeeEnabled);
minerNode.setGenesisConfig(genesisConfigNode.toString()); minerNode.setGenesisConfig(genesisConfigNode.toString());
} }
/**
 * Rewrites the node's generated genesis so the chain activates Shanghai instead of Berlin.
 *
 * <p>The Berlin fork block is removed, {@code shanghaiTime} is fixed at 100, and
 * {@code zeroBaseFee} is set as requested; the rewritten JSON is pushed back onto the node.
 */
private static void updateGenesisConfigToShanghai(
    final BesuNode minerNode, final boolean zeroBaseFeeEnabled) {
  final String baseGenesis =
      minerNode.getGenesisConfigProvider().create(List.of(minerNode)).orElseThrow();
  final ObjectNode genesisNode = JsonUtil.objectNodeFromString(baseGenesis);
  final ObjectNode configSection = (ObjectNode) genesisNode.get("config");
  configSection.remove("berlinBlock");
  configSection.put("shanghaiTime", 100);
  configSection.put("zeroBaseFee", zeroBaseFeeEnabled);
  minerNode.setGenesisConfig(genesisNode.toString());
}
} }

@ -14,14 +14,23 @@
*/ */
package org.hyperledger.besu.tests.acceptance.clique; package org.hyperledger.besu.tests.acceptance.clique;
import static java.util.stream.Collectors.joining;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
import static org.assertj.core.data.Percentage.withPercentage;
import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBaseJunit5; import org.hyperledger.besu.tests.acceptance.dsl.AcceptanceTestBaseJunit5;
import org.hyperledger.besu.tests.acceptance.dsl.account.Account; import org.hyperledger.besu.tests.acceptance.dsl.account.Account;
import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode; import org.hyperledger.besu.tests.acceptance.dsl.node.BesuNode;
import org.hyperledger.besu.tests.acceptance.dsl.node.configuration.genesis.GenesisConfigurationFactory.CliqueOptions; import org.hyperledger.besu.tests.acceptance.dsl.node.configuration.genesis.GenesisConfigurationFactory.CliqueOptions;
import java.io.IOException; import java.io.IOException;
import java.math.BigInteger;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.web3j.protocol.core.DefaultBlockParameter;
public class CliqueMiningAcceptanceTest extends AcceptanceTestBaseJunit5 { public class CliqueMiningAcceptanceTest extends AcceptanceTestBaseJunit5 {
@ -55,7 +64,7 @@ public class CliqueMiningAcceptanceTest extends AcceptanceTestBaseJunit5 {
} }
@Test @Test
public void shouldMineBlocksOnlyWhenTransactionsArePresentWhenCreateEmptyBlockIsFalse() public void shouldMineBlocksOnlyWhenTransactionsArePresentWhenCreateEmptyBlocksIsFalse()
throws IOException { throws IOException {
final var cliqueOptionsNoEmptyBlocks = final var cliqueOptionsNoEmptyBlocks =
new CliqueOptions( new CliqueOptions(
@ -123,4 +132,173 @@ public class CliqueMiningAcceptanceTest extends AcceptanceTestBaseJunit5 {
cluster.verifyOnActiveNodes(clique.blockIsCreatedByProposer(minerNode1)); cluster.verifyOnActiveNodes(clique.blockIsCreatedByProposer(minerNode1));
cluster.verifyOnActiveNodes(clique.blockIsCreatedByProposer(minerNode2)); cluster.verifyOnActiveNodes(clique.blockIsCreatedByProposer(minerNode2));
} }
// Verifies that blockperiodseconds transitions in the genesis take effect at their
// configured block numbers, and that a later unrelated transition does not reset the
// previously-activated block period.
@Test
public void shouldMineBlocksAccordingToBlockPeriodTransitions() throws IOException {
  // Start with a 3-second block period and empty blocks enabled.
  final var cliqueOptions = new CliqueOptions(3, CliqueOptions.DEFAULT.epochLength(), true);
  final BesuNode minerNode = besu.createCliqueNode("miner1", cliqueOptions);
  // setup transitions: 3s -> 2s at block 3, 2s -> 1s at block 4, back up to 2s at block 6
  final Map<String, Object> decreasePeriodTo2_Transition =
      Map.of("block", 3, "blockperiodseconds", 2);
  final Map<String, Object> decreasePeriodTo1_Transition =
      Map.of("block", 4, "blockperiodseconds", 1);
  // ensure previous blockperiodseconds transition is carried over when an unrelated
  // createemptyblocks transition activates at block 5
  final Map<String, Object> dummy_Transition = Map.of("block", 5, "createemptyblocks", true);
  final Map<String, Object> increasePeriodTo2_Transition =
      Map.of("block", 6, "blockperiodseconds", 2);
  // Splice the transitions section into the generated genesis before starting the node.
  final Optional<String> initialGenesis =
      minerNode.getGenesisConfigProvider().create(List.of(minerNode));
  final String genesisWithTransitions =
      prependTransitionsToCliqueOptions(
          initialGenesis.orElseThrow(),
          List.of(
              decreasePeriodTo2_Transition,
              decreasePeriodTo1_Transition,
              dummy_Transition,
              increasePeriodTo2_Transition));
  minerNode.setGenesisConfig(genesisWithTransitions);
  // Mine 6 blocks
  cluster.start(minerNode);
  // NOTE(review): height 5 is awaited here but block 6's timestamp is read below —
  // presumably the chain advances past 6 by the time the queries run; confirm DSL timing.
  minerNode.verify(blockchain.reachesHeight(minerNode, 5));
  // Assert the block period decreased/increased after each transition by comparing
  // consecutive block timestamps (20% tolerance absorbs scheduling jitter).
  final long block1Timestamp = getTimestampForBlock(minerNode, 1);
  final long block2Timestamp = getTimestampForBlock(minerNode, 2);
  final long block3Timestamp = getTimestampForBlock(minerNode, 3);
  final long block4Timestamp = getTimestampForBlock(minerNode, 4);
  final long block5Timestamp = getTimestampForBlock(minerNode, 5);
  final long block6Timestamp = getTimestampForBlock(minerNode, 6);
  assertThat(block2Timestamp - block1Timestamp).isCloseTo(3, withPercentage(20));
  assertThat(block3Timestamp - block2Timestamp).isCloseTo(2, withPercentage(20));
  assertThat(block4Timestamp - block3Timestamp).isCloseTo(1, withPercentage(20));
  assertThat(block5Timestamp - block4Timestamp).isCloseTo(1, withPercentage(20));
  assertThat(block6Timestamp - block5Timestamp).isCloseTo(2, withPercentage(20));
}
// Verifies that createemptyblocks transitions toggle empty-block production at their
// configured heights, and that the latest createemptyblocks=false setting survives a
// later unrelated blockperiodseconds transition.
@Test
public void shouldMineBlocksAccordingToCreateEmptyBlocksTransitions() throws IOException {
  // Start with a 2-second period and empty blocks enabled.
  final var cliqueOptionsEmptyBlocks =
      new CliqueOptions(2, CliqueOptions.DEFAULT.epochLength(), true);
  final BesuNode minerNode = besu.createCliqueNode("miner1", cliqueOptionsEmptyBlocks);
  // setup transitions: empty blocks off at 3, on at 4, off again at 6
  final Map<String, Object> noEmptyBlocks_Transition =
      Map.of("block", 3, "createemptyblocks", false);
  final Map<String, Object> emptyBlocks_Transition =
      Map.of("block", 4, "createemptyblocks", true);
  final Map<String, Object> secondNoEmptyBlocks_Transition =
      Map.of("block", 6, "createemptyblocks", false);
  // ensure previous createemptyblocks transition is carried over
  final Map<String, Object> dummy_Transition = Map.of("block", 7, "blockperiodseconds", 1);
  // Splice the transitions section into the generated genesis before starting the node.
  final Optional<String> initialGenesis =
      minerNode.getGenesisConfigProvider().create(List.of(minerNode));
  final String genesisWithTransitions =
      prependTransitionsToCliqueOptions(
          initialGenesis.orElseThrow(),
          List.of(
              noEmptyBlocks_Transition,
              emptyBlocks_Transition,
              secondNoEmptyBlocks_Transition,
              dummy_Transition));
  minerNode.setGenesisConfig(genesisWithTransitions);
  final Account sender = accounts.createAccount("account1");
  // Mine 2 blocks
  cluster.start(minerNode);
  minerNode.verify(blockchain.reachesHeight(minerNode, 1));
  // tx required to mine block — empty-block production is off from block 3
  cluster.verify(clique.noNewBlockCreated(minerNode));
  minerNode.execute(accountTransactions.createTransfer(sender, 50));
  minerNode.verify(clique.blockIsCreatedByProposer(minerNode));
  // Mine 2 more blocks so chain head is 5 (empty blocks re-enabled at block 4)
  minerNode.verify(blockchain.reachesHeight(minerNode, 2));
  // tx required to mine block 6 — empty-block production is off again
  cluster.verify(clique.noNewBlockCreated(minerNode));
  minerNode.execute(accountTransactions.createTransfer(sender, 50));
  minerNode.verify(clique.blockIsCreatedByProposer(minerNode));
  // check createemptyblocks transition carried over when other transition activated...
  // tx required to mine block 7
  cluster.verify(clique.noNewBlockCreated(minerNode));
}
// Returns the timestamp (seconds) of the block at the given height, queried from the node.
private long getTimestampForBlock(final BesuNode minerNode, final int blockNumber) {
  final var blockParameter =
      DefaultBlockParameter.valueOf(BigInteger.valueOf(blockNumber));
  final var block = minerNode.execute(ethTransactions.block(blockParameter));
  return block.getTimestamp().longValue();
}
/**
 * Injects a {@code "transitions"} section into a genesis JSON string, immediately
 * before the existing {@code "clique"} section.
 *
 * @param originalOptions the genesis JSON produced by the genesis config provider
 * @param transitions the transition maps to render, in order
 * @return the genesis JSON with the transitions section prepended to the clique section
 */
private String prependTransitionsToCliqueOptions(
    final String originalOptions, final List<Map<String, Object>> transitions) {
  final String cliqueSectionStart = quote("clique") + ": {";
  final String replacement =
      formatCliqueTransitionsOptions(transitions) + ",\n" + cliqueSectionStart;
  return originalOptions.replace(cliqueSectionStart, replacement);
}
// Renders the transitions as: "transitions": {\n"clique": [<t1>,\n<t2>,...\n]}\n
private String formatCliqueTransitionsOptions(final List<Map<String, Object>> transitions) {
  final String renderedTransitions =
      transitions.stream().map(this::formatTransition).collect(joining(",\n"));
  return quote("transitions")
      + ": {\n"
      + quote("clique")
      + ": ["
      + renderedTransitions
      + "\n]"
      + "}\n";
}
// Wraps the value's string representation in double quotes.
private String quote(final Object value) {
  return "\"" + value.toString() + "\"";
}
// Renders a single transition map as a JSON-ish object: {"key": value, ...}.
private String formatTransition(final Map<String, Object> transition) {
  return transition.keySet().stream()
      .map(key -> formatKeyValues(key, transition.get(key)))
      .collect(joining(",", "{", "}"));
}
/**
 * Renders alternating key/value arguments as {@code \n"key": value} entries joined by
 * {@code ", "}. String values are quoted; everything else uses its {@code toString()}.
 *
 * @throws IllegalArgumentException if an odd number of arguments is supplied
 */
private String formatKeyValues(final Object... keyOrValue) {
  if (keyOrValue.length % 2 != 0) {
    // An odd number of strings cannot form a set of key-value pairs
    throw new IllegalArgumentException("Must supply key-value pairs");
  }
  final StringBuilder rendered = new StringBuilder();
  for (int i = 0; i < keyOrValue.length; i += 2) {
    if (rendered.length() > 0) {
      rendered.append(", ");
    }
    final Object value = keyOrValue[i + 1];
    rendered
        .append('\n')
        .append(quote(keyOrValue[i].toString()))
        .append(": ")
        .append(value instanceof String ? quote(value) : value.toString());
  }
  return rendered.toString();
}
} }

@ -44,6 +44,17 @@ public class EthSendRawTransactionAcceptanceTest extends AcceptanceTestBase {
strictNode = besu.createArchiveNode("strictNode", configureNode((true))); strictNode = besu.createArchiveNode("strictNode", configureNode((true)));
miningNode = besu.createMinerNode("strictMiningNode", configureNode((true))); miningNode = besu.createMinerNode("strictMiningNode", configureNode((true)));
cluster.start(lenientNode, strictNode, miningNode); cluster.start(lenientNode, strictNode, miningNode);
// verify nodes are fully connected otherwise tx could not be propagated
lenientNode.verify(net.awaitPeerCount(2));
strictNode.verify(net.awaitPeerCount(2));
miningNode.verify(net.awaitPeerCount(2));
// verify that the miner started producing blocks and all other nodes are syncing from it
waitForBlockHeight(miningNode, 1);
final var minerChainHead = miningNode.execute(ethTransactions.block());
lenientNode.verify(blockchain.minimumHeight(minerChainHead.getNumber().longValue()));
strictNode.verify(blockchain.minimumHeight(minerChainHead.getNumber().longValue()));
} }
@Test @Test
@ -53,6 +64,7 @@ public class EthSendRawTransactionAcceptanceTest extends AcceptanceTestBase {
final String txHash = tx.transactionHash(); final String txHash = tx.transactionHash();
lenientNode.verify(eth.expectSuccessfulEthRawTransaction(rawTx)); lenientNode.verify(eth.expectSuccessfulEthRawTransaction(rawTx));
// Tx should be included on-chain // Tx should be included on-chain
miningNode.verify(eth.expectSuccessfulTransactionReceipt(txHash)); miningNode.verify(eth.expectSuccessfulTransactionReceipt(txHash));
} }

@ -1,48 +0,0 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.tests.acceptance.jsonrpc;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.stream.Stream;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.params.provider.Arguments;
public class ExecutionEngineEip6110AcceptanceTest extends AbstractJsonRpcTest {
private static final String GENESIS_FILE = "/jsonrpc/engine/eip6110/genesis.json";
private static final String TEST_CASE_PATH = "/jsonrpc/engine/eip6110/test-cases/";
private static JsonRpcTestsContext testsContext;
public ExecutionEngineEip6110AcceptanceTest() {
super(testsContext);
}
@BeforeAll
public static void init() throws IOException {
testsContext = new JsonRpcTestsContext(GENESIS_FILE);
}
public static Stream<Arguments> testCases() throws URISyntaxException {
return testCasesFromPath(TEST_CASE_PATH);
}
@AfterAll
public static void tearDown() {
testsContext.cluster.close();
}
}

@ -47,10 +47,7 @@ public class NodeSmartContractPermissioningV2AcceptanceTest
permissionedNode.execute(allowNode(permissionedNode)); permissionedNode.execute(allowNode(permissionedNode));
permissionedNode.verify(connectionIsAllowed(permissionedNode)); permissionedNode.verify(connectionIsAllowed(permissionedNode));
allowedNode.verify(eth.syncingStatus(false)); verifyAllNodesHaveFinishedSyncing();
bootnode.verify(eth.syncingStatus(false));
permissionedNode.verify(eth.syncingStatus(false));
forbiddenNode.verify(eth.syncingStatus(false));
} }
@Test @Test
@ -92,6 +89,8 @@ public class NodeSmartContractPermissioningV2AcceptanceTest
permissionedNode.verify(admin.addPeer(allowedNode)); permissionedNode.verify(admin.addPeer(allowedNode));
permissionedNode.verify(net.awaitPeerCount(2)); permissionedNode.verify(net.awaitPeerCount(2));
verifyAllNodesHaveFinishedSyncing();
// permissioning changes in peer should propagate to permissioned node // permissioning changes in peer should propagate to permissioned node
allowedNode.execute(allowNode(forbiddenNode)); allowedNode.execute(allowNode(forbiddenNode));
allowedNode.verify(connectionIsAllowed(forbiddenNode)); allowedNode.verify(connectionIsAllowed(forbiddenNode));
@ -101,6 +100,13 @@ public class NodeSmartContractPermissioningV2AcceptanceTest
permissionedNode.verify(net.awaitPeerCount(3)); permissionedNode.verify(net.awaitPeerCount(3));
} }
// Asserts that every node in the permissioned network reports eth_syncing == false,
// i.e. all four nodes have caught up with the chain head before the test proceeds.
private void verifyAllNodesHaveFinishedSyncing() {
  allowedNode.verify(eth.syncingStatus(false));
  bootnode.verify(eth.syncingStatus(false));
  permissionedNode.verify(eth.syncingStatus(false));
  forbiddenNode.verify(eth.syncingStatus(false));
}
@Test @Test
public void onchainPermissioningAllowlistShouldPersistAcrossRestarts() { public void onchainPermissioningAllowlistShouldPersistAcrossRestarts() {
permissionedCluster.stop(); permissionedCluster.stop();

@ -37,22 +37,13 @@ public class PermissioningPluginTest extends AcceptanceTestBaseJunit5 {
@BeforeEach @BeforeEach
public void setUp() throws Exception { public void setUp() throws Exception {
final BesuNodeConfigurationBuilder builder = minerNode = besu.create(createNodeBuilder().name("miner").build());
new BesuNodeConfigurationBuilder()
.miningEnabled(false)
.plugins(List.of("testPlugins"))
.extraCLIOptions(List.of("--plugin-permissioning-test-enabled=true"))
.jsonRpcEnabled()
.jsonRpcTxPool()
.jsonRpcAdmin();
minerNode = besu.create(builder.name("miner").build()); aliceNode = besu.create(createNodeBuilder().name("alice").keyFilePath("key").build());
aliceNode = besu.create(builder.name("alice").keyFilePath("key").build()); bobNode = besu.create(createNodeBuilder().name("bob").keyFilePath("key1").build());
bobNode = besu.create(builder.name("bob").keyFilePath("key1").build()); charlieNode = besu.create(createNodeBuilder().name("charlie").keyFilePath("key2").build());
charlieNode = besu.create(builder.name("charlie").keyFilePath("key2").build());
cluster.start(minerNode, charlieNode); cluster.start(minerNode, charlieNode);
@ -63,6 +54,16 @@ public class PermissioningPluginTest extends AcceptanceTestBaseJunit5 {
bobNode.awaitPeerDiscovery(net.awaitPeerCount(2)); bobNode.awaitPeerDiscovery(net.awaitPeerCount(2));
} }
// Common configuration shared by every node in this test: mining disabled, the
// permissioning test plugin enabled via CLI, and JSON-RPC (plus txpool/admin) exposed.
private BesuNodeConfigurationBuilder createNodeBuilder() {
  BesuNodeConfigurationBuilder builder = new BesuNodeConfigurationBuilder();
  builder = builder.miningEnabled(false);
  builder = builder.plugins(List.of("testPlugins"));
  builder = builder.extraCLIOptions(List.of("--plugin-permissioning-test-enabled=true"));
  builder = builder.jsonRpcEnabled();
  builder = builder.jsonRpcTxPool();
  builder = builder.jsonRpcAdmin();
  return builder;
}
@Test @Test
public void blockedConnectionNodeCanOnlyConnectToTransactionNode() { public void blockedConnectionNodeCanOnlyConnectToTransactionNode() {
minerNode.verify(admin.hasPeer(aliceNode)); minerNode.verify(admin.hasPeer(aliceNode));

@ -187,6 +187,10 @@ public class PrivacyClusterAcceptanceTest extends PrivacyAcceptanceTestBase {
EnclaveEncryptorType.EC.equals(enclaveEncryptorType) EnclaveEncryptorType.EC.equals(enclaveEncryptorType)
? "0x3e5d325a03ad3ce5640502219833d30b89ce3ce1" ? "0x3e5d325a03ad3ce5640502219833d30b89ce3ce1"
: "0xebf56429e6500e84442467292183d4d621359838"; : "0xebf56429e6500e84442467292183d4d621359838";
final String receiptPrivacyGroupId =
EnclaveEncryptorType.EC.equals(enclaveEncryptorType)
? "MjuFB4b9Hz+f8zvkWWasxZWRjHWXU4t7B2nOHo4mekA="
: "DyAOiF/ynpc+JXa2YAGB0bCitSlOMNm+ShmB/7M6C4w=";
final RawPrivateTransaction rawPrivateTransaction = final RawPrivateTransaction rawPrivateTransaction =
RawPrivateTransaction.createContractTransaction( RawPrivateTransaction.createContractTransaction(
@ -196,6 +200,7 @@ public class PrivacyClusterAcceptanceTest extends PrivacyAcceptanceTestBase {
Numeric.prependHexPrefix(EventEmitter.BINARY), Numeric.prependHexPrefix(EventEmitter.BINARY),
Base64String.wrap(alice.getEnclaveKey()), Base64String.wrap(alice.getEnclaveKey()),
Collections.singletonList(Base64String.wrap(bob.getEnclaveKey())), Collections.singletonList(Base64String.wrap(bob.getEnclaveKey())),
Base64String.wrap(receiptPrivacyGroupId),
RESTRICTED); RESTRICTED);
final String signedPrivateTransaction = final String signedPrivateTransaction =
@ -243,10 +248,6 @@ public class PrivacyClusterAcceptanceTest extends PrivacyAcceptanceTestBase {
"MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEXIgZqRA25V+3nN+Do6b5r0jiUunub6ubjPhqwHpPxP44uUYh9RKCQNRnsqCJ9PjeTnC8R3ieJk7HWAlycU1bug==")) "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEXIgZqRA25V+3nN+Do6b5r0jiUunub6ubjPhqwHpPxP44uUYh9RKCQNRnsqCJ9PjeTnC8R3ieJk7HWAlycU1bug=="))
: new ArrayList<>( : new ArrayList<>(
Collections.singletonList("Ko2bVqD+nNlNYL5EE7y3IdOnviftjiizpjRt+HTuFBs=")); Collections.singletonList("Ko2bVqD+nNlNYL5EE7y3IdOnviftjiizpjRt+HTuFBs="));
final String receiptPrivacyGroupId =
EnclaveEncryptorType.EC.equals(enclaveEncryptorType)
? "MjuFB4b9Hz+f8zvkWWasxZWRjHWXU4t7B2nOHo4mekA="
: "DyAOiF/ynpc+JXa2YAGB0bCitSlOMNm+ShmB/7M6C4w=";
final PrivateTransactionReceipt expectedReceipt = final PrivateTransactionReceipt expectedReceipt =
new PrivateTransactionReceipt( new PrivateTransactionReceipt(

@ -41,9 +41,19 @@ public class NewPendingTransactionAcceptanceTest extends AcceptanceTestBase {
minerNode = besu.createMinerNode("miner-node1"); minerNode = besu.createMinerNode("miner-node1");
archiveNode = besu.createArchiveNode("full-node1"); archiveNode = besu.createArchiveNode("full-node1");
cluster.start(minerNode, archiveNode); cluster.start(minerNode, archiveNode);
// verify nodes are fully connected otherwise tx could not be propagated
minerNode.verify(net.awaitPeerCount(1));
archiveNode.verify(net.awaitPeerCount(1));
accountOne = accounts.createAccount("account-one"); accountOne = accounts.createAccount("account-one");
minerWebSocket = new WebSocket(vertx, minerNode.getConfiguration()); minerWebSocket = new WebSocket(vertx, minerNode.getConfiguration());
archiveWebSocket = new WebSocket(vertx, archiveNode.getConfiguration()); archiveWebSocket = new WebSocket(vertx, archiveNode.getConfiguration());
// verify that the miner started producing blocks and all other nodes are syncing from it
waitForBlockHeight(minerNode, 1);
final var minerChainHead = minerNode.execute(ethTransactions.block());
archiveNode.verify(blockchain.minimumHeight(minerChainHead.getNumber().longValue()));
} }
@AfterEach @AfterEach

File diff suppressed because one or more lines are too long

@ -1,34 +0,0 @@
{
"request": {
"jsonrpc": "2.0",
"method": "engine_forkchoiceUpdatedV3",
"params": [
{
"headBlockHash": "0x78a301e0d846bd169889c9755c9aa4ce2972dfc4bd63de61f3303887d3e81f98",
"safeBlockHash": "0x78a301e0d846bd169889c9755c9aa4ce2972dfc4bd63de61f3303887d3e81f98",
"finalizedBlockHash": "0x0000000000000000000000000000000000000000000000000000000000000000"
},
{
"timestamp": "0x10",
"prevRandao": "0x0000000000000000000000000000000000000000000000000000000000000000",
"suggestedFeeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"withdrawals": [],
"parentBeaconBlockRoot": "0x0000000000000000000000000000000000000000000000000000000000000000"
}
],
"id": 67
},
"response": {
"jsonrpc": "2.0",
"id": 67,
"result": {
"payloadStatus": {
"status": "VALID",
"latestValidHash": "0x78a301e0d846bd169889c9755c9aa4ce2972dfc4bd63de61f3303887d3e81f98",
"validationError": null
},
"payloadId": "0x282643d459a6f711"
}
},
"statusCode": 200
}

@ -1,44 +0,0 @@
{
"request": {
"jsonrpc": "2.0",
"method": "engine_getPayloadV3",
"params": [
"0x282643d459a6f711"
],
"id": 67
},
"response": {
"jsonrpc": "2.0",
"id": 67,
"result": {
"executionPayload": {
"parentHash": "0x78a301e0d846bd169889c9755c9aa4ce2972dfc4bd63de61f3303887d3e81f98",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x9b8c4a9a86cb49252075c0db2f0e72fb1e49350a0f70ea36f26f700201961e62",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"prevRandao": "0x0000000000000000000000000000000000000000000000000000000000000000",
"gasLimit": "0x1c9c380",
"gasUsed": "0x0",
"timestamp": "0x10",
"extraData": "0x",
"baseFeePerGas": "0x7",
"excessBlobGas": "0x0",
"parentBeaconBlockRoot": "0x0000000000000000000000000000000000000000000000000000000000000000",
"transactions": [],
"withdrawals": [],
"blockNumber": "0x1",
"receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
"blobGasUsed": "0x0",
"blockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315"
},
"blockValue": "0x0",
"blobsBundle": {
"commitments": [],
"proofs": [],
"blobs": []
},
"shouldOverrideBuilder": false
}
},
"statusCode": 200
}

@ -1,40 +0,0 @@
{
"request": {
"jsonrpc": "2.0",
"method": "engine_newPayloadV3",
"params": [
{
"parentHash": "0x78a301e0d846bd169889c9755c9aa4ce2972dfc4bd63de61f3303887d3e81f98",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x9b8c4a9a86cb49252075c0db2f0e72fb1e49350a0f70ea36f26f700201961e62",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"prevRandao": "0x0000000000000000000000000000000000000000000000000000000000000000",
"gasLimit": "0x1c9c380",
"gasUsed": "0x0",
"timestamp": "0x10",
"extraData": "0x",
"baseFeePerGas": "0x7",
"transactions": [],
"withdrawals": [],
"blockNumber": "0x1",
"blockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
"excessBlobGas": "0x0",
"blobGasUsed": "0x0"
},
[],
"0x0000000000000000000000000000000000000000000000000000000000000000"
],
"id": 67
},
"response": {
"jsonrpc": "2.0",
"id": 67,
"result": {
"status": "VALID",
"latestValidHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"validationError": null
}
},
"statusCode": 200
}

@ -1,28 +0,0 @@
{
"request": {
"jsonrpc": "2.0",
"method": "engine_forkchoiceUpdatedV3",
"params": [
{
"headBlockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"safeBlockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"finalizedBlockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315"
},
null
],
"id": 67
},
"response": {
"jsonrpc": "2.0",
"id": 67,
"result": {
"payloadStatus": {
"status": "VALID",
"latestValidHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"validationError": null
},
"payloadId": null
}
},
"statusCode": 200
}

@ -1,34 +0,0 @@
{
"request": {
"jsonrpc": "2.0",
"method": "engine_forkchoiceUpdatedV3",
"params": [
{
"headBlockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"safeBlockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"finalizedBlockHash": "0x0000000000000000000000000000000000000000000000000000000000000000"
},
{
"timestamp": "0x20",
"prevRandao": "0x0000000000000000000000000000000000000000000000000000000000000000",
"suggestedFeeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"withdrawals": [],
"parentBeaconBlockRoot": "0x0000000000000000000000000000000000000000000000000000000000000000"
}
],
"id": 67
},
"response": {
"jsonrpc": "2.0",
"id": 67,
"result": {
"payloadStatus": {
"status": "VALID",
"latestValidHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"validationError": null
},
"payloadId": "0x282643b909febddf"
}
},
"statusCode": 200
}

@ -1,45 +0,0 @@
{
"request": {
"jsonrpc": "2.0",
"method": "engine_getPayloadV6110",
"params": [
"0x282643b909febddf"
],
"id": 67
},
"response": {
"jsonrpc": "2.0",
"id": 67,
"result": {
"executionPayload": {
"parentHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x9b8c4a9a86cb49252075c0db2f0e72fb1e49350a0f70ea36f26f700201961e62",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"prevRandao": "0x0000000000000000000000000000000000000000000000000000000000000000",
"gasLimit": "0x1c9c380",
"gasUsed": "0x0",
"timestamp": "0x20",
"extraData": "0x",
"baseFeePerGas": "0x7",
"excessBlobGas": "0x0",
"parentBeaconBlockRoot": "0x0000000000000000000000000000000000000000000000000000000000000000",
"transactions": [],
"withdrawals": [],
"depositReceipts": [],
"blockNumber": "0x2",
"blockHash": "0xc8255831601171a628ef17f6601d3d1d30ff9b382e77592ed1af32354f6dafbb",
"blobGasUsed": "0x0",
"receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"
},
"blockValue": "0x0",
"blobsBundle": {
"commitments": [],
"proofs": [],
"blobs": []
},
"shouldOverrideBuilder": false
}
},
"statusCode": 200
}

@ -1,14 +0,0 @@
{
"request": {
"jsonrpc": "2.0",
"method": "eth_sendRawTransaction",
"params": ["0x02f9021c8217de808459682f008459682f0e830271009442424242424242424242424242424242424242428901bc16d674ec800000b901a422895118000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000120749715de5d1226545c6b3790f515d551a5cc5bf1d49c87a696860554d2fc4f14000000000000000000000000000000000000000000000000000000000000003096a96086cff07df17668f35f7418ef8798079167e3f4f9b72ecde17b28226137cf454ab1dd20ef5d924786ab3483c2f9000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020003f5102dabe0a27b1746098d1dc17a5d3fbd478759fea9287e4e419b3c3cef20000000000000000000000000000000000000000000000000000000000000060b1acdb2c4d3df3f1b8d3bfd33421660df358d84d78d16c4603551935f4b67643373e7eb63dcb16ec359be0ec41fee33b03a16e80745f2374ff1d3c352508ac5d857c6476d3c3bcf7e6ca37427c9209f17be3af5264c0e2132b3dd1156c28b4e9c080a09f597089338d7f44f5c59f8230bb38f243849228a8d4e9d2e2956e6050f5b2c7a076486996c7e62802b8f95eee114783e4b403fd11093ba96286ff42c595f24452"],
"id": 67
},
"response": {
"jsonrpc": "2.0",
"id": 67,
"result": "0x8ff1a50169f52f14cc1cf0300ec037c054a9b99df462e6372c7ca655bf1f00cd"
},
"statusCode": 200
}

@ -1,41 +0,0 @@
{
"request": {
"jsonrpc": "2.0",
"method": "engine_newPayloadV6110",
"params": [
{
"parentHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x9b8c4a9a86cb49252075c0db2f0e72fb1e49350a0f70ea36f26f700201961e62",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"prevRandao": "0x0000000000000000000000000000000000000000000000000000000000000000",
"gasLimit": "0x1c9c380",
"gasUsed": "0x0",
"timestamp": "0x20",
"extraData": "0x",
"baseFeePerGas": "0x7",
"excessBlobGas": "0x0",
"transactions": [],
"withdrawals": [],
"depositReceipts" : null,
"blockNumber": "0x2",
"blockHash": "0xf6c3f1180ba58d6ea4c69c9328c7afb1fda41df06c368741c1f8310567879de7",
"receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
"blobGasUsed": "0x0"
},
[],
"0x0000000000000000000000000000000000000000000000000000000000000000"
],
"id": 67
},
"response": {
"jsonrpc": "2.0",
"id": 67,
"error": {
"code": -32602,
"message": "Invalid params",
"data" : "Missing deposit field"
}
},
"statusCode": 200
}

@ -1,45 +0,0 @@
{
"request": {
"jsonrpc": "2.0",
"method": "engine_newPayloadV6110",
"params": [
{
"parentHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x14208ac0e218167936e220b72d5d5887a963cb858ea2f2d268518f014a3da3fa",
"logsBloom": "0x10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000",
"prevRandao": "0x0000000000000000000000000000000000000000000000000000000000000000",
"gasLimit": "0x1c9c380",
"gasUsed": "0x14B6E",
"timestamp": "0x20",
"extraData": "0x",
"baseFeePerGas": "0x7",
"excessBlobGas": "0x0",
"transactions": [
"0x02f9021c8217de808459682f008459682f0e830271009442424242424242424242424242424242424242428901bc16d674ec800000b901a422895118000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000120749715de5d1226545c6b3790f515d551a5cc5bf1d49c87a696860554d2fc4f14000000000000000000000000000000000000000000000000000000000000003096a96086cff07df17668f35f7418ef8798079167e3f4f9b72ecde17b28226137cf454ab1dd20ef5d924786ab3483c2f9000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020003f5102dabe0a27b1746098d1dc17a5d3fbd478759fea9287e4e419b3c3cef20000000000000000000000000000000000000000000000000000000000000060b1acdb2c4d3df3f1b8d3bfd33421660df358d84d78d16c4603551935f4b67643373e7eb63dcb16ec359be0ec41fee33b03a16e80745f2374ff1d3c352508ac5d857c6476d3c3bcf7e6ca37427c9209f17be3af5264c0e2132b3dd1156c28b4e9c080a09f597089338d7f44f5c59f8230bb38f243849228a8d4e9d2e2956e6050f5b2c7a076486996c7e62802b8f95eee114783e4b403fd11093ba96286ff42c595f24452"
],
"withdrawals": [],
"depositReceipts" : [
{"amount":"0x773594000","index":"0x0","pubkey":"0x96a96086cff07df17668f35f7418ef8798079167e3f4f9b72ecde17b28226137cf454ab1dd20ef5d924786ab3483c2f9","signature":"0xb1acdb2c4d3df3f1b8d3bfd33421660df358d84d78d16c4603551935f4b67643373e7eb63dcb16ec359be0ec41fee33b03a16e80745f2374ff1d3c352508ac5d857c6476d3c3bcf7e6ca37427c9209f17be3af5264c0e2132b3dd1156c28b4e9","withdrawalCredentials":"0x003f5102dabe0a27b1746098d1dc17a5d3fbd478759fea9287e4e419b3c3cef2"}
],
"blockNumber": "0x2",
"blockHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"receiptsRoot": "0x79ee3424eb720a3ad4b1c5a372bb8160580cbe4d893778660f34213c685627a9",
"blobGasUsed": "0x0"
},
[],
"0x0000000000000000000000000000000000000000000000000000000000000000"
],
"id": 67
},
"response": {
"jsonrpc": "2.0",
"id": 67,
"result": {
"status": "VALID",
"latestValidHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"validationError": null
}
},
"statusCode": 200
}

@ -1,34 +0,0 @@
{
"request": {
"jsonrpc": "2.0",
"method": "engine_forkchoiceUpdatedV3",
"params": [
{
"headBlockHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"safeBlockHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"finalizedBlockHash": "0x0000000000000000000000000000000000000000000000000000000000000000"
},
{
"timestamp": "0x30",
"prevRandao": "0x0000000000000000000000000000000000000000000000000000000000000000",
"suggestedFeeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"withdrawals": [],
"parentBeaconBlockRoot": "0x0000000000000000000000000000000000000000000000000000000000000000"
}
],
"id": 67
},
"response": {
"jsonrpc": "2.0",
"id": 67,
"result": {
"payloadStatus": {
"status": "VALID",
"latestValidHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"validationError": null
},
"payloadId": "0x282643db882670cf"
}
},
"statusCode" : 200
}

@ -1,45 +0,0 @@
{
"request": {
"jsonrpc": "2.0",
"method": "engine_getPayloadV6110",
"params": [
"0x282643db882670cf"
],
"id": 67
},
"response": {
"jsonrpc": "2.0",
"id": 67,
"result": {
"executionPayload": {
"parentHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x14208ac0e218167936e220b72d5d5887a963cb858ea2f2d268518f014a3da3fa",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"prevRandao": "0x0000000000000000000000000000000000000000000000000000000000000000",
"gasLimit": "0x1c9c380",
"gasUsed": "0x0",
"timestamp": "0x30",
"extraData": "0x",
"baseFeePerGas": "0x7",
"excessBlobGas": "0x0",
"parentBeaconBlockRoot": "0x0000000000000000000000000000000000000000000000000000000000000000",
"transactions": [],
"withdrawals": [],
"depositReceipts": [],
"blockNumber": "0x3",
"blockHash": "0xf1e7093b5d229885caab11a3acb95412af80f9077b742020a8014cf81c8c75f2",
"receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
"blobGasUsed": "0x0"
},
"blockValue": "0x0",
"blobsBundle": {
"commitments": [],
"proofs": [],
"blobs": []
},
"shouldOverrideBuilder": false
}
},
"statusCode": 200
}

@ -28,6 +28,8 @@ jar {
} }
dependencies { dependencies {
api project(':datatypes')
api 'org.slf4j:slf4j-api' api 'org.slf4j:slf4j-api'
implementation project(':config') implementation project(':config')

@ -133,6 +133,7 @@ import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Locale;
import java.util.Map; import java.util.Map;
import java.util.Optional; import java.util.Optional;
import java.util.Set; import java.util.Set;
@ -800,7 +801,7 @@ public class RunnerBuilder {
metricsSystem, metricsSystem,
supportedCapabilities, supportedCapabilities,
jsonRpcConfiguration.getRpcApis().stream() jsonRpcConfiguration.getRpcApis().stream()
.filter(apiGroup -> !apiGroup.toLowerCase().startsWith("engine")) .filter(apiGroup -> !apiGroup.toLowerCase(Locale.ROOT).startsWith("engine"))
.collect(Collectors.toList()), .collect(Collectors.toList()),
filterManager, filterManager,
accountLocalConfigPermissioningController, accountLocalConfigPermissioningController,
@ -938,7 +939,7 @@ public class RunnerBuilder {
metricsSystem, metricsSystem,
supportedCapabilities, supportedCapabilities,
webSocketConfiguration.getRpcApis().stream() webSocketConfiguration.getRpcApis().stream()
.filter(apiGroup -> !apiGroup.toLowerCase().startsWith("engine")) .filter(apiGroup -> !apiGroup.toLowerCase(Locale.ROOT).startsWith("engine"))
.collect(Collectors.toList()), .collect(Collectors.toList()),
filterManager, filterManager,
accountLocalConfigPermissioningController, accountLocalConfigPermissioningController,
@ -1021,7 +1022,7 @@ public class RunnerBuilder {
metricsSystem, metricsSystem,
supportedCapabilities, supportedCapabilities,
jsonRpcIpcConfiguration.getEnabledApis().stream() jsonRpcIpcConfiguration.getEnabledApis().stream()
.filter(apiGroup -> !apiGroup.toLowerCase().startsWith("engine")) .filter(apiGroup -> !apiGroup.toLowerCase(Locale.ROOT).startsWith("engine"))
.collect(Collectors.toList()), .collect(Collectors.toList()),
filterManager, filterManager,
accountLocalConfigPermissioningController, accountLocalConfigPermissioningController,

@ -21,6 +21,7 @@ import static java.util.Arrays.asList;
import static java.util.Collections.singletonList; import static java.util.Collections.singletonList;
import static org.hyperledger.besu.cli.DefaultCommandValues.getDefaultBesuDataPath; import static org.hyperledger.besu.cli.DefaultCommandValues.getDefaultBesuDataPath;
import static org.hyperledger.besu.cli.config.NetworkName.MAINNET; import static org.hyperledger.besu.cli.config.NetworkName.MAINNET;
import static org.hyperledger.besu.cli.options.unstable.NetworkingOptions.PEER_LOWER_BOUND_FLAG;
import static org.hyperledger.besu.cli.util.CommandLineUtils.DEPENDENCY_WARNING_MSG; import static org.hyperledger.besu.cli.util.CommandLineUtils.DEPENDENCY_WARNING_MSG;
import static org.hyperledger.besu.cli.util.CommandLineUtils.isOptionSet; import static org.hyperledger.besu.cli.util.CommandLineUtils.isOptionSet;
import static org.hyperledger.besu.controller.BesuController.DATABASE_PATH; import static org.hyperledger.besu.controller.BesuController.DATABASE_PATH;
@ -48,6 +49,7 @@ import org.hyperledger.besu.cli.error.BesuExecutionExceptionHandler;
import org.hyperledger.besu.cli.error.BesuParameterExceptionHandler; import org.hyperledger.besu.cli.error.BesuParameterExceptionHandler;
import org.hyperledger.besu.cli.options.MiningOptions; import org.hyperledger.besu.cli.options.MiningOptions;
import org.hyperledger.besu.cli.options.TransactionPoolOptions; import org.hyperledger.besu.cli.options.TransactionPoolOptions;
import org.hyperledger.besu.cli.options.stable.ApiConfigurationOptions;
import org.hyperledger.besu.cli.options.stable.DataStorageOptions; import org.hyperledger.besu.cli.options.stable.DataStorageOptions;
import org.hyperledger.besu.cli.options.stable.EthstatsOptions; import org.hyperledger.besu.cli.options.stable.EthstatsOptions;
import org.hyperledger.besu.cli.options.stable.GraphQlOptions; import org.hyperledger.besu.cli.options.stable.GraphQlOptions;
@ -107,7 +109,6 @@ import org.hyperledger.besu.datatypes.Wei;
import org.hyperledger.besu.enclave.EnclaveFactory; import org.hyperledger.besu.enclave.EnclaveFactory;
import org.hyperledger.besu.ethereum.GasLimitCalculator; import org.hyperledger.besu.ethereum.GasLimitCalculator;
import org.hyperledger.besu.ethereum.api.ApiConfiguration; import org.hyperledger.besu.ethereum.api.ApiConfiguration;
import org.hyperledger.besu.ethereum.api.ImmutableApiConfiguration;
import org.hyperledger.besu.ethereum.api.graphql.GraphQLConfiguration; import org.hyperledger.besu.ethereum.api.graphql.GraphQLConfiguration;
import org.hyperledger.besu.ethereum.api.jsonrpc.JsonRpcConfiguration; import org.hyperledger.besu.ethereum.api.jsonrpc.JsonRpcConfiguration;
import org.hyperledger.besu.ethereum.api.jsonrpc.RpcApis; import org.hyperledger.besu.ethereum.api.jsonrpc.RpcApis;
@ -117,7 +118,9 @@ import org.hyperledger.besu.ethereum.api.jsonrpc.websocket.WebSocketConfiguratio
import org.hyperledger.besu.ethereum.api.query.BlockchainQueries; import org.hyperledger.besu.ethereum.api.query.BlockchainQueries;
import org.hyperledger.besu.ethereum.chain.Blockchain; import org.hyperledger.besu.ethereum.chain.Blockchain;
import org.hyperledger.besu.ethereum.core.MiningParameters; import org.hyperledger.besu.ethereum.core.MiningParameters;
import org.hyperledger.besu.ethereum.core.MiningParametersMetrics;
import org.hyperledger.besu.ethereum.core.PrivacyParameters; import org.hyperledger.besu.ethereum.core.PrivacyParameters;
import org.hyperledger.besu.ethereum.core.VersionMetadata;
import org.hyperledger.besu.ethereum.eth.sync.SyncMode; import org.hyperledger.besu.ethereum.eth.sync.SyncMode;
import org.hyperledger.besu.ethereum.eth.sync.SynchronizerConfiguration; import org.hyperledger.besu.ethereum.eth.sync.SynchronizerConfiguration;
import org.hyperledger.besu.ethereum.eth.transactions.ImmutableTransactionPoolConfiguration; import org.hyperledger.besu.ethereum.eth.transactions.ImmutableTransactionPoolConfiguration;
@ -136,8 +139,8 @@ import org.hyperledger.besu.ethereum.storage.StorageProvider;
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier; import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier;
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueStorageProvider; import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueStorageProvider;
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueStorageProviderBuilder; import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueStorageProviderBuilder;
import org.hyperledger.besu.ethereum.trie.forest.pruner.PrunerConfiguration; import org.hyperledger.besu.ethereum.transaction.TransactionSimulator;
import org.hyperledger.besu.ethereum.worldstate.DataStorageFormat; import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.evm.precompile.AbstractAltBnPrecompiledContract; import org.hyperledger.besu.evm.precompile.AbstractAltBnPrecompiledContract;
import org.hyperledger.besu.evm.precompile.BigIntegerModularExponentiationPrecompiledContract; import org.hyperledger.besu.evm.precompile.BigIntegerModularExponentiationPrecompiledContract;
import org.hyperledger.besu.evm.precompile.KZGPointEvalPrecompiledContract; import org.hyperledger.besu.evm.precompile.KZGPointEvalPrecompiledContract;
@ -157,33 +160,35 @@ import org.hyperledger.besu.plugin.services.BlockchainService;
import org.hyperledger.besu.plugin.services.MetricsSystem; import org.hyperledger.besu.plugin.services.MetricsSystem;
import org.hyperledger.besu.plugin.services.PermissioningService; import org.hyperledger.besu.plugin.services.PermissioningService;
import org.hyperledger.besu.plugin.services.PicoCLIOptions; import org.hyperledger.besu.plugin.services.PicoCLIOptions;
import org.hyperledger.besu.plugin.services.PluginTransactionValidatorService;
import org.hyperledger.besu.plugin.services.PrivacyPluginService; import org.hyperledger.besu.plugin.services.PrivacyPluginService;
import org.hyperledger.besu.plugin.services.RpcEndpointService; import org.hyperledger.besu.plugin.services.RpcEndpointService;
import org.hyperledger.besu.plugin.services.SecurityModuleService; import org.hyperledger.besu.plugin.services.SecurityModuleService;
import org.hyperledger.besu.plugin.services.StorageService; import org.hyperledger.besu.plugin.services.StorageService;
import org.hyperledger.besu.plugin.services.TraceService; import org.hyperledger.besu.plugin.services.TraceService;
import org.hyperledger.besu.plugin.services.TransactionPoolValidatorService;
import org.hyperledger.besu.plugin.services.TransactionSelectionService; import org.hyperledger.besu.plugin.services.TransactionSelectionService;
import org.hyperledger.besu.plugin.services.TransactionSimulationService;
import org.hyperledger.besu.plugin.services.exception.StorageException; import org.hyperledger.besu.plugin.services.exception.StorageException;
import org.hyperledger.besu.plugin.services.metrics.MetricCategory; import org.hyperledger.besu.plugin.services.metrics.MetricCategory;
import org.hyperledger.besu.plugin.services.metrics.MetricCategoryRegistry; import org.hyperledger.besu.plugin.services.metrics.MetricCategoryRegistry;
import org.hyperledger.besu.plugin.services.securitymodule.SecurityModule; import org.hyperledger.besu.plugin.services.securitymodule.SecurityModule;
import org.hyperledger.besu.plugin.services.storage.DataStorageFormat;
import org.hyperledger.besu.plugin.services.storage.PrivacyKeyValueStorageFactory; import org.hyperledger.besu.plugin.services.storage.PrivacyKeyValueStorageFactory;
import org.hyperledger.besu.plugin.services.storage.rocksdb.RocksDBPlugin; import org.hyperledger.besu.plugin.services.storage.rocksdb.RocksDBPlugin;
import org.hyperledger.besu.plugin.services.txselection.PluginTransactionSelectorFactory; import org.hyperledger.besu.services.BesuConfigurationImpl;
import org.hyperledger.besu.plugin.services.txvalidator.PluginTransactionValidatorFactory;
import org.hyperledger.besu.services.BesuEventsImpl; import org.hyperledger.besu.services.BesuEventsImpl;
import org.hyperledger.besu.services.BesuPluginContextImpl; import org.hyperledger.besu.services.BesuPluginContextImpl;
import org.hyperledger.besu.services.BlockchainServiceImpl; import org.hyperledger.besu.services.BlockchainServiceImpl;
import org.hyperledger.besu.services.PermissioningServiceImpl; import org.hyperledger.besu.services.PermissioningServiceImpl;
import org.hyperledger.besu.services.PicoCLIOptionsImpl; import org.hyperledger.besu.services.PicoCLIOptionsImpl;
import org.hyperledger.besu.services.PluginTransactionValidatorServiceImpl;
import org.hyperledger.besu.services.PrivacyPluginServiceImpl; import org.hyperledger.besu.services.PrivacyPluginServiceImpl;
import org.hyperledger.besu.services.RpcEndpointServiceImpl; import org.hyperledger.besu.services.RpcEndpointServiceImpl;
import org.hyperledger.besu.services.SecurityModuleServiceImpl; import org.hyperledger.besu.services.SecurityModuleServiceImpl;
import org.hyperledger.besu.services.StorageServiceImpl; import org.hyperledger.besu.services.StorageServiceImpl;
import org.hyperledger.besu.services.TraceServiceImpl; import org.hyperledger.besu.services.TraceServiceImpl;
import org.hyperledger.besu.services.TransactionPoolValidatorServiceImpl;
import org.hyperledger.besu.services.TransactionSelectionServiceImpl; import org.hyperledger.besu.services.TransactionSelectionServiceImpl;
import org.hyperledger.besu.services.TransactionSimulationServiceImpl;
import org.hyperledger.besu.services.kvstore.InMemoryStoragePlugin; import org.hyperledger.besu.services.kvstore.InMemoryStoragePlugin;
import org.hyperledger.besu.util.InvalidConfigurationException; import org.hyperledger.besu.util.InvalidConfigurationException;
import org.hyperledger.besu.util.LogConfigurator; import org.hyperledger.besu.util.LogConfigurator;
@ -231,7 +236,6 @@ import io.vertx.core.json.DecodeException;
import io.vertx.core.metrics.MetricsOptions; import io.vertx.core.metrics.MetricsOptions;
import org.apache.tuweni.bytes.Bytes; import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.units.bigints.UInt256; import org.apache.tuweni.units.bigints.UInt256;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger; import org.slf4j.Logger;
import picocli.AutoComplete; import picocli.AutoComplete;
import picocli.CommandLine; import picocli.CommandLine;
@ -254,8 +258,12 @@ import picocli.CommandLine.ParameterException;
synopsisHeading = "%n", synopsisHeading = "%n",
descriptionHeading = "%n@|bold,fg(cyan) Description:|@%n%n", descriptionHeading = "%n@|bold,fg(cyan) Description:|@%n%n",
optionListHeading = "%n@|bold,fg(cyan) Options:|@%n", optionListHeading = "%n@|bold,fg(cyan) Options:|@%n",
footerHeading = "%n", footerHeading = "%nBesu is licensed under the Apache License 2.0%n",
footer = "Besu is licensed under the Apache License 2.0") footer = {
"%n%n@|fg(cyan) To get started quickly, just choose a network to sync and a profile to run with suggested defaults:|@",
"%n@|fg(cyan) for Mainnet|@ --network=mainnet --profile=[minimalist_staker|staker]",
"%nMore info and other profiles at https://besu.hyperledger.org%n"
})
public class BesuCommand implements DefaultCommandValues, Runnable { public class BesuCommand implements DefaultCommandValues, Runnable {
@SuppressWarnings("PrivateStaticFinalLoggers") @SuppressWarnings("PrivateStaticFinalLoggers")
@ -320,7 +328,6 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
private int maxPeers; private int maxPeers;
private int maxRemoteInitiatedPeers; private int maxRemoteInitiatedPeers;
private int peersLowerBound;
// CLI options defined by user at runtime. // CLI options defined by user at runtime.
// Options parsing is done with CLI library Picocli https://picocli.info/ // Options parsing is done with CLI library Picocli https://picocli.info/
@ -340,16 +347,16 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
description = "The path to Besu data directory (default: ${DEFAULT-VALUE})") description = "The path to Besu data directory (default: ${DEFAULT-VALUE})")
final Path dataPath = getDefaultBesuDataPath(this); final Path dataPath = getDefaultBesuDataPath(this);
// Genesis file path with null default option if the option // Genesis file path with null default option.
// is not defined on command line as this default is handled by Runner // This default is handled by Runner
// to use mainnet json file from resources as indicated in the // to use mainnet json file from resources as indicated in the
// default network option // default network option
// Then we have no control over genesis default value here. // Then we ignore genesis default value here.
@CommandLine.Option( @CommandLine.Option(
names = {"--genesis-file"}, names = {"--genesis-file"},
paramLabel = MANDATORY_FILE_FORMAT_HELP, paramLabel = MANDATORY_FILE_FORMAT_HELP,
description = description =
"Genesis file. Setting this option makes --network option ignored and requires --network-id to be set.") "Genesis file for your custom network. Setting this option requires --network-id to be set. (Cannot be used with --network)")
private final File genesisFile = null; private final File genesisFile = null;
@Option( @Option(
@ -364,7 +371,9 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
P2PDiscoveryOptionGroup p2PDiscoveryOptionGroup = new P2PDiscoveryOptionGroup(); P2PDiscoveryOptionGroup p2PDiscoveryOptionGroup = new P2PDiscoveryOptionGroup();
private final TransactionSelectionServiceImpl transactionSelectionServiceImpl; private final TransactionSelectionServiceImpl transactionSelectionServiceImpl;
private final PluginTransactionValidatorServiceImpl transactionValidatorServiceImpl; private final TransactionPoolValidatorServiceImpl transactionValidatorServiceImpl;
private final TransactionSimulationServiceImpl transactionSimulationServiceImpl;
private final BlockchainServiceImpl blockchainServiceImpl;
static class P2PDiscoveryOptionGroup { static class P2PDiscoveryOptionGroup {
@ -505,7 +514,7 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
names = {"--sync-mode"}, names = {"--sync-mode"},
paramLabel = MANDATORY_MODE_FORMAT_HELP, paramLabel = MANDATORY_MODE_FORMAT_HELP,
description = description =
"Synchronization mode, possible values are ${COMPLETION-CANDIDATES} (default: FAST if a --network is supplied and privacy isn't enabled. FULL otherwise.)") "Synchronization mode, possible values are ${COMPLETION-CANDIDATES} (default: SNAP if a --network is supplied and privacy isn't enabled. FULL otherwise.)")
private SyncMode syncMode = null; private SyncMode syncMode = null;
@Option( @Option(
@ -555,6 +564,12 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
arity = "1") arity = "1")
private final Path kzgTrustedSetupFile = null; private final Path kzgTrustedSetupFile = null;
@Option(
names = {"--version-compatibility-protection"},
description =
"Perform compatibility checks between the version of Besu being started and the version of Besu that last started with this data directory. (default: ${DEFAULT-VALUE})")
private Boolean versionCompatibilityProtection = null;
@CommandLine.ArgGroup(validate = false, heading = "@|bold GraphQL Options|@%n") @CommandLine.ArgGroup(validate = false, heading = "@|bold GraphQL Options|@%n")
GraphQlOptions graphQlOptions = new GraphQlOptions(); GraphQlOptions graphQlOptions = new GraphQlOptions();
@ -781,12 +796,6 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
"How deep a chain reorganization must be in order for it to be logged (default: ${DEFAULT-VALUE})") "How deep a chain reorganization must be in order for it to be logged (default: ${DEFAULT-VALUE})")
private final Long reorgLoggingThreshold = 6L; private final Long reorgLoggingThreshold = 6L;
@Option(
names = {"--pruning-enabled"},
description =
"Enable disk-space saving optimization that removes old state that is unlikely to be required (default: ${DEFAULT-VALUE})")
private final Boolean pruningEnabled = false;
// Permission Option Group // Permission Option Group
@CommandLine.ArgGroup(validate = false, heading = "@|bold Permissions Options|@%n") @CommandLine.ArgGroup(validate = false, heading = "@|bold Permissions Options|@%n")
PermissionsOptions permissionsOptions = new PermissionsOptions(); PermissionsOptions permissionsOptions = new PermissionsOptions();
@ -836,66 +845,15 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
private final Map<String, String> genesisConfigOverrides = private final Map<String, String> genesisConfigOverrides =
new TreeMap<>(String.CASE_INSENSITIVE_ORDER); new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
@Option(
names = {"--pruning-blocks-retained"},
paramLabel = "<INTEGER>",
description =
"Minimum number of recent blocks for which to keep entire world state (default: ${DEFAULT-VALUE})",
arity = "1")
private final Integer pruningBlocksRetained = PrunerConfiguration.DEFAULT_PRUNING_BLOCKS_RETAINED;
@Option(
names = {"--pruning-block-confirmations"},
paramLabel = "<INTEGER>",
description =
"Minimum number of confirmations on a block before marking begins (default: ${DEFAULT-VALUE})",
arity = "1")
private final Integer pruningBlockConfirmations =
PrunerConfiguration.DEFAULT_PRUNING_BLOCK_CONFIRMATIONS;
@CommandLine.Option( @CommandLine.Option(
names = {"--pid-path"}, names = {"--pid-path"},
paramLabel = MANDATORY_PATH_FORMAT_HELP, paramLabel = MANDATORY_PATH_FORMAT_HELP,
description = "Path to PID file (optional)") description = "Path to PID file (optional)")
private final Path pidPath = null; private final Path pidPath = null;
@CommandLine.Option( // API Configuration Option Group
names = {"--api-gas-price-blocks"}, @CommandLine.ArgGroup(validate = false, heading = "@|bold API Configuration Options|@%n")
description = "Number of blocks to consider for eth_gasPrice (default: ${DEFAULT-VALUE})") ApiConfigurationOptions apiConfigurationOptions = new ApiConfigurationOptions();
private final Long apiGasPriceBlocks = 100L;
@CommandLine.Option(
names = {"--api-gas-price-percentile"},
description = "Percentile value to measure for eth_gasPrice (default: ${DEFAULT-VALUE})")
private final Double apiGasPricePercentile = 50.0;
@CommandLine.Option(
names = {"--api-gas-price-max"},
description = "Maximum gas price for eth_gasPrice (default: ${DEFAULT-VALUE})")
private final Long apiGasPriceMax = 500_000_000_000L;
@CommandLine.Option(
names = {"--api-gas-and-priority-fee-limiting-enabled"},
hidden = true,
description =
"Set to enable gas price and minimum priority fee limit in eth_getGasPrice and eth_feeHistory (default: ${DEFAULT-VALUE})")
private final Boolean apiGasAndPriorityFeeLimitingEnabled = false;
@CommandLine.Option(
names = {"--api-gas-and-priority-fee-lower-bound-coefficient"},
hidden = true,
description =
"Coefficient for setting the lower limit of gas price and minimum priority fee in eth_getGasPrice and eth_feeHistory (default: ${DEFAULT-VALUE})")
private final Long apiGasAndPriorityFeeLowerBoundCoefficient =
ApiConfiguration.DEFAULT_LOWER_BOUND_GAS_AND_PRIORITY_FEE_COEFFICIENT;
@CommandLine.Option(
names = {"--api-gas-and-priority-fee-upper-bound-coefficient"},
hidden = true,
description =
"Coefficient for setting the upper limit of gas price and minimum priority fee in eth_getGasPrice and eth_feeHistory (default: ${DEFAULT-VALUE})")
private final Long apiGasAndPriorityFeeUpperBoundCoefficient =
ApiConfiguration.DEFAULT_UPPER_BOUND_GAS_AND_PRIORITY_FEE_COEFFICIENT;
@CommandLine.Option( @CommandLine.Option(
names = {"--static-nodes-file"}, names = {"--static-nodes-file"},
@ -904,29 +862,11 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
"Specifies the static node file containing the static nodes for this node to connect to") "Specifies the static node file containing the static nodes for this node to connect to")
private final Path staticNodesFile = null; private final Path staticNodesFile = null;
@CommandLine.Option(
names = {"--rpc-max-logs-range"},
description =
"Specifies the maximum number of blocks to retrieve logs from via RPC. Must be >=0. 0 specifies no limit (default: ${DEFAULT-VALUE})")
private final Long rpcMaxLogsRange = 5000L;
@CommandLine.Option(
names = {"--rpc-gas-cap"},
description =
"Specifies the gasLimit cap for transaction simulation RPC methods. Must be >=0. 0 specifies no limit (default: ${DEFAULT-VALUE})")
private final Long rpcGasCap = 0L;
@CommandLine.Option( @CommandLine.Option(
names = {"--cache-last-blocks"}, names = {"--cache-last-blocks"},
description = "Specifies the number of last blocks to cache (default: ${DEFAULT-VALUE})") description = "Specifies the number of last blocks to cache (default: ${DEFAULT-VALUE})")
private final Integer numberOfblocksToCache = 0; private final Integer numberOfblocksToCache = 0;
@Option(
names = {"--rpc-max-trace-filter-range"},
description =
"Specifies the maximum number of blocks for the trace_filter method. Must be >=0. 0 specifies no limit (default: $DEFAULT-VALUE)")
private final Long maxTraceFilterRange = 1000L;
@Mixin private P2PTLSConfigOptions p2pTLSConfigOptions; @Mixin private P2PTLSConfigOptions p2pTLSConfigOptions;
@Mixin private PkiBlockCreationOptions pkiBlockCreationOptions; @Mixin private PkiBlockCreationOptions pkiBlockCreationOptions;
@ -941,9 +881,10 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
private MetricsConfiguration metricsConfiguration; private MetricsConfiguration metricsConfiguration;
private Optional<PermissioningConfiguration> permissioningConfiguration; private Optional<PermissioningConfiguration> permissioningConfiguration;
private Optional<TLSConfiguration> p2pTLSConfiguration; private Optional<TLSConfiguration> p2pTLSConfiguration;
private DataStorageConfiguration dataStorageConfiguration;
private Collection<EnodeURL> staticNodes; private Collection<EnodeURL> staticNodes;
private BesuController besuController; private BesuController besuController;
private BesuConfiguration pluginCommonConfiguration; private BesuConfigurationImpl pluginCommonConfiguration;
private MiningParameters miningParameters; private MiningParameters miningParameters;
private BesuComponent besuComponent; private BesuComponent besuComponent;
@ -994,7 +935,9 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
new PkiBlockCreationConfigurationProvider(), new PkiBlockCreationConfigurationProvider(),
new RpcEndpointServiceImpl(), new RpcEndpointServiceImpl(),
new TransactionSelectionServiceImpl(), new TransactionSelectionServiceImpl(),
new PluginTransactionValidatorServiceImpl()); new TransactionPoolValidatorServiceImpl(),
new TransactionSimulationServiceImpl(),
new BlockchainServiceImpl());
} }
/** /**
@ -1016,6 +959,8 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
* @param rpcEndpointServiceImpl instance of RpcEndpointServiceImpl * @param rpcEndpointServiceImpl instance of RpcEndpointServiceImpl
* @param transactionSelectionServiceImpl instance of TransactionSelectionServiceImpl * @param transactionSelectionServiceImpl instance of TransactionSelectionServiceImpl
* @param transactionValidatorServiceImpl instance of TransactionValidatorServiceImpl * @param transactionValidatorServiceImpl instance of TransactionValidatorServiceImpl
* @param transactionSimulationServiceImpl instance of TransactionSimulationServiceImpl
* @param blockchainServiceImpl instance of BlockchainServiceImpl
*/ */
@VisibleForTesting @VisibleForTesting
protected BesuCommand( protected BesuCommand(
@ -1034,7 +979,9 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
final PkiBlockCreationConfigurationProvider pkiBlockCreationConfigProvider, final PkiBlockCreationConfigurationProvider pkiBlockCreationConfigProvider,
final RpcEndpointServiceImpl rpcEndpointServiceImpl, final RpcEndpointServiceImpl rpcEndpointServiceImpl,
final TransactionSelectionServiceImpl transactionSelectionServiceImpl, final TransactionSelectionServiceImpl transactionSelectionServiceImpl,
final PluginTransactionValidatorServiceImpl transactionValidatorServiceImpl) { final TransactionPoolValidatorServiceImpl transactionValidatorServiceImpl,
final TransactionSimulationServiceImpl transactionSimulationServiceImpl,
final BlockchainServiceImpl blockchainServiceImpl) {
this.besuComponent = besuComponent; this.besuComponent = besuComponent;
this.logger = besuComponent.getBesuCommandLogger(); this.logger = besuComponent.getBesuCommandLogger();
this.rlpBlockImporter = rlpBlockImporter; this.rlpBlockImporter = rlpBlockImporter;
@ -1048,12 +995,14 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
this.securityModuleService = securityModuleService; this.securityModuleService = securityModuleService;
this.permissioningService = permissioningService; this.permissioningService = permissioningService;
this.privacyPluginService = privacyPluginService; this.privacyPluginService = privacyPluginService;
pluginCommonConfiguration = new BesuCommandConfigurationService(); this.pluginCommonConfiguration = new BesuConfigurationImpl();
besuPluginContext.addService(BesuConfiguration.class, pluginCommonConfiguration); besuPluginContext.addService(BesuConfiguration.class, pluginCommonConfiguration);
this.pkiBlockCreationConfigProvider = pkiBlockCreationConfigProvider; this.pkiBlockCreationConfigProvider = pkiBlockCreationConfigProvider;
this.rpcEndpointServiceImpl = rpcEndpointServiceImpl; this.rpcEndpointServiceImpl = rpcEndpointServiceImpl;
this.transactionSelectionServiceImpl = transactionSelectionServiceImpl; this.transactionSelectionServiceImpl = transactionSelectionServiceImpl;
this.transactionValidatorServiceImpl = transactionValidatorServiceImpl; this.transactionValidatorServiceImpl = transactionValidatorServiceImpl;
this.transactionSimulationServiceImpl = transactionSimulationServiceImpl;
this.blockchainServiceImpl = blockchainServiceImpl;
} }
/** /**
@ -1075,6 +1024,9 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
toCommandLine(); toCommandLine();
// use terminal width for usage message
commandLine.getCommandSpec().usageMessage().autoWidth(true);
handleStableOptions(); handleStableOptions();
addSubCommands(in); addSubCommands(in);
registerConverters(); registerConverters();
@ -1119,9 +1071,15 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
vertx = createVertx(createVertxOptions(metricsSystem.get())); vertx = createVertx(createVertxOptions(metricsSystem.get()));
validateOptions(); validateOptions();
configure(); configure();
// If we're not running against a named network, or if version compat protection has been
// explicitly enabled, perform compatibility check
VersionMetadata.versionCompatibilityChecks(versionCompatibilityProtection, dataDir());
configureNativeLibs(); configureNativeLibs();
besuController = initController(); besuController = buildController();
besuPluginContext.beforeExternalServices(); besuPluginContext.beforeExternalServices();
@ -1143,7 +1101,7 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
} }
@VisibleForTesting @VisibleForTesting
void setBesuConfiguration(final BesuConfiguration pluginCommonConfiguration) { void setBesuConfiguration(final BesuConfigurationImpl pluginCommonConfiguration) {
this.pluginCommonConfiguration = pluginCommonConfiguration; this.pluginCommonConfiguration = pluginCommonConfiguration;
} }
@ -1235,7 +1193,10 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
besuPluginContext.addService( besuPluginContext.addService(
TransactionSelectionService.class, transactionSelectionServiceImpl); TransactionSelectionService.class, transactionSelectionServiceImpl);
besuPluginContext.addService( besuPluginContext.addService(
PluginTransactionValidatorService.class, transactionValidatorServiceImpl); TransactionPoolValidatorService.class, transactionValidatorServiceImpl);
besuPluginContext.addService(
TransactionSimulationService.class, transactionSimulationServiceImpl);
besuPluginContext.addService(BlockchainService.class, blockchainServiceImpl);
// register built-in plugins // register built-in plugins
rocksDBPlugin = new RocksDBPlugin(); rocksDBPlugin = new RocksDBPlugin();
@ -1316,6 +1277,16 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
} }
private void startPlugins() { private void startPlugins() {
blockchainServiceImpl.init(
besuController.getProtocolContext(), besuController.getProtocolSchedule());
transactionSimulationServiceImpl.init(
besuController.getProtocolContext().getBlockchain(),
new TransactionSimulator(
besuController.getProtocolContext().getBlockchain(),
besuController.getProtocolContext().getWorldStateArchive(),
besuController.getProtocolSchedule(),
apiConfiguration.getGasCap()));
besuPluginContext.addService( besuPluginContext.addService(
BesuEvents.class, BesuEvents.class,
new BesuEventsImpl( new BesuEventsImpl(
@ -1325,10 +1296,6 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
besuController.getSyncState())); besuController.getSyncState()));
besuPluginContext.addService(MetricsSystem.class, getMetricsSystem()); besuPluginContext.addService(MetricsSystem.class, getMetricsSystem());
besuPluginContext.addService(
BlockchainService.class,
new BlockchainServiceImpl(besuController.getProtocolContext().getBlockchain()));
besuPluginContext.addService( besuPluginContext.addService(
TraceService.class, TraceService.class,
new TraceServiceImpl( new TraceServiceImpl(
@ -1481,10 +1448,15 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
validateTransactionPoolOptions(); validateTransactionPoolOptions();
validateDataStorageOptions(); validateDataStorageOptions();
validateGraphQlOptions(); validateGraphQlOptions();
validateApiOptions();
p2pTLSConfigOptions.checkP2PTLSOptionsDependencies(logger, commandLine); p2pTLSConfigOptions.checkP2PTLSOptionsDependencies(logger, commandLine);
pkiBlockCreationOptions.checkPkiBlockCreationOptionsDependencies(logger, commandLine); pkiBlockCreationOptions.checkPkiBlockCreationOptionsDependencies(logger, commandLine);
} }
private void validateApiOptions() {
apiConfigurationOptions.validate(commandLine, logger);
}
private void validateTransactionPoolOptions() { private void validateTransactionPoolOptions() {
transactionPoolOptions.validate(commandLine, getActualGenesisConfigOptions()); transactionPoolOptions.validate(commandLine, getActualGenesisConfigOptions());
} }
@ -1568,31 +1540,13 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
} }
} }
private void checkApiOptionsDependencies() {
CommandLineUtils.checkOptionDependencies(
logger,
commandLine,
"--api-gas-and-priority-fee-limiting-enabled",
!apiGasAndPriorityFeeLimitingEnabled,
asList(
"--api-gas-and-priority-fee-upper-bound-coefficient",
"--api-gas-and-priority-fee-lower-bound-coefficient"));
}
private void ensureValidPeerBoundParams() { private void ensureValidPeerBoundParams() {
maxPeers = p2PDiscoveryOptionGroup.maxPeers; maxPeers = p2PDiscoveryOptionGroup.maxPeers;
peersLowerBound = unstableNetworkingOptions.toDomainObject().getPeerLowerBound();
if (peersLowerBound > maxPeers) {
logger.warn(
"`--Xp2p-peer-lower-bound` "
+ peersLowerBound
+ " must not exceed --max-peers "
+ maxPeers);
logger.warn("setting --Xp2p-peer-lower-bound=" + maxPeers);
peersLowerBound = maxPeers;
}
final Boolean isLimitRemoteWireConnectionsEnabled = final Boolean isLimitRemoteWireConnectionsEnabled =
p2PDiscoveryOptionGroup.isLimitRemoteWireConnectionsEnabled; p2PDiscoveryOptionGroup.isLimitRemoteWireConnectionsEnabled;
if (isOptionSet(commandLine, PEER_LOWER_BOUND_FLAG)) {
logger.warn(PEER_LOWER_BOUND_FLAG + " is deprecated and will be removed soon.");
}
if (isLimitRemoteWireConnectionsEnabled) { if (isLimitRemoteWireConnectionsEnabled) {
final float fraction = final float fraction =
Fraction.fromPercentage(p2PDiscoveryOptionGroup.maxRemoteConnectionsPercentage) Fraction.fromPercentage(p2PDiscoveryOptionGroup.maxRemoteConnectionsPercentage)
@ -1600,7 +1554,7 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
checkState( checkState(
fraction >= 0.0 && fraction <= 1.0, fraction >= 0.0 && fraction <= 1.0,
"Fraction of remote connections allowed must be between 0.0 and 1.0 (inclusive)."); "Fraction of remote connections allowed must be between 0.0 and 1.0 (inclusive).");
maxRemoteInitiatedPeers = (int) Math.floor(fraction * maxPeers); maxRemoteInitiatedPeers = Math.round(fraction * maxPeers);
} else { } else {
maxRemoteInitiatedPeers = maxPeers; maxRemoteInitiatedPeers = maxPeers;
} }
@ -1644,6 +1598,19 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
throw new ParameterException( throw new ParameterException(
this.commandLine, "Unable to load genesis file. " + e.getCause()); this.commandLine, "Unable to load genesis file. " + e.getCause());
} }
// snap and checkpoint can't be used with BFT but can for clique
if (genesisConfigOptions.isIbftLegacy()
|| genesisConfigOptions.isIbft2()
|| genesisConfigOptions.isQbft()) {
final String errorSuffix = "can't be used with BFT networks";
if (SyncMode.CHECKPOINT.equals(syncMode) || SyncMode.X_CHECKPOINT.equals(syncMode)) {
throw new ParameterException(
commandLine, String.format("%s %s", "Checkpoint sync", errorSuffix));
}
if (syncMode == SyncMode.SNAP || syncMode == SyncMode.X_SNAP) {
throw new ParameterException(commandLine, String.format("%s %s", "Snap sync", errorSuffix));
}
}
return genesisConfigOptions; return genesisConfigOptions;
} }
@ -1673,8 +1640,8 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
CommandLineUtils.failIfOptionDoesntMeetRequirement( CommandLineUtils.failIfOptionDoesntMeetRequirement(
commandLine, commandLine,
"--Xcheckpoint-post-merge-enabled can only be used with X_CHECKPOINT sync-mode", "--Xcheckpoint-post-merge-enabled can only be used with CHECKPOINT sync-mode",
SyncMode.X_CHECKPOINT.equals(getDefaultSyncModeIfNotSet()), SyncMode.isCheckpointSync(getDefaultSyncModeIfNotSet()),
singletonList("--Xcheckpoint-post-merge-enabled")); singletonList("--Xcheckpoint-post-merge-enabled"));
CommandLineUtils.failIfOptionDoesntMeetRequirement( CommandLineUtils.failIfOptionDoesntMeetRequirement(
@ -1692,24 +1659,13 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
"--node-private-key-file", "--node-private-key-file",
"--security-module=" + DEFAULT_SECURITY_MODULE); "--security-module=" + DEFAULT_SECURITY_MODULE);
} }
if (isPruningEnabled()) {
if (dataStorageOptions
.toDomainObject()
.getDataStorageFormat()
.equals(DataStorageFormat.BONSAI)) {
logger.warn("Forest pruning is ignored with Bonsai data storage format.");
} else {
logger.warn(
"Forest pruning is deprecated and will be removed soon. To save disk space consider switching to Bonsai data storage format.");
}
}
} }
private void configure() throws Exception { private void configure() throws Exception {
checkPortClash(); checkPortClash();
checkIfRequiredPortsAreAvailable(); checkIfRequiredPortsAreAvailable();
syncMode = getDefaultSyncModeIfNotSet(); syncMode = getDefaultSyncModeIfNotSet();
versionCompatibilityProtection = getDefaultVersionCompatibilityProtectionIfNotSet();
ethNetworkConfig = updateNetworkConfig(network); ethNetworkConfig = updateNetworkConfig(network);
@ -1739,7 +1695,8 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
unstableIpcOptions.isEnabled(), unstableIpcOptions.isEnabled(),
unstableIpcOptions.getIpcPath(), unstableIpcOptions.getIpcPath(),
unstableIpcOptions.getRpcIpcApis()); unstableIpcOptions.getRpcIpcApis());
apiConfiguration = apiConfiguration(); apiConfiguration = apiConfigurationOptions.apiConfiguration(getMiningParameters());
dataStorageConfiguration = getDataStorageConfiguration();
// hostsWhitelist is a hidden option. If it is specified, add the list to hostAllowlist // hostsWhitelist is a hidden option. If it is specified, add the list to hostAllowlist
if (!hostsWhitelist.isEmpty()) { if (!hostsWhitelist.isEmpty()) {
// if allowlist == default values, remove the default values // if allowlist == default values, remove the default values
@ -1802,10 +1759,6 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
} }
} }
private BesuController initController() {
return buildController();
}
/** /**
* Builds BesuController * Builds BesuController
* *
@ -1827,6 +1780,11 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
* @return instance of BesuControllerBuilder * @return instance of BesuControllerBuilder
*/ */
public BesuControllerBuilder getControllerBuilder() { public BesuControllerBuilder getControllerBuilder() {
pluginCommonConfiguration.init(
dataDir(),
dataDir().resolve(DATABASE_PATH),
getDataStorageConfiguration(),
getMiningParameters());
final KeyValueStorageProvider storageProvider = keyValueStorageProvider(keyValueStorageName); final KeyValueStorageProvider storageProvider = keyValueStorageProvider(keyValueStorageName);
return controllerBuilderFactory return controllerBuilderFactory
.fromEthNetworkConfig( .fromEthNetworkConfig(
@ -1834,9 +1792,8 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
.synchronizerConfiguration(buildSyncConfig()) .synchronizerConfiguration(buildSyncConfig())
.ethProtocolConfiguration(unstableEthProtocolOptions.toDomainObject()) .ethProtocolConfiguration(unstableEthProtocolOptions.toDomainObject())
.networkConfiguration(unstableNetworkingOptions.toDomainObject()) .networkConfiguration(unstableNetworkingOptions.toDomainObject())
.transactionSelectorFactory(getTransactionSelectorFactory())
.pluginTransactionValidatorFactory(getPluginTransactionValidatorFactory())
.dataDirectory(dataDir()) .dataDirectory(dataDir())
.dataStorageConfiguration(getDataStorageConfiguration())
.miningParameters(getMiningParameters()) .miningParameters(getMiningParameters())
.transactionPoolConfiguration(buildTransactionPoolConfiguration()) .transactionPoolConfiguration(buildTransactionPoolConfiguration())
.nodeKey(new NodeKey(securityModule())) .nodeKey(new NodeKey(securityModule()))
@ -1847,9 +1804,6 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
.clock(Clock.systemUTC()) .clock(Clock.systemUTC())
.isRevertReasonEnabled(isRevertReasonEnabled) .isRevertReasonEnabled(isRevertReasonEnabled)
.storageProvider(storageProvider) .storageProvider(storageProvider)
.isPruningEnabled(isPruningEnabled())
.pruningConfiguration(
new PrunerConfiguration(pruningBlockConfirmations, pruningBlocksRetained))
.genesisConfigOverrides(genesisConfigOverrides) .genesisConfigOverrides(genesisConfigOverrides)
.gasLimitCalculator( .gasLimitCalculator(
getMiningParameters().getTargetGasLimit().isPresent() getMiningParameters().getTargetGasLimit().isPresent()
@ -1858,28 +1812,13 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
.requiredBlocks(requiredBlocks) .requiredBlocks(requiredBlocks)
.reorgLoggingThreshold(reorgLoggingThreshold) .reorgLoggingThreshold(reorgLoggingThreshold)
.evmConfiguration(unstableEvmOptions.toDomainObject()) .evmConfiguration(unstableEvmOptions.toDomainObject())
.dataStorageConfiguration(dataStorageOptions.toDomainObject())
.maxPeers(p2PDiscoveryOptionGroup.maxPeers) .maxPeers(p2PDiscoveryOptionGroup.maxPeers)
.lowerBoundPeers(peersLowerBound)
.maxRemotelyInitiatedPeers(maxRemoteInitiatedPeers) .maxRemotelyInitiatedPeers(maxRemoteInitiatedPeers)
.randomPeerPriority(p2PDiscoveryOptionGroup.randomPeerPriority) .randomPeerPriority(p2PDiscoveryOptionGroup.randomPeerPriority)
.chainPruningConfiguration(unstableChainPruningOptions.toDomainObject()) .chainPruningConfiguration(unstableChainPruningOptions.toDomainObject())
.cacheLastBlocks(numberOfblocksToCache); .cacheLastBlocks(numberOfblocksToCache);
} }
@NotNull
private Optional<PluginTransactionSelectorFactory> getTransactionSelectorFactory() {
final Optional<TransactionSelectionService> txSelectionService =
besuPluginContext.getService(TransactionSelectionService.class);
return txSelectionService.isPresent() ? txSelectionService.get().get() : Optional.empty();
}
private PluginTransactionValidatorFactory getPluginTransactionValidatorFactory() {
final Optional<PluginTransactionValidatorService> txSValidatorService =
besuPluginContext.getService(PluginTransactionValidatorService.class);
return txSValidatorService.map(PluginTransactionValidatorService::get).orElse(null);
}
private JsonRpcConfiguration createEngineJsonRpcConfiguration( private JsonRpcConfiguration createEngineJsonRpcConfiguration(
final Integer engineListenPort, final List<String> allowCallsFrom) { final Integer engineListenPort, final List<String> allowCallsFrom) {
jsonRpcHttpOptions.checkDependencies(logger, commandLine); jsonRpcHttpOptions.checkDependencies(logger, commandLine);
@ -1917,32 +1856,6 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
"--privacy-tls-known-enclave-file")); "--privacy-tls-known-enclave-file"));
} }
private ApiConfiguration apiConfiguration() {
checkApiOptionsDependencies();
var builder =
ImmutableApiConfiguration.builder()
.gasPriceBlocks(apiGasPriceBlocks)
.gasPricePercentile(apiGasPricePercentile)
.gasPriceMinSupplier(
getMiningParameters().getMinTransactionGasPrice().getAsBigInteger()::longValueExact)
.gasPriceMax(apiGasPriceMax)
.maxLogsRange(rpcMaxLogsRange)
.gasCap(rpcGasCap)
.isGasAndPriorityFeeLimitingEnabled(apiGasAndPriorityFeeLimitingEnabled)
.maxTraceFilterRange(maxTraceFilterRange);
if (apiGasAndPriorityFeeLimitingEnabled) {
if (apiGasAndPriorityFeeLowerBoundCoefficient > apiGasAndPriorityFeeUpperBoundCoefficient) {
throw new ParameterException(
this.commandLine,
"--api-gas-and-priority-fee-lower-bound-coefficient cannot be greater than the value of --api-gas-and-priority-fee-upper-bound-coefficient");
}
builder
.lowerBoundGasAndPriorityFeeCoefficient(apiGasAndPriorityFeeLowerBoundCoefficient)
.upperBoundGasAndPriorityFeeCoefficient(apiGasAndPriorityFeeUpperBoundCoefficient);
}
return builder.build();
}
/** /**
* Metrics Configuration for Besu * Metrics Configuration for Besu
* *
@ -2020,8 +1933,15 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
if (syncMode == SyncMode.FAST) { if (syncMode == SyncMode.FAST) {
throw new ParameterException(commandLine, String.format("%s %s", "Fast sync", errorSuffix)); throw new ParameterException(commandLine, String.format("%s %s", "Fast sync", errorSuffix));
} }
if (isPruningEnabled()) { if (syncMode == SyncMode.SNAP || syncMode == SyncMode.X_SNAP) {
throw new ParameterException(commandLine, String.format("%s %s", "Pruning", errorSuffix)); throw new ParameterException(commandLine, String.format("%s %s", "Snap sync", errorSuffix));
}
if (syncMode == SyncMode.CHECKPOINT || syncMode == SyncMode.X_CHECKPOINT) {
throw new ParameterException(
commandLine, String.format("%s %s", "Checkpoint sync", errorSuffix));
}
if (getDataStorageConfiguration().getDataStorageFormat().equals(DataStorageFormat.BONSAI)) {
throw new ParameterException(commandLine, String.format("%s %s", "Bonsai", errorSuffix));
} }
if (Boolean.TRUE.equals(privacyOptionGroup.isPrivacyMultiTenancyEnabled) if (Boolean.TRUE.equals(privacyOptionGroup.isPrivacyMultiTenancyEnabled)
@ -2165,6 +2085,7 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
} }
private TransactionPoolConfiguration buildTransactionPoolConfiguration() { private TransactionPoolConfiguration buildTransactionPoolConfiguration() {
transactionPoolOptions.setPluginTransactionValidatorService(transactionValidatorServiceImpl);
final var txPoolConf = transactionPoolOptions.toDomainObject(); final var txPoolConf = transactionPoolOptions.toDomainObject();
final var txPoolConfBuilder = final var txPoolConfBuilder =
ImmutableTransactionPoolConfiguration.builder() ImmutableTransactionPoolConfiguration.builder()
@ -2206,13 +2127,26 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
private MiningParameters getMiningParameters() { private MiningParameters getMiningParameters() {
if (miningParameters == null) { if (miningParameters == null) {
miningOptions.setGenesisBlockPeriodSeconds( miningOptions.setTransactionSelectionService(transactionSelectionServiceImpl);
getGenesisBlockPeriodSeconds(getActualGenesisConfigOptions()));
miningParameters = miningOptions.toDomainObject(); miningParameters = miningOptions.toDomainObject();
getGenesisBlockPeriodSeconds(getActualGenesisConfigOptions())
.ifPresent(miningParameters::setBlockPeriodSeconds);
initMiningParametersMetrics(miningParameters);
} }
return miningParameters; return miningParameters;
} }
private DataStorageConfiguration getDataStorageConfiguration() {
if (dataStorageConfiguration == null) {
dataStorageConfiguration = dataStorageOptions.toDomainObject();
}
return dataStorageConfiguration;
}
private void initMiningParametersMetrics(final MiningParameters miningParameters) {
new MiningParametersMetrics(getMetricsSystem(), miningParameters);
}
private OptionalInt getGenesisBlockPeriodSeconds( private OptionalInt getGenesisBlockPeriodSeconds(
final GenesisConfigOptions genesisConfigOptions) { final GenesisConfigOptions genesisConfigOptions) {
if (genesisConfigOptions.isClique()) { if (genesisConfigOptions.isClique()) {
@ -2230,10 +2164,6 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
return OptionalInt.empty(); return OptionalInt.empty();
} }
private boolean isPruningEnabled() {
return pruningEnabled;
}
// Blockchain synchronization from peers. // Blockchain synchronization from peers.
private Runner synchronize( private Runner synchronize(
final BesuController controller, final BesuController controller,
@ -2563,11 +2493,12 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
.forEach( .forEach(
port -> { port -> {
if (port.equals(p2PDiscoveryOptionGroup.p2pPort) if (port.equals(p2PDiscoveryOptionGroup.p2pPort)
&& !NetworkUtility.isPortAvailable(port)) { && (NetworkUtility.isPortUnavailableForTcp(port)
|| NetworkUtility.isPortUnavailableForUdp(port))) {
unavailablePorts.add(port); unavailablePorts.add(port);
} }
if (!port.equals(p2PDiscoveryOptionGroup.p2pPort) if (!port.equals(p2PDiscoveryOptionGroup.p2pPort)
&& !NetworkUtility.isPortAvailableForTcp(port)) { && NetworkUtility.isPortUnavailableForTcp(port)) {
unavailablePorts.add(port); unavailablePorts.add(port);
} }
}); });
@ -2623,22 +2554,14 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
return loggingLevelOption.getLogLevel(); return loggingLevelOption.getLogLevel();
} }
private class BesuCommandConfigurationService implements BesuConfiguration { /**
* Returns the flag indicating that version compatibility checks will be made.
@Override *
public Path getStoragePath() { * @return true if compatibility checks should be made, otherwise false
return dataDir().resolve(DATABASE_PATH); */
} @VisibleForTesting
public Boolean getVersionCompatibilityProtection() {
@Override return versionCompatibilityProtection;
public Path getDataPath() {
return dataDir();
}
@Override
public int getDatabaseVersion() {
return dataStorageOptions.toDomainObject().getDataStorageFormat().getDatabaseVersion();
}
} }
private void instantiateSignatureAlgorithmFactory() { private void instantiateSignatureAlgorithmFactory() {
@ -2657,10 +2580,7 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
SignatureAlgorithmFactory.setInstance(SignatureAlgorithmType.create(ecCurve.get())); SignatureAlgorithmFactory.setInstance(SignatureAlgorithmType.create(ecCurve.get()));
} catch (final IllegalArgumentException e) { } catch (final IllegalArgumentException e) {
throw new CommandLine.InitializationException( throw new CommandLine.InitializationException(
new StringBuilder() "Invalid genesis file configuration for ecCurve. " + e.getMessage());
.append("Invalid genesis file configuration for ecCurve. ")
.append(e.getMessage())
.toString());
} }
} }
@ -2744,11 +2664,17 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
.orElse( .orElse(
genesisFile == null genesisFile == null
&& !privacyOptionGroup.isPrivacyEnabled && !privacyOptionGroup.isPrivacyEnabled
&& Optional.ofNullable(network).map(NetworkName::canFastSync).orElse(false) && Optional.ofNullable(network).map(NetworkName::canSnapSync).orElse(false)
? SyncMode.FAST ? SyncMode.SNAP
: SyncMode.FULL); : SyncMode.FULL);
} }
private Boolean getDefaultVersionCompatibilityProtectionIfNotSet() {
// Version compatibility protection is enabled by default for non-named networks
return Optional.ofNullable(versionCompatibilityProtection)
.orElse(commandLine.getParseResult().hasMatchedOption("network") ? false : true);
}
private String generateConfigurationOverview() { private String generateConfigurationOverview() {
final ConfigurationOverviewBuilder builder = new ConfigurationOverviewBuilder(logger); final ConfigurationOverviewBuilder builder = new ConfigurationOverviewBuilder(logger);
@ -2799,14 +2725,15 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
builder.setHighSpecEnabled(); builder.setHighSpecEnabled();
} }
if (dataStorageOptions.toDomainObject().getUnstable().getBonsaiLimitTrieLogsEnabled()) { if (getDataStorageConfiguration().getUnstable().getBonsaiLimitTrieLogsEnabled()) {
builder.setLimitTrieLogsEnabled(); builder.setLimitTrieLogsEnabled();
builder.setTrieLogRetentionLimit( builder.setTrieLogRetentionLimit(getDataStorageConfiguration().getBonsaiMaxLayersToLoad());
dataStorageOptions.toDomainObject().getBonsaiMaxLayersToLoad());
builder.setTrieLogsPruningWindowSize( builder.setTrieLogsPruningWindowSize(
dataStorageOptions.toDomainObject().getUnstable().getBonsaiTrieLogPruningWindowSize()); getDataStorageConfiguration().getUnstable().getBonsaiTrieLogPruningWindowSize());
} }
builder.setSnapServerEnabled(this.unstableSynchronizerOptions.isSnapsyncServerEnabled());
builder.setTxPoolImplementation(buildTransactionPoolConfiguration().getTxPoolImplementation()); builder.setTxPoolImplementation(buildTransactionPoolConfiguration().getTxPoolImplementation());
builder.setWorldStateUpdateMode(unstableEvmOptions.toDomainObject().worldUpdaterMode()); builder.setWorldStateUpdateMode(unstableEvmOptions.toDomainObject().worldUpdaterMode());

@ -55,6 +55,7 @@ public class ConfigurationOverviewBuilder {
private boolean isBonsaiLimitTrieLogsEnabled = false; private boolean isBonsaiLimitTrieLogsEnabled = false;
private long trieLogRetentionLimit = 0; private long trieLogRetentionLimit = 0;
private Integer trieLogsPruningWindowSize = null; private Integer trieLogsPruningWindowSize = null;
private boolean isSnapServerEnabled = false;
private TransactionPoolConfiguration.Implementation txPoolImplementation; private TransactionPoolConfiguration.Implementation txPoolImplementation;
private EvmConfiguration.WorldUpdaterMode worldStateUpdateMode; private EvmConfiguration.WorldUpdaterMode worldStateUpdateMode;
private Map<String, String> environment; private Map<String, String> environment;
@ -219,6 +220,17 @@ public class ConfigurationOverviewBuilder {
return this; return this;
} }
/**
* Sets snap server enabled/disabled
*
* @param snapServerEnabled bool to indicate if snap server is enabled
* @return the builder
*/
public ConfigurationOverviewBuilder setSnapServerEnabled(final boolean snapServerEnabled) {
isSnapServerEnabled = snapServerEnabled;
return this;
}
/** /**
* Sets trie logs pruning window size * Sets trie logs pruning window size
* *
@ -339,6 +351,10 @@ public class ConfigurationOverviewBuilder {
lines.add("Using " + worldStateUpdateMode + " worldstate update mode"); lines.add("Using " + worldStateUpdateMode + " worldstate update mode");
if (isSnapServerEnabled) {
lines.add("Experimental Snap Sync server enabled");
}
if (isBonsaiLimitTrieLogsEnabled) { if (isBonsaiLimitTrieLogsEnabled) {
final StringBuilder trieLogPruningString = new StringBuilder(); final StringBuilder trieLogPruningString = new StringBuilder();
trieLogPruningString trieLogPruningString

@ -74,8 +74,6 @@ public interface DefaultCommandValues {
int SYNC_MIN_PEER_COUNT = 5; int SYNC_MIN_PEER_COUNT = 5;
/** The constant DEFAULT_MAX_PEERS. */ /** The constant DEFAULT_MAX_PEERS. */
int DEFAULT_MAX_PEERS = 25; int DEFAULT_MAX_PEERS = 25;
/** The constant DEFAULT_P2P_PEER_LOWER_BOUND. */
int DEFAULT_P2P_PEER_LOWER_BOUND = 25;
/** The constant DEFAULT_HTTP_MAX_CONNECTIONS. */ /** The constant DEFAULT_HTTP_MAX_CONNECTIONS. */
int DEFAULT_HTTP_MAX_CONNECTIONS = 80; int DEFAULT_HTTP_MAX_CONNECTIONS = 80;
/** The constant DEFAULT_HTTP_MAX_BATCH_SIZE. */ /** The constant DEFAULT_HTTP_MAX_BATCH_SIZE. */

@ -15,6 +15,7 @@
package org.hyperledger.besu.cli.config; package org.hyperledger.besu.cli.config;
import java.math.BigInteger; import java.math.BigInteger;
import java.util.Locale;
import java.util.Optional; import java.util.Optional;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@ -29,6 +30,7 @@ public enum NetworkName {
GOERLI("/goerli.json", BigInteger.valueOf(5)), GOERLI("/goerli.json", BigInteger.valueOf(5)),
/** Holešky network name. */ /** Holešky network name. */
HOLESKY("/holesky.json", BigInteger.valueOf(17000)), HOLESKY("/holesky.json", BigInteger.valueOf(17000)),
KAUSTINEN("/kaustinen.json", BigInteger.valueOf(69420)),
/** Dev network name. */ /** Dev network name. */
DEV("/dev.json", BigInteger.valueOf(2018), false), DEV("/dev.json", BigInteger.valueOf(2018), false),
@ -43,17 +45,17 @@ public enum NetworkName {
private final String genesisFile; private final String genesisFile;
private final BigInteger networkId; private final BigInteger networkId;
private final boolean canFastSync; private final boolean canSnapSync;
private final String deprecationDate; private final String deprecationDate;
NetworkName(final String genesisFile, final BigInteger networkId) { NetworkName(final String genesisFile, final BigInteger networkId) {
this(genesisFile, networkId, true); this(genesisFile, networkId, true);
} }
NetworkName(final String genesisFile, final BigInteger networkId, final boolean canFastSync) { NetworkName(final String genesisFile, final BigInteger networkId, final boolean canSnapSync) {
this.genesisFile = genesisFile; this.genesisFile = genesisFile;
this.networkId = networkId; this.networkId = networkId;
this.canFastSync = canFastSync; this.canSnapSync = canSnapSync;
// no deprecations planned // no deprecations planned
this.deprecationDate = null; this.deprecationDate = null;
} }
@ -77,12 +79,12 @@ public enum NetworkName {
} }
/** /**
* Can fast sync boolean. * Can SNAP sync boolean.
* *
* @return the boolean * @return the boolean
*/ */
public boolean canFastSync() { public boolean canSnapSync() {
return canFastSync; return canSnapSync;
} }
/** /**
@ -91,7 +93,7 @@ public enum NetworkName {
* @return the string * @return the string
*/ */
public String normalize() { public String normalize() {
return StringUtils.capitalize(name().toLowerCase()); return StringUtils.capitalize(name().toLowerCase(Locale.ROOT));
} }
/** /**

@ -14,9 +14,21 @@
*/ */
package org.hyperledger.besu.cli.config; package org.hyperledger.besu.cli.config;
import java.util.Locale;
import org.apache.commons.lang3.StringUtils;
/** Enum for profile names. Each profile corresponds to a configuration file. */ /** Enum for profile names. Each profile corresponds to a configuration file. */
public enum ProfileName { public enum ProfileName {
/** The 'DEV' profile. Corresponds to the 'profiles/dev.toml' configuration file. */ /** The 'STAKER' profile */
STAKER("profiles/staker.toml"),
/** The 'MINIMALIST_STAKER' profile */
MINIMALIST_STAKER("profiles/minimalist-staker.toml"),
/** The 'ENTERPRISE' profile */
ENTERPRISE("profiles/enterprise-private.toml"),
/** The 'PRIVATE' profile */
PRIVATE("profiles/enterprise-private.toml"),
/** The 'DEV' profile. */
DEV("profiles/dev.toml"); DEV("profiles/dev.toml");
private final String configFile; private final String configFile;
@ -38,4 +50,9 @@ public enum ProfileName {
public String getConfigFile() { public String getConfigFile() {
return configFile; return configFile;
} }
@Override
public String toString() {
return StringUtils.capitalize(name().replaceAll("_", " ").toLowerCase(Locale.ROOT));
}
} }

@ -18,6 +18,7 @@ import org.hyperledger.besu.plugin.services.metrics.MetricCategory;
import java.util.EnumSet; import java.util.EnumSet;
import java.util.HashMap; import java.util.HashMap;
import java.util.Locale;
import java.util.Map; import java.util.Map;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
@ -54,7 +55,7 @@ public class MetricCategoryConverter implements CommandLine.ITypeConverter<Metri
* @param metricCategory the metric category * @param metricCategory the metric category
*/ */
public void addRegistryCategory(final MetricCategory metricCategory) { public void addRegistryCategory(final MetricCategory metricCategory) {
metricCategories.put(metricCategory.getName().toUpperCase(), metricCategory); metricCategories.put(metricCategory.getName().toUpperCase(Locale.ROOT), metricCategory);
} }
/** /**

@ -14,6 +14,7 @@
*/ */
package org.hyperledger.besu.cli.options; package org.hyperledger.besu.cli.options;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.Arrays.asList; import static java.util.Arrays.asList;
import static java.util.Collections.singletonList; import static java.util.Collections.singletonList;
import static org.hyperledger.besu.ethereum.core.MiningParameters.DEFAULT_NON_POA_BLOCK_TXS_SELECTION_MAX_TIME; import static org.hyperledger.besu.ethereum.core.MiningParameters.DEFAULT_NON_POA_BLOCK_TXS_SELECTION_MAX_TIME;
@ -37,10 +38,10 @@ import org.hyperledger.besu.datatypes.Wei;
import org.hyperledger.besu.ethereum.core.ImmutableMiningParameters; import org.hyperledger.besu.ethereum.core.ImmutableMiningParameters;
import org.hyperledger.besu.ethereum.core.ImmutableMiningParameters.MutableInitValues; import org.hyperledger.besu.ethereum.core.ImmutableMiningParameters.MutableInitValues;
import org.hyperledger.besu.ethereum.core.MiningParameters; import org.hyperledger.besu.ethereum.core.MiningParameters;
import org.hyperledger.besu.plugin.services.TransactionSelectionService;
import org.hyperledger.besu.util.number.PositiveNumber; import org.hyperledger.besu.util.number.PositiveNumber;
import java.util.List; import java.util.List;
import java.util.OptionalInt;
import org.apache.tuweni.bytes.Bytes; import org.apache.tuweni.bytes.Bytes;
import org.slf4j.Logger; import org.slf4j.Logger;
@ -189,7 +190,7 @@ public class MiningOptions implements CLIOptions<MiningParameters> {
DEFAULT_POS_BLOCK_CREATION_REPETITION_MIN_DURATION; DEFAULT_POS_BLOCK_CREATION_REPETITION_MIN_DURATION;
} }
private OptionalInt maybeGenesisBlockPeriodSeconds; private TransactionSelectionService transactionSelectionService;
private MiningOptions() {} private MiningOptions() {}
@ -203,13 +204,13 @@ public class MiningOptions implements CLIOptions<MiningParameters> {
} }
/** /**
* Set the optional genesis block period per seconds * Set the transaction selection service
* *
* @param genesisBlockPeriodSeconds if the network is PoA then the block period in seconds * @param transactionSelectionService the transaction selection service
* specified in the genesis file, otherwise empty.
*/ */
public void setGenesisBlockPeriodSeconds(final OptionalInt genesisBlockPeriodSeconds) { public void setTransactionSelectionService(
maybeGenesisBlockPeriodSeconds = genesisBlockPeriodSeconds; final TransactionSelectionService transactionSelectionService) {
this.transactionSelectionService = transactionSelectionService;
} }
/** /**
@ -298,7 +299,7 @@ public class MiningOptions implements CLIOptions<MiningParameters> {
static MiningOptions fromConfig(final MiningParameters miningParameters) { static MiningOptions fromConfig(final MiningParameters miningParameters) {
final MiningOptions miningOptions = MiningOptions.create(); final MiningOptions miningOptions = MiningOptions.create();
miningOptions.setGenesisBlockPeriodSeconds(miningParameters.getGenesisBlockPeriodSeconds()); miningOptions.setTransactionSelectionService(miningParameters.getTransactionSelectionService());
miningOptions.isMiningEnabled = miningParameters.isMiningEnabled(); miningOptions.isMiningEnabled = miningParameters.isMiningEnabled();
miningOptions.iStratumMiningEnabled = miningParameters.isStratumMiningEnabled(); miningOptions.iStratumMiningEnabled = miningParameters.isStratumMiningEnabled();
miningOptions.stratumNetworkInterface = miningParameters.getStratumNetworkInterface(); miningOptions.stratumNetworkInterface = miningParameters.getStratumNetworkInterface();
@ -333,10 +334,9 @@ public class MiningOptions implements CLIOptions<MiningParameters> {
@Override @Override
public MiningParameters toDomainObject() { public MiningParameters toDomainObject() {
if (maybeGenesisBlockPeriodSeconds == null) { checkNotNull(
throw new IllegalStateException( transactionSelectionService,
"genesisBlockPeriodSeconds must be set before using this object"); "transactionSelectionService must be set before using this object");
}
final var updatableInitValuesBuilder = final var updatableInitValuesBuilder =
MutableInitValues.builder() MutableInitValues.builder()
@ -354,7 +354,7 @@ public class MiningOptions implements CLIOptions<MiningParameters> {
} }
return ImmutableMiningParameters.builder() return ImmutableMiningParameters.builder()
.genesisBlockPeriodSeconds(maybeGenesisBlockPeriodSeconds) .transactionSelectionService(transactionSelectionService)
.mutableInitValues(updatableInitValuesBuilder.build()) .mutableInitValues(updatableInitValuesBuilder.build())
.isStratumMiningEnabled(iStratumMiningEnabled) .isStratumMiningEnabled(iStratumMiningEnabled)
.stratumNetworkInterface(stratumNetworkInterface) .stratumNetworkInterface(stratumNetworkInterface)

@ -30,6 +30,7 @@ import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.datatypes.Wei; import org.hyperledger.besu.datatypes.Wei;
import org.hyperledger.besu.ethereum.eth.transactions.ImmutableTransactionPoolConfiguration; import org.hyperledger.besu.ethereum.eth.transactions.ImmutableTransactionPoolConfiguration;
import org.hyperledger.besu.ethereum.eth.transactions.TransactionPoolConfiguration; import org.hyperledger.besu.ethereum.eth.transactions.TransactionPoolConfiguration;
import org.hyperledger.besu.plugin.services.TransactionPoolValidatorService;
import org.hyperledger.besu.util.number.Fraction; import org.hyperledger.besu.util.number.Fraction;
import org.hyperledger.besu.util.number.Percentage; import org.hyperledger.besu.util.number.Percentage;
@ -51,12 +52,15 @@ public class TransactionPoolOptions implements CLIOptions<TransactionPoolConfigu
private static final String TX_POOL_ENABLE_SAVE_RESTORE = "--tx-pool-enable-save-restore"; private static final String TX_POOL_ENABLE_SAVE_RESTORE = "--tx-pool-enable-save-restore";
private static final String TX_POOL_SAVE_FILE = "--tx-pool-save-file"; private static final String TX_POOL_SAVE_FILE = "--tx-pool-save-file";
private static final String TX_POOL_PRICE_BUMP = "--tx-pool-price-bump"; private static final String TX_POOL_PRICE_BUMP = "--tx-pool-price-bump";
private static final String TX_POOL_BLOB_PRICE_BUMP = "--tx-pool-blob-price-bump";
private static final String RPC_TX_FEECAP = "--rpc-tx-feecap"; private static final String RPC_TX_FEECAP = "--rpc-tx-feecap";
private static final String STRICT_TX_REPLAY_PROTECTION_ENABLED_FLAG = private static final String STRICT_TX_REPLAY_PROTECTION_ENABLED_FLAG =
"--strict-tx-replay-protection-enabled"; "--strict-tx-replay-protection-enabled";
private static final String TX_POOL_PRIORITY_SENDERS = "--tx-pool-priority-senders"; private static final String TX_POOL_PRIORITY_SENDERS = "--tx-pool-priority-senders";
private static final String TX_POOL_MIN_GAS_PRICE = "--tx-pool-min-gas-price"; private static final String TX_POOL_MIN_GAS_PRICE = "--tx-pool-min-gas-price";
private TransactionPoolValidatorService transactionPoolValidatorService;
@CommandLine.Option( @CommandLine.Option(
names = {TX_POOL_IMPLEMENTATION}, names = {TX_POOL_IMPLEMENTATION},
paramLabel = "<Enum>", paramLabel = "<Enum>",
@ -99,6 +103,15 @@ public class TransactionPoolOptions implements CLIOptions<TransactionPoolConfigu
arity = "1") arity = "1")
private Percentage priceBump = TransactionPoolConfiguration.DEFAULT_PRICE_BUMP; private Percentage priceBump = TransactionPoolConfiguration.DEFAULT_PRICE_BUMP;
@CommandLine.Option(
names = {TX_POOL_BLOB_PRICE_BUMP},
paramLabel = "<Percentage>",
converter = PercentageConverter.class,
description =
"Blob price bump percentage to replace an already existing transaction blob tx (default: ${DEFAULT-VALUE})",
arity = "1")
private Percentage blobPriceBump = TransactionPoolConfiguration.DEFAULT_BLOB_PRICE_BUMP;
@CommandLine.Option( @CommandLine.Option(
names = {RPC_TX_FEECAP}, names = {RPC_TX_FEECAP},
description = description =
@ -252,6 +265,16 @@ public class TransactionPoolOptions implements CLIOptions<TransactionPoolConfigu
return new TransactionPoolOptions(); return new TransactionPoolOptions();
} }
/**
* Set the plugin txpool validator service
*
* @param transactionPoolValidatorService the plugin txpool validator service
*/
public void setPluginTransactionValidatorService(
final TransactionPoolValidatorService transactionPoolValidatorService) {
this.transactionPoolValidatorService = transactionPoolValidatorService;
}
/** /**
* Create Transaction Pool Options from Transaction Pool Configuration. * Create Transaction Pool Options from Transaction Pool Configuration.
* *
@ -264,6 +287,7 @@ public class TransactionPoolOptions implements CLIOptions<TransactionPoolConfigu
options.saveRestoreEnabled = config.getEnableSaveRestore(); options.saveRestoreEnabled = config.getEnableSaveRestore();
options.noLocalPriority = config.getNoLocalPriority(); options.noLocalPriority = config.getNoLocalPriority();
options.priceBump = config.getPriceBump(); options.priceBump = config.getPriceBump();
options.blobPriceBump = config.getBlobPriceBump();
options.txFeeCap = config.getTxFeeCap(); options.txFeeCap = config.getTxFeeCap();
options.saveFile = config.getSaveFile(); options.saveFile = config.getSaveFile();
options.strictTxReplayProtectionEnabled = config.getStrictTransactionReplayProtectionEnabled(); options.strictTxReplayProtectionEnabled = config.getStrictTransactionReplayProtectionEnabled();
@ -277,6 +301,7 @@ public class TransactionPoolOptions implements CLIOptions<TransactionPoolConfigu
config.getTxPoolLimitByAccountPercentage(); config.getTxPoolLimitByAccountPercentage();
options.sequencedOptions.txPoolMaxSize = config.getTxPoolMaxSize(); options.sequencedOptions.txPoolMaxSize = config.getTxPoolMaxSize();
options.sequencedOptions.pendingTxRetentionPeriod = config.getPendingTxRetentionPeriod(); options.sequencedOptions.pendingTxRetentionPeriod = config.getPendingTxRetentionPeriod();
options.transactionPoolValidatorService = config.getTransactionPoolValidatorService();
options.unstableOptions.txMessageKeepAliveSeconds = options.unstableOptions.txMessageKeepAliveSeconds =
config.getUnstable().getTxMessageKeepAliveSeconds(); config.getUnstable().getTxMessageKeepAliveSeconds();
options.unstableOptions.eth65TrxAnnouncedBufferingPeriod = options.unstableOptions.eth65TrxAnnouncedBufferingPeriod =
@ -320,6 +345,7 @@ public class TransactionPoolOptions implements CLIOptions<TransactionPoolConfigu
.enableSaveRestore(saveRestoreEnabled) .enableSaveRestore(saveRestoreEnabled)
.noLocalPriority(noLocalPriority) .noLocalPriority(noLocalPriority)
.priceBump(priceBump) .priceBump(priceBump)
.blobPriceBump(blobPriceBump)
.txFeeCap(txFeeCap) .txFeeCap(txFeeCap)
.saveFile(saveFile) .saveFile(saveFile)
.strictTransactionReplayProtectionEnabled(strictTxReplayProtectionEnabled) .strictTransactionReplayProtectionEnabled(strictTxReplayProtectionEnabled)
@ -331,6 +357,7 @@ public class TransactionPoolOptions implements CLIOptions<TransactionPoolConfigu
.txPoolLimitByAccountPercentage(sequencedOptions.txPoolLimitByAccountPercentage) .txPoolLimitByAccountPercentage(sequencedOptions.txPoolLimitByAccountPercentage)
.txPoolMaxSize(sequencedOptions.txPoolMaxSize) .txPoolMaxSize(sequencedOptions.txPoolMaxSize)
.pendingTxRetentionPeriod(sequencedOptions.pendingTxRetentionPeriod) .pendingTxRetentionPeriod(sequencedOptions.pendingTxRetentionPeriod)
.transactionPoolValidatorService(transactionPoolValidatorService)
.unstable( .unstable(
ImmutableTransactionPoolConfiguration.Unstable.builder() ImmutableTransactionPoolConfiguration.Unstable.builder()
.txMessageKeepAliveSeconds(unstableOptions.txMessageKeepAliveSeconds) .txMessageKeepAliveSeconds(unstableOptions.txMessageKeepAliveSeconds)

@ -17,6 +17,8 @@
package org.hyperledger.besu.cli.options.stable; package org.hyperledger.besu.cli.options.stable;
import static org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration.DEFAULT_BONSAI_MAX_LAYERS_TO_LOAD; import static org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration.DEFAULT_BONSAI_MAX_LAYERS_TO_LOAD;
import static org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration.DEFAULT_RECEIPT_COMPACTION_ENABLED;
import static org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration.Unstable.DEFAULT_BONSAI_CODE_USING_CODE_HASH_ENABLED;
import static org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration.Unstable.DEFAULT_BONSAI_LIMIT_TRIE_LOGS_ENABLED; import static org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration.Unstable.DEFAULT_BONSAI_LIMIT_TRIE_LOGS_ENABLED;
import static org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration.Unstable.DEFAULT_BONSAI_TRIE_LOG_PRUNING_WINDOW_SIZE; import static org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration.Unstable.DEFAULT_BONSAI_TRIE_LOG_PRUNING_WINDOW_SIZE;
import static org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration.Unstable.MINIMUM_BONSAI_TRIE_LOG_RETENTION_LIMIT; import static org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration.Unstable.MINIMUM_BONSAI_TRIE_LOG_RETENTION_LIMIT;
@ -24,10 +26,11 @@ import static org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration.
import org.hyperledger.besu.cli.options.CLIOptions; import org.hyperledger.besu.cli.options.CLIOptions;
import org.hyperledger.besu.cli.util.CommandLineUtils; import org.hyperledger.besu.cli.util.CommandLineUtils;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration; import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.ethereum.worldstate.DataStorageFormat;
import org.hyperledger.besu.ethereum.worldstate.ImmutableDataStorageConfiguration; import org.hyperledger.besu.ethereum.worldstate.ImmutableDataStorageConfiguration;
import org.hyperledger.besu.plugin.services.storage.DataStorageFormat;
import java.util.List; import java.util.List;
import java.util.Locale;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import picocli.CommandLine; import picocli.CommandLine;
@ -48,7 +51,7 @@ public class DataStorageOptions implements CLIOptions<DataStorageConfiguration>
description = description =
"Format to store trie data in. Either FOREST or BONSAI (default: ${DEFAULT-VALUE}).", "Format to store trie data in. Either FOREST or BONSAI (default: ${DEFAULT-VALUE}).",
arity = "1") arity = "1")
private DataStorageFormat dataStorageFormat = DataStorageFormat.FOREST; private DataStorageFormat dataStorageFormat = DataStorageFormat.BONSAI;
@Option( @Option(
names = {BONSAI_STORAGE_FORMAT_MAX_LAYERS_TO_LOAD, "--bonsai-maximum-back-layers-to-load"}, names = {BONSAI_STORAGE_FORMAT_MAX_LAYERS_TO_LOAD, "--bonsai-maximum-back-layers-to-load"},
@ -60,6 +63,12 @@ public class DataStorageOptions implements CLIOptions<DataStorageConfiguration>
arity = "1") arity = "1")
private Long bonsaiMaxLayersToLoad = DEFAULT_BONSAI_MAX_LAYERS_TO_LOAD; private Long bonsaiMaxLayersToLoad = DEFAULT_BONSAI_MAX_LAYERS_TO_LOAD;
@Option(
names = "--receipt-compaction-enabled",
description = "Enables compact storing of receipts (default: ${DEFAULT-VALUE}).",
arity = "1")
private Boolean receiptCompactionEnabled = DEFAULT_RECEIPT_COMPACTION_ENABLED;
@CommandLine.ArgGroup(validate = false) @CommandLine.ArgGroup(validate = false)
private final DataStorageOptions.Unstable unstableOptions = new Unstable(); private final DataStorageOptions.Unstable unstableOptions = new Unstable();
@ -74,7 +83,7 @@ public class DataStorageOptions implements CLIOptions<DataStorageConfiguration>
@CommandLine.Option( @CommandLine.Option(
hidden = true, hidden = true,
names = {BONSAI_LIMIT_TRIE_LOGS_ENABLED}, names = {BONSAI_LIMIT_TRIE_LOGS_ENABLED, "--Xbonsai-trie-log-pruning-enabled"},
description = description =
"Limit the number of trie logs that are retained. (default: ${DEFAULT-VALUE})") "Limit the number of trie logs that are retained. (default: ${DEFAULT-VALUE})")
private boolean bonsaiLimitTrieLogsEnabled = DEFAULT_BONSAI_LIMIT_TRIE_LOGS_ENABLED; private boolean bonsaiLimitTrieLogsEnabled = DEFAULT_BONSAI_LIMIT_TRIE_LOGS_ENABLED;
@ -85,6 +94,14 @@ public class DataStorageOptions implements CLIOptions<DataStorageConfiguration>
description = description =
"The max number of blocks to load and prune trie logs for at startup. (default: ${DEFAULT-VALUE})") "The max number of blocks to load and prune trie logs for at startup. (default: ${DEFAULT-VALUE})")
private int bonsaiTrieLogPruningWindowSize = DEFAULT_BONSAI_TRIE_LOG_PRUNING_WINDOW_SIZE; private int bonsaiTrieLogPruningWindowSize = DEFAULT_BONSAI_TRIE_LOG_PRUNING_WINDOW_SIZE;
@CommandLine.Option(
hidden = true,
names = {"--Xbonsai-code-using-code-hash-enabled"},
arity = "1",
description =
"Enables code storage using code hash instead of by account hash. (default: ${DEFAULT-VALUE})")
private boolean bonsaiCodeUsingCodeHashEnabled = DEFAULT_BONSAI_CODE_USING_CODE_HASH_ENABLED;
} }
/** /**
* Create data storage options. * Create data storage options.
@ -130,14 +147,23 @@ public class DataStorageOptions implements CLIOptions<DataStorageConfiguration>
} }
} }
static DataStorageOptions fromConfig(final DataStorageConfiguration domainObject) { /**
* Converts to options from the configuration
*
* @param domainObject to be reversed
* @return the options that correspond to the configuration
*/
public static DataStorageOptions fromConfig(final DataStorageConfiguration domainObject) {
final DataStorageOptions dataStorageOptions = DataStorageOptions.create(); final DataStorageOptions dataStorageOptions = DataStorageOptions.create();
dataStorageOptions.dataStorageFormat = domainObject.getDataStorageFormat(); dataStorageOptions.dataStorageFormat = domainObject.getDataStorageFormat();
dataStorageOptions.bonsaiMaxLayersToLoad = domainObject.getBonsaiMaxLayersToLoad(); dataStorageOptions.bonsaiMaxLayersToLoad = domainObject.getBonsaiMaxLayersToLoad();
dataStorageOptions.receiptCompactionEnabled = domainObject.getReceiptCompactionEnabled();
dataStorageOptions.unstableOptions.bonsaiLimitTrieLogsEnabled = dataStorageOptions.unstableOptions.bonsaiLimitTrieLogsEnabled =
domainObject.getUnstable().getBonsaiLimitTrieLogsEnabled(); domainObject.getUnstable().getBonsaiLimitTrieLogsEnabled();
dataStorageOptions.unstableOptions.bonsaiTrieLogPruningWindowSize = dataStorageOptions.unstableOptions.bonsaiTrieLogPruningWindowSize =
domainObject.getUnstable().getBonsaiTrieLogPruningWindowSize(); domainObject.getUnstable().getBonsaiTrieLogPruningWindowSize();
dataStorageOptions.unstableOptions.bonsaiCodeUsingCodeHashEnabled =
domainObject.getUnstable().getBonsaiCodeStoredByCodeHashEnabled();
return dataStorageOptions; return dataStorageOptions;
} }
@ -147,10 +173,12 @@ public class DataStorageOptions implements CLIOptions<DataStorageConfiguration>
return ImmutableDataStorageConfiguration.builder() return ImmutableDataStorageConfiguration.builder()
.dataStorageFormat(dataStorageFormat) .dataStorageFormat(dataStorageFormat)
.bonsaiMaxLayersToLoad(bonsaiMaxLayersToLoad) .bonsaiMaxLayersToLoad(bonsaiMaxLayersToLoad)
.receiptCompactionEnabled(receiptCompactionEnabled)
.unstable( .unstable(
ImmutableDataStorageConfiguration.Unstable.builder() ImmutableDataStorageConfiguration.Unstable.builder()
.bonsaiLimitTrieLogsEnabled(unstableOptions.bonsaiLimitTrieLogsEnabled) .bonsaiLimitTrieLogsEnabled(unstableOptions.bonsaiLimitTrieLogsEnabled)
.bonsaiTrieLogPruningWindowSize(unstableOptions.bonsaiTrieLogPruningWindowSize) .bonsaiTrieLogPruningWindowSize(unstableOptions.bonsaiTrieLogPruningWindowSize)
.bonsaiCodeStoredByCodeHashEnabled(unstableOptions.bonsaiCodeUsingCodeHashEnabled)
.build()) .build())
.build(); .build();
} }
@ -166,6 +194,6 @@ public class DataStorageOptions implements CLIOptions<DataStorageConfiguration>
* @return the normalized string * @return the normalized string
*/ */
public String normalizeDataStorageFormat() { public String normalizeDataStorageFormat() {
return StringUtils.capitalize(dataStorageFormat.toString().toLowerCase()); return StringUtils.capitalize(dataStorageFormat.toString().toLowerCase(Locale.ROOT));
} }
} }

@ -14,6 +14,7 @@
*/ */
package org.hyperledger.besu.cli.options.stable; package org.hyperledger.besu.cli.options.stable;
import java.util.Locale;
import java.util.Set; import java.util.Set;
import picocli.CommandLine; import picocli.CommandLine;
@ -52,8 +53,8 @@ public class LoggingLevelOption {
if ("FATAL".equalsIgnoreCase(logLevel)) { if ("FATAL".equalsIgnoreCase(logLevel)) {
System.out.println("FATAL level is deprecated"); System.out.println("FATAL level is deprecated");
this.logLevel = "ERROR"; this.logLevel = "ERROR";
} else if (ACCEPTED_VALUES.contains(logLevel.toUpperCase())) { } else if (ACCEPTED_VALUES.contains(logLevel.toUpperCase(Locale.ROOT))) {
this.logLevel = logLevel.toUpperCase(); this.logLevel = logLevel.toUpperCase(Locale.ROOT);
} else { } else {
throw new CommandLine.ParameterException( throw new CommandLine.ParameterException(
spec.commandLine(), "Unknown logging value: " + logLevel); spec.commandLine(), "Unknown logging value: " + logLevel);

@ -80,8 +80,8 @@ public class NetworkingOptions implements CLIOptions<NetworkingConfiguration> {
hidden = true, hidden = true,
names = PEER_LOWER_BOUND_FLAG, names = PEER_LOWER_BOUND_FLAG,
description = description =
"Lower bound on the target number of P2P connections (default: ${DEFAULT-VALUE})") "(Deprecated) Lower bound on the target number of P2P connections (default: ${DEFAULT-VALUE})")
private Integer peerLowerBoundConfig = DefaultCommandValues.DEFAULT_P2P_PEER_LOWER_BOUND; private final Integer peerLowerBoundConfig = DefaultCommandValues.DEFAULT_MAX_PEERS;
private NetworkingOptions() {} private NetworkingOptions() {}
@ -107,7 +107,6 @@ public class NetworkingOptions implements CLIOptions<NetworkingConfiguration> {
cliOptions.initiateConnectionsFrequencySec = cliOptions.initiateConnectionsFrequencySec =
networkingConfig.getInitiateConnectionsFrequencySec(); networkingConfig.getInitiateConnectionsFrequencySec();
cliOptions.dnsDiscoveryServerOverride = networkingConfig.getDnsDiscoveryServerOverride(); cliOptions.dnsDiscoveryServerOverride = networkingConfig.getDnsDiscoveryServerOverride();
cliOptions.peerLowerBoundConfig = networkingConfig.getPeerLowerBound();
return cliOptions; return cliOptions;
} }
@ -120,7 +119,6 @@ public class NetworkingOptions implements CLIOptions<NetworkingConfiguration> {
config.setDnsDiscoveryServerOverride(dnsDiscoveryServerOverride); config.setDnsDiscoveryServerOverride(dnsDiscoveryServerOverride);
config.getDiscovery().setDiscoveryV5Enabled(isPeerDiscoveryV5Enabled); config.getDiscovery().setDiscoveryV5Enabled(isPeerDiscoveryV5Enabled);
config.getDiscovery().setFilterOnEnrForkId(filterOnEnrForkId); config.getDiscovery().setFilterOnEnrForkId(filterOnEnrForkId);
config.setPeerLowerBound(peerLowerBoundConfig);
return config; return config;
} }

@ -82,6 +82,8 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
private static final String SNAP_FLAT_DB_HEALING_ENABLED_FLAG = private static final String SNAP_FLAT_DB_HEALING_ENABLED_FLAG =
"--Xsnapsync-synchronizer-flat-db-healing-enabled"; "--Xsnapsync-synchronizer-flat-db-healing-enabled";
private static final String SNAP_SERVER_ENABLED_FLAG = "--Xsnapsync-server-enabled";
private static final String CHECKPOINT_POST_MERGE_FLAG = "--Xcheckpoint-post-merge-enabled"; private static final String CHECKPOINT_POST_MERGE_FLAG = "--Xcheckpoint-post-merge-enabled";
/** /**
@ -106,7 +108,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = DOWNLOADER_CHANGE_TARGET_THRESHOLD_BY_HEIGHT_FLAG, names = DOWNLOADER_CHANGE_TARGET_THRESHOLD_BY_HEIGHT_FLAG,
hidden = true, hidden = true,
defaultValue = "200",
paramLabel = "<LONG>", paramLabel = "<LONG>",
description = description =
"Minimum height difference before switching fast sync download peers (default: ${DEFAULT-VALUE})") "Minimum height difference before switching fast sync download peers (default: ${DEFAULT-VALUE})")
@ -116,7 +117,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = DOWNLOADER_CHANGE_TARGET_THRESHOLD_BY_TD_FLAG, names = DOWNLOADER_CHANGE_TARGET_THRESHOLD_BY_TD_FLAG,
hidden = true, hidden = true,
defaultValue = "1000000000000000000",
paramLabel = "<UINT256>", paramLabel = "<UINT256>",
description = description =
"Minimum total difficulty difference before switching fast sync download peers (default: ${DEFAULT-VALUE})") "Minimum total difficulty difference before switching fast sync download peers (default: ${DEFAULT-VALUE})")
@ -126,7 +126,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = DOWNLOADER_HEADER_REQUEST_SIZE_FLAG, names = DOWNLOADER_HEADER_REQUEST_SIZE_FLAG,
hidden = true, hidden = true,
defaultValue = "200",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = "Number of headers to request per packet (default: ${DEFAULT-VALUE})") description = "Number of headers to request per packet (default: ${DEFAULT-VALUE})")
private int downloaderHeaderRequestSize = private int downloaderHeaderRequestSize =
@ -135,7 +134,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = DOWNLOADER_CHECKPOINT_TIMEOUTS_PERMITTED_FLAG, names = DOWNLOADER_CHECKPOINT_TIMEOUTS_PERMITTED_FLAG,
hidden = true, hidden = true,
defaultValue = "5",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = description =
"Number of tries to attempt to download checkpoints before stopping (default: ${DEFAULT-VALUE})") "Number of tries to attempt to download checkpoints before stopping (default: ${DEFAULT-VALUE})")
@ -145,7 +143,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = DOWNLOADER_CHAIN_SEGMENT_SIZE_FLAG, names = DOWNLOADER_CHAIN_SEGMENT_SIZE_FLAG,
hidden = true, hidden = true,
defaultValue = "200",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = "Distance between checkpoint headers (default: ${DEFAULT-VALUE})") description = "Distance between checkpoint headers (default: ${DEFAULT-VALUE})")
private int downloaderChainSegmentSize = private int downloaderChainSegmentSize =
@ -154,7 +151,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = DOWNLOADER_PARALLELISM_FLAG, names = DOWNLOADER_PARALLELISM_FLAG,
hidden = true, hidden = true,
defaultValue = "4",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = "Number of threads to provide to chain downloader (default: ${DEFAULT-VALUE})") description = "Number of threads to provide to chain downloader (default: ${DEFAULT-VALUE})")
private int downloaderParallelism = SynchronizerConfiguration.DEFAULT_DOWNLOADER_PARALLELISM; private int downloaderParallelism = SynchronizerConfiguration.DEFAULT_DOWNLOADER_PARALLELISM;
@ -162,7 +158,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = TRANSACTIONS_PARALLELISM_FLAG, names = TRANSACTIONS_PARALLELISM_FLAG,
hidden = true, hidden = true,
defaultValue = "2",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = description =
"Number of threads to commit to transaction processing (default: ${DEFAULT-VALUE})") "Number of threads to commit to transaction processing (default: ${DEFAULT-VALUE})")
@ -179,7 +174,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = PIVOT_DISTANCE_FROM_HEAD_FLAG, names = PIVOT_DISTANCE_FROM_HEAD_FLAG,
hidden = true, hidden = true,
defaultValue = "50",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = description =
"Distance from initial chain head to fast sync target (default: ${DEFAULT-VALUE})") "Distance from initial chain head to fast sync target (default: ${DEFAULT-VALUE})")
@ -188,7 +182,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = FULL_VALIDATION_RATE_FLAG, names = FULL_VALIDATION_RATE_FLAG,
hidden = true, hidden = true,
defaultValue = "0.1",
paramLabel = "<FLOAT>", paramLabel = "<FLOAT>",
description = "Fraction of headers fast sync will fully validate (default: ${DEFAULT-VALUE})") description = "Fraction of headers fast sync will fully validate (default: ${DEFAULT-VALUE})")
private float fastSyncFullValidationRate = SynchronizerConfiguration.DEFAULT_FULL_VALIDATION_RATE; private float fastSyncFullValidationRate = SynchronizerConfiguration.DEFAULT_FULL_VALIDATION_RATE;
@ -196,7 +189,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = WORLD_STATE_HASH_COUNT_PER_REQUEST_FLAG, names = WORLD_STATE_HASH_COUNT_PER_REQUEST_FLAG,
hidden = true, hidden = true,
defaultValue = "384",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = "Fast sync world state hashes queried per request (default: ${DEFAULT-VALUE})") description = "Fast sync world state hashes queried per request (default: ${DEFAULT-VALUE})")
private int worldStateHashCountPerRequest = private int worldStateHashCountPerRequest =
@ -205,7 +197,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = WORLD_STATE_REQUEST_PARALLELISM_FLAG, names = WORLD_STATE_REQUEST_PARALLELISM_FLAG,
hidden = true, hidden = true,
defaultValue = "10",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = description =
"Number of concurrent requests to use when downloading fast sync world state (default: ${DEFAULT-VALUE})") "Number of concurrent requests to use when downloading fast sync world state (default: ${DEFAULT-VALUE})")
@ -215,7 +206,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = WORLD_STATE_MAX_REQUESTS_WITHOUT_PROGRESS_FLAG, names = WORLD_STATE_MAX_REQUESTS_WITHOUT_PROGRESS_FLAG,
hidden = true, hidden = true,
defaultValue = "1000",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = description =
"Number of world state requests accepted without progress before considering the download stalled (default: ${DEFAULT-VALUE})") "Number of world state requests accepted without progress before considering the download stalled (default: ${DEFAULT-VALUE})")
@ -225,7 +215,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = WORLD_STATE_MIN_MILLIS_BEFORE_STALLING_FLAG, names = WORLD_STATE_MIN_MILLIS_BEFORE_STALLING_FLAG,
hidden = true, hidden = true,
defaultValue = "300000",
paramLabel = "<LONG>", paramLabel = "<LONG>",
description = description =
"Minimum time in ms without progress before considering a world state download as stalled (default: ${DEFAULT-VALUE})") "Minimum time in ms without progress before considering a world state download as stalled (default: ${DEFAULT-VALUE})")
@ -235,7 +224,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = WORLD_STATE_TASK_CACHE_SIZE_FLAG, names = WORLD_STATE_TASK_CACHE_SIZE_FLAG,
hidden = true, hidden = true,
defaultValue = "1000000",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = description =
"The max number of pending node data requests cached in-memory during fast sync world state download. (default: ${DEFAULT-VALUE})") "The max number of pending node data requests cached in-memory during fast sync world state download. (default: ${DEFAULT-VALUE})")
@ -245,7 +233,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = SNAP_PIVOT_BLOCK_WINDOW_VALIDITY_FLAG, names = SNAP_PIVOT_BLOCK_WINDOW_VALIDITY_FLAG,
hidden = true, hidden = true,
defaultValue = "126",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = description =
"The size of the pivot block window before having to change it (default: ${DEFAULT-VALUE})") "The size of the pivot block window before having to change it (default: ${DEFAULT-VALUE})")
@ -255,7 +242,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = SNAP_PIVOT_BLOCK_DISTANCE_BEFORE_CACHING_FLAG, names = SNAP_PIVOT_BLOCK_DISTANCE_BEFORE_CACHING_FLAG,
hidden = true, hidden = true,
defaultValue = "60",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = description =
"The distance from the head before loading a pivot block into the cache to have a ready pivot block when the window is finished (default: ${DEFAULT-VALUE})") "The distance from the head before loading a pivot block into the cache to have a ready pivot block when the window is finished (default: ${DEFAULT-VALUE})")
@ -265,7 +251,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = SNAP_STORAGE_COUNT_PER_REQUEST_FLAG, names = SNAP_STORAGE_COUNT_PER_REQUEST_FLAG,
hidden = true, hidden = true,
defaultValue = "384",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = "Snap sync storage queried per request (default: ${DEFAULT-VALUE})") description = "Snap sync storage queried per request (default: ${DEFAULT-VALUE})")
private int snapsyncStorageCountPerRequest = private int snapsyncStorageCountPerRequest =
@ -274,7 +259,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = SNAP_BYTECODE_COUNT_PER_REQUEST_FLAG, names = SNAP_BYTECODE_COUNT_PER_REQUEST_FLAG,
hidden = true, hidden = true,
defaultValue = "84",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = "Snap sync bytecode queried per request (default: ${DEFAULT-VALUE})") description = "Snap sync bytecode queried per request (default: ${DEFAULT-VALUE})")
private int snapsyncBytecodeCountPerRequest = private int snapsyncBytecodeCountPerRequest =
@ -283,7 +267,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = SNAP_TRIENODE_COUNT_PER_REQUEST_FLAG, names = SNAP_TRIENODE_COUNT_PER_REQUEST_FLAG,
hidden = true, hidden = true,
defaultValue = "384",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = "Snap sync trie node queried per request (default: ${DEFAULT-VALUE})") description = "Snap sync trie node queried per request (default: ${DEFAULT-VALUE})")
private int snapsyncTrieNodeCountPerRequest = private int snapsyncTrieNodeCountPerRequest =
@ -292,7 +275,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = SNAP_FLAT_ACCOUNT_HEALED_COUNT_PER_REQUEST_FLAG, names = SNAP_FLAT_ACCOUNT_HEALED_COUNT_PER_REQUEST_FLAG,
hidden = true, hidden = true,
defaultValue = "128",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = description =
"Snap sync flat accounts verified and healed per request (default: ${DEFAULT-VALUE})") "Snap sync flat accounts verified and healed per request (default: ${DEFAULT-VALUE})")
@ -302,7 +284,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = SNAP_FLAT_STORAGE_HEALED_COUNT_PER_REQUEST_FLAG, names = SNAP_FLAT_STORAGE_HEALED_COUNT_PER_REQUEST_FLAG,
hidden = true, hidden = true,
defaultValue = "1024",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = description =
"Snap sync flat slots verified and healed per request (default: ${DEFAULT-VALUE})") "Snap sync flat slots verified and healed per request (default: ${DEFAULT-VALUE})")
@ -312,12 +293,18 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option( @CommandLine.Option(
names = SNAP_FLAT_DB_HEALING_ENABLED_FLAG, names = SNAP_FLAT_DB_HEALING_ENABLED_FLAG,
hidden = true, hidden = true,
defaultValue = "false",
paramLabel = "<Boolean>", paramLabel = "<Boolean>",
description = "Snap sync flat db healing enabled (default: ${DEFAULT-VALUE})") description = "Snap sync flat db healing enabled (default: ${DEFAULT-VALUE})")
private Boolean snapsyncFlatDbHealingEnabled = private Boolean snapsyncFlatDbHealingEnabled =
SnapSyncConfiguration.DEFAULT_IS_FLAT_DB_HEALING_ENABLED; SnapSyncConfiguration.DEFAULT_IS_FLAT_DB_HEALING_ENABLED;
@CommandLine.Option(
names = SNAP_SERVER_ENABLED_FLAG,
hidden = true,
paramLabel = "<Boolean>",
description = "Snap sync server enabled (default: ${DEFAULT-VALUE})")
private Boolean snapsyncServerEnabled = SnapSyncConfiguration.DEFAULT_SNAP_SERVER_ENABLED;
@CommandLine.Option( @CommandLine.Option(
names = {CHECKPOINT_POST_MERGE_FLAG}, names = {CHECKPOINT_POST_MERGE_FLAG},
hidden = true, hidden = true,
@ -328,21 +315,30 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
private SynchronizerOptions() {} private SynchronizerOptions() {}
/** /**
* Create synchronizer options. * Flag to know whether the flat db healing feature is enabled or disabled.
* *
* @return the synchronizer options * @return true is the flat db healing is enabled
*/ */
public static SynchronizerOptions create() { public boolean isSnapsyncFlatDbHealingEnabled() {
return new SynchronizerOptions(); return snapsyncFlatDbHealingEnabled;
} }
/** /**
* Flag to know whether the flat db healing feature is enabled or disabled. * Flag to know whether the Snap sync server feature is enabled or disabled.
* *
* @return true is the flat db healing is enabled * @return true if snap sync server is enabled
*/ */
public boolean isSnapsyncFlatDbHealingEnabled() { public boolean isSnapsyncServerEnabled() {
return snapsyncFlatDbHealingEnabled; return snapsyncServerEnabled;
}
/**
* Create synchronizer options.
*
* @return the synchronizer options
*/
public static SynchronizerOptions create() {
return new SynchronizerOptions();
} }
/** /**
@ -420,6 +416,7 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
.localFlatAccountCountToHealPerRequest(snapsyncFlatAccountHealedCountPerRequest) .localFlatAccountCountToHealPerRequest(snapsyncFlatAccountHealedCountPerRequest)
.localFlatStorageCountToHealPerRequest(snapsyncFlatStorageHealedCountPerRequest) .localFlatStorageCountToHealPerRequest(snapsyncFlatStorageHealedCountPerRequest)
.isFlatDbHealingEnabled(snapsyncFlatDbHealingEnabled) .isFlatDbHealingEnabled(snapsyncFlatDbHealingEnabled)
.isSnapServerEnabled(snapsyncServerEnabled)
.build()); .build());
builder.checkpointPostMergeEnabled(checkpointPostMergeSyncEnabled); builder.checkpointPostMergeEnabled(checkpointPostMergeSyncEnabled);
@ -478,7 +475,9 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
SNAP_FLAT_ACCOUNT_HEALED_COUNT_PER_REQUEST_FLAG, SNAP_FLAT_ACCOUNT_HEALED_COUNT_PER_REQUEST_FLAG,
OptionParser.format(snapsyncFlatAccountHealedCountPerRequest), OptionParser.format(snapsyncFlatAccountHealedCountPerRequest),
SNAP_FLAT_STORAGE_HEALED_COUNT_PER_REQUEST_FLAG, SNAP_FLAT_STORAGE_HEALED_COUNT_PER_REQUEST_FLAG,
OptionParser.format(snapsyncFlatStorageHealedCountPerRequest))); OptionParser.format(snapsyncFlatStorageHealedCountPerRequest),
SNAP_SERVER_ENABLED_FLAG,
OptionParser.format(snapsyncServerEnabled)));
} }
return value; return value;
} }

@ -178,11 +178,9 @@ class GenerateBlockchainConfig implements Runnable {
if (!SIGNATURE_ALGORITHM.get().isValidPublicKey(publicKey)) { if (!SIGNATURE_ALGORITHM.get().isValidPublicKey(publicKey)) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
new StringBuilder() publicKeyText
.append(publicKeyText) + " is not a valid public key for elliptic curve "
.append(" is not a valid public key for elliptic curve ") + SIGNATURE_ALGORITHM.get().getCurveName());
.append(SIGNATURE_ALGORITHM.get().getCurveName())
.toString());
} }
writeKeypair(publicKey, null); writeKeypair(publicKey, null);
@ -297,10 +295,7 @@ class GenerateBlockchainConfig implements Runnable {
SignatureAlgorithmFactory.setInstance(SignatureAlgorithmType.create(ecCurve.get())); SignatureAlgorithmFactory.setInstance(SignatureAlgorithmType.create(ecCurve.get()));
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
new StringBuilder() "Invalid parameter for ecCurve in genesis config: " + e.getMessage());
.append("Invalid parameter for ecCurve in genesis config: ")
.append(e.getMessage())
.toString());
} }
} }

@ -22,10 +22,6 @@ import java.io.PrintWriter;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.rocksdb.ColumnFamilyDescriptor;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.Options;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException; import org.rocksdb.RocksDBException;
import picocli.CommandLine; import picocli.CommandLine;
import picocli.CommandLine.Command; import picocli.CommandLine.Command;
@ -37,12 +33,12 @@ import picocli.CommandLine.ParentCommand;
description = "Print RocksDB information", description = "Print RocksDB information",
mixinStandardHelpOptions = true, mixinStandardHelpOptions = true,
versionProvider = VersionProvider.class, versionProvider = VersionProvider.class,
subcommands = {RocksDbSubCommand.RocksDbUsage.class}) subcommands = {RocksDbSubCommand.RocksDbUsage.class, RocksDbSubCommand.RocksDbStats.class})
public class RocksDbSubCommand implements Runnable { public class RocksDbSubCommand implements Runnable {
@SuppressWarnings("unused") @SuppressWarnings("unused")
@ParentCommand @ParentCommand
private StorageSubCommand parentCommand; private StorageSubCommand storageSubCommand;
@SuppressWarnings("unused") @SuppressWarnings("unused")
@CommandLine.Spec @CommandLine.Spec
@ -66,7 +62,7 @@ public class RocksDbSubCommand implements Runnable {
@SuppressWarnings("unused") @SuppressWarnings("unused")
@ParentCommand @ParentCommand
private RocksDbSubCommand parentCommand; private RocksDbSubCommand rocksDbSubCommand;
@Override @Override
public void run() { public void run() {
@ -74,42 +70,68 @@ public class RocksDbSubCommand implements Runnable {
final PrintWriter out = spec.commandLine().getOut(); final PrintWriter out = spec.commandLine().getOut();
final String dbPath = final String dbPath =
parentCommand rocksDbSubCommand
.parentCommand .storageSubCommand
.parentCommand .besuCommand
.dataDir() .dataDir()
.toString() .resolve(DATABASE_PATH)
.concat("/") .toString();
.concat(DATABASE_PATH);
RocksDbHelper.printTableHeader(out);
RocksDB.loadLibrary();
Options options = new Options(); final List<RocksDbHelper.ColumnFamilyUsage> columnFamilyUsages = new ArrayList<>();
options.setCreateIfMissing(true); RocksDbHelper.forEachColumnFamily(
dbPath,
// Open the RocksDB database with multiple column families (rocksdb, cfHandle) -> {
List<byte[]> cfNames; try {
try { columnFamilyUsages.add(
cfNames = RocksDB.listColumnFamilies(options, dbPath); RocksDbHelper.getAndPrintUsageForColumnFamily(rocksdb, cfHandle, out));
} catch (RocksDBException e) { } catch (RocksDBException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
final List<ColumnFamilyHandle> cfHandles = new ArrayList<>(); });
final List<ColumnFamilyDescriptor> cfDescriptors = new ArrayList<>(); RocksDbHelper.printTotals(out, columnFamilyUsages);
for (byte[] cfName : cfNames) { }
cfDescriptors.add(new ColumnFamilyDescriptor(cfName)); }
}
RocksDbUsageHelper.printTableHeader(out); @Command(
try (final RocksDB rocksdb = RocksDB.openReadOnly(dbPath, cfDescriptors, cfHandles)) { name = "x-stats",
for (ColumnFamilyHandle cfHandle : cfHandles) { description = "Print rocksdb stats",
RocksDbUsageHelper.printUsageForColumnFamily(rocksdb, cfHandle, out); mixinStandardHelpOptions = true,
} versionProvider = VersionProvider.class)
} catch (RocksDBException e) { static class RocksDbStats implements Runnable {
throw new RuntimeException(e);
} finally { @SuppressWarnings("unused")
for (ColumnFamilyHandle cfHandle : cfHandles) { @CommandLine.Spec
cfHandle.close(); private CommandLine.Model.CommandSpec spec;
}
} @SuppressWarnings("unused")
@ParentCommand
private RocksDbSubCommand rocksDbSubCommand;
@Override
public void run() {
final PrintWriter out = spec.commandLine().getOut();
final String dbPath =
rocksDbSubCommand
.storageSubCommand
.besuCommand
.dataDir()
.resolve(DATABASE_PATH)
.toString();
out.println("Column Family Stats...");
RocksDbHelper.forEachColumnFamily(
dbPath,
(rocksdb, cfHandle) -> {
try {
RocksDbHelper.printStatsForColumnFamily(rocksdb, cfHandle, out);
} catch (RocksDBException e) {
throw new RuntimeException(e);
}
});
} }
} }
} }

@ -1,105 +0,0 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.subcommands.storage;
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier;
import java.io.PrintWriter;
import org.bouncycastle.util.Arrays;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** RocksDB Usage subcommand helper methods for formatting and printing. */
public class RocksDbUsageHelper {
private static final Logger LOG = LoggerFactory.getLogger(RocksDbUsageHelper.class);
static void printUsageForColumnFamily(
final RocksDB rocksdb, final ColumnFamilyHandle cfHandle, final PrintWriter out)
throws RocksDBException, NumberFormatException {
final String size = rocksdb.getProperty(cfHandle, "rocksdb.estimate-live-data-size");
final String numberOfKeys = rocksdb.getProperty(cfHandle, "rocksdb.estimate-num-keys");
boolean emptyColumnFamily = false;
if (!size.isBlank() && !numberOfKeys.isBlank()) {
try {
final long sizeLong = Long.parseLong(size);
final long numberOfKeysLong = Long.parseLong(numberOfKeys);
final String totalSstFilesSize =
rocksdb.getProperty(cfHandle, "rocksdb.total-sst-files-size");
final long totalSstFilesSizeLong =
!totalSstFilesSize.isBlank() ? Long.parseLong(totalSstFilesSize) : 0;
if (sizeLong == 0 && numberOfKeysLong == 0) {
emptyColumnFamily = true;
}
if (!emptyColumnFamily) {
printLine(
out,
getNameById(cfHandle.getName()),
rocksdb.getProperty(cfHandle, "rocksdb.estimate-num-keys"),
formatOutputSize(sizeLong),
formatOutputSize(totalSstFilesSizeLong));
}
} catch (NumberFormatException e) {
LOG.error("Failed to parse string into long: " + e.getMessage());
}
}
}
private static String formatOutputSize(final long size) {
if (size > (1024 * 1024 * 1024)) {
long sizeInGiB = size / (1024 * 1024 * 1024);
return sizeInGiB + " GiB";
} else if (size > (1024 * 1024)) {
long sizeInMiB = size / (1024 * 1024);
return sizeInMiB + " MiB";
} else if (size > 1024) {
long sizeInKiB = size / 1024;
return sizeInKiB + " KiB";
} else {
return size + " B";
}
}
private static String getNameById(final byte[] id) {
for (KeyValueSegmentIdentifier segment : KeyValueSegmentIdentifier.values()) {
if (Arrays.areEqual(segment.getId(), id)) {
return segment.getName();
}
}
return null; // id not found
}
static void printTableHeader(final PrintWriter out) {
out.format(
"| Column Family | Keys | Column Size | SST Files Size |\n");
out.format(
"|--------------------------------|-----------------|--------------|-----------------|\n");
}
static void printLine(
final PrintWriter out,
final String cfName,
final String keys,
final String columnSize,
final String sstFilesSize) {
final String format = "| %-30s | %-15s | %-12s | %-15s |\n";
out.format(format, cfName, keys, columnSize, sstFilesSize);
}
}

@ -24,6 +24,7 @@ import static org.hyperledger.besu.ethereum.chain.VariablesStorage.Keys.SEQ_NO_S
import org.hyperledger.besu.cli.BesuCommand; import org.hyperledger.besu.cli.BesuCommand;
import org.hyperledger.besu.cli.util.VersionProvider; import org.hyperledger.besu.cli.util.VersionProvider;
import org.hyperledger.besu.controller.BesuController;
import org.hyperledger.besu.ethereum.rlp.RLP; import org.hyperledger.besu.ethereum.rlp.RLP;
import org.hyperledger.besu.ethereum.storage.StorageProvider; import org.hyperledger.besu.ethereum.storage.StorageProvider;
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier; import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier;
@ -48,7 +49,8 @@ import picocli.CommandLine.Spec;
subcommands = { subcommands = {
StorageSubCommand.RevertVariablesStorage.class, StorageSubCommand.RevertVariablesStorage.class,
RocksDbSubCommand.class, RocksDbSubCommand.class,
TrieLogSubCommand.class TrieLogSubCommand.class,
RevertMetadataSubCommand.class
}) })
public class StorageSubCommand implements Runnable { public class StorageSubCommand implements Runnable {
@ -57,7 +59,7 @@ public class StorageSubCommand implements Runnable {
@SuppressWarnings("unused") @SuppressWarnings("unused")
@ParentCommand @ParentCommand
BesuCommand parentCommand; BesuCommand besuCommand;
@SuppressWarnings("unused") @SuppressWarnings("unused")
@Spec @Spec
@ -97,21 +99,19 @@ public class StorageSubCommand implements Runnable {
public void run() { public void run() {
checkNotNull(parentCommand); checkNotNull(parentCommand);
final var storageProvider = getStorageProvider(); final var storageProvider = createBesuController().getStorageProvider();
revert(storageProvider); revert(storageProvider);
} }
private StorageProvider getStorageProvider() { private BesuController createBesuController() {
// init collection of ignorable segments return parentCommand.besuCommand.buildController();
parentCommand.parentCommand.setIgnorableStorageSegments();
return parentCommand.parentCommand.getStorageProvider();
} }
private void revert(final StorageProvider storageProvider) { private void revert(final StorageProvider storageProvider) {
final var variablesStorage = storageProvider.createVariablesStorage(); final var variablesStorage = storageProvider.createVariablesStorage();
final var blockchainStorage = final var blockchainStorage =
getStorageProvider().getStorageBySegmentIdentifier(KeyValueSegmentIdentifier.BLOCKCHAIN); storageProvider.getStorageBySegmentIdentifier(KeyValueSegmentIdentifier.BLOCKCHAIN);
final var blockchainUpdater = blockchainStorage.startTransaction(); final var blockchainUpdater = blockchainStorage.startTransaction();
final var variablesUpdater = variablesStorage.updater(); final var variablesUpdater = variablesStorage.updater();

@ -60,11 +60,12 @@ public class TrieLogHelper {
private static final int ROCKSDB_MAX_INSERTS_PER_TRANSACTION = 1000; private static final int ROCKSDB_MAX_INSERTS_PER_TRANSACTION = 1000;
private static final Logger LOG = LoggerFactory.getLogger(TrieLogHelper.class); private static final Logger LOG = LoggerFactory.getLogger(TrieLogHelper.class);
void prune( boolean prune(
final DataStorageConfiguration config, final DataStorageConfiguration config,
final BonsaiWorldStateKeyValueStorage rootWorldStateStorage, final BonsaiWorldStateKeyValueStorage rootWorldStateStorage,
final MutableBlockchain blockchain, final MutableBlockchain blockchain,
final Path dataDirectoryPath) { final Path dataDirectoryPath) {
final String batchFileNameBase = final String batchFileNameBase =
dataDirectoryPath.resolve(DATABASE_PATH).resolve(TRIE_LOG_FILE).toString(); dataDirectoryPath.resolve(DATABASE_PATH).resolve(TRIE_LOG_FILE).toString();
@ -82,10 +83,11 @@ public class TrieLogHelper {
lastBlockNumberToRetainTrieLogsFor, lastBlockNumberToRetainTrieLogsFor,
rootWorldStateStorage, rootWorldStateStorage,
layersToRetain)) { layersToRetain)) {
return; return false;
} }
final long numberOfBatches = calculateNumberOfBatches(layersToRetain); final long numberOfBatches = calculateNumberOfBatches(layersToRetain);
LOG.info("Retain {} trie logs, processing in {} batches", layersToRetain, numberOfBatches);
processTrieLogBatches( processTrieLogBatches(
rootWorldStateStorage, rootWorldStateStorage,
@ -102,7 +104,7 @@ public class TrieLogHelper {
.count(); .count();
if (countAfterPrune == layersToRetain) { if (countAfterPrune == layersToRetain) {
if (deleteFiles(batchFileNameBase, numberOfBatches)) { if (deleteFiles(batchFileNameBase, numberOfBatches)) {
LOG.info("Prune ran successfully. Enjoy some disk space back! \uD83D\uDE80"); return true;
} else { } else {
throw new IllegalStateException( throw new IllegalStateException(
"There was an error deleting the trie log backup files. Please ensure besu is working before deleting them manually."); "There was an error deleting the trie log backup files. Please ensure besu is working before deleting them manually.");
@ -110,8 +112,11 @@ public class TrieLogHelper {
} else { } else {
throw new IllegalStateException( throw new IllegalStateException(
String.format( String.format(
"Remaining trie logs (%d) did not match %s (%d). Trie logs backup files have not been deleted, it is safe to rerun the subcommand.", "Remaining trie logs (%d) did not match %s (%d). Trie logs backup files (in %s) have not been deleted, it is safe to rerun the subcommand.",
countAfterPrune, BONSAI_STORAGE_FORMAT_MAX_LAYERS_TO_LOAD, layersToRetain)); countAfterPrune,
BONSAI_STORAGE_FORMAT_MAX_LAYERS_TO_LOAD,
layersToRetain,
batchFileNameBase));
} }
} }
@ -131,7 +136,7 @@ public class TrieLogHelper {
final List<Hash> trieLogKeys = final List<Hash> trieLogKeys =
getTrieLogKeysForBlocks(blockchain, firstBlockOfBatch, lastBlockOfBatch); getTrieLogKeysForBlocks(blockchain, firstBlockOfBatch, lastBlockOfBatch);
LOG.info("Saving trie logs to retain in file (batch {})...", batchNumber); LOG.info("Saving trie logs to retain in file {} (batch {})...", batchFileName, batchNumber);
saveTrieLogBatches(batchFileName, rootWorldStateStorage, trieLogKeys); saveTrieLogBatches(batchFileName, rootWorldStateStorage, trieLogKeys);
} }
@ -319,7 +324,7 @@ public class TrieLogHelper {
File file = new File(batchFileName); File file = new File(batchFileName);
if (file.exists()) { if (file.exists()) {
LOG.error("File already exists, skipping file creation"); LOG.warn("File already exists {}, skipping file creation", batchFileName);
return; return;
} }
@ -354,7 +359,7 @@ public class TrieLogHelper {
final String batchFileName) { final String batchFileName) {
File file = new File(batchFileName); File file = new File(batchFileName);
if (file.exists()) { if (file.exists()) {
LOG.error("File already exists, skipping file creation"); LOG.warn("File already exists {}, skipping file creation", batchFileName);
return; return;
} }

@ -16,6 +16,9 @@ package org.hyperledger.besu.cli.subcommands.storage;
import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkNotNull;
import static org.hyperledger.besu.cli.subcommands.storage.RocksDbHelper.formatOutputSize;
import static org.hyperledger.besu.controller.BesuController.DATABASE_PATH;
import static org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.TRIE_LOG_STORAGE;
import org.hyperledger.besu.cli.util.VersionProvider; import org.hyperledger.besu.cli.util.VersionProvider;
import org.hyperledger.besu.controller.BesuController; import org.hyperledger.besu.controller.BesuController;
@ -25,16 +28,20 @@ import org.hyperledger.besu.ethereum.storage.StorageProvider;
import org.hyperledger.besu.ethereum.trie.diffbased.bonsai.storage.BonsaiWorldStateKeyValueStorage; import org.hyperledger.besu.ethereum.trie.diffbased.bonsai.storage.BonsaiWorldStateKeyValueStorage;
import org.hyperledger.besu.ethereum.trie.diffbased.common.trielog.TrieLogPruner; import org.hyperledger.besu.ethereum.trie.diffbased.common.trielog.TrieLogPruner;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration; import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.ethereum.worldstate.DataStorageFormat; import org.hyperledger.besu.plugin.services.storage.DataStorageFormat;
import java.io.IOException; import java.io.IOException;
import java.io.PrintWriter; import java.io.PrintWriter;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.Paths; import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.config.Configurator; import org.apache.logging.log4j.core.config.Configurator;
import org.rocksdb.RocksDBException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import picocli.CommandLine; import picocli.CommandLine;
import picocli.CommandLine.Command; import picocli.CommandLine.Command;
@ -54,6 +61,8 @@ import picocli.CommandLine.ParentCommand;
}) })
public class TrieLogSubCommand implements Runnable { public class TrieLogSubCommand implements Runnable {
private static final Logger LOG = LoggerFactory.getLogger(TrieLogSubCommand.class);
@SuppressWarnings("UnusedVariable") @SuppressWarnings("UnusedVariable")
@ParentCommand @ParentCommand
private static StorageSubCommand parentCommand; private static StorageSubCommand parentCommand;
@ -69,7 +78,7 @@ public class TrieLogSubCommand implements Runnable {
} }
private static BesuController createBesuController() { private static BesuController createBesuController() {
return parentCommand.parentCommand.buildController(); return parentCommand.besuCommand.buildController();
} }
@Command( @Command(
@ -123,13 +132,72 @@ public class TrieLogSubCommand implements Runnable {
final TrieLogContext context = getTrieLogContext(); final TrieLogContext context = getTrieLogContext();
final Path dataDirectoryPath = final Path dataDirectoryPath =
Paths.get( Paths.get(
TrieLogSubCommand.parentCommand.parentCommand.dataDir().toAbsolutePath().toString()); TrieLogSubCommand.parentCommand.besuCommand.dataDir().toAbsolutePath().toString());
LOG.info("Estimating trie logs size before pruning...");
long sizeBefore = estimatedSizeOfTrieLogs();
LOG.info("Estimated trie logs size before pruning: {}", formatOutputSize(sizeBefore));
LOG.info("Starting pruning...");
final TrieLogHelper trieLogHelper = new TrieLogHelper(); final TrieLogHelper trieLogHelper = new TrieLogHelper();
trieLogHelper.prune( boolean success =
context.config(), trieLogHelper.prune(
context.rootWorldStateStorage(), context.config(),
context.blockchain(), context.rootWorldStateStorage(),
dataDirectoryPath); context.blockchain(),
dataDirectoryPath);
if (success) {
LOG.info("Finished pruning. Re-estimating trie logs size...");
final long sizeAfter = estimatedSizeOfTrieLogs();
LOG.info(
"Estimated trie logs size after pruning: {} (0 B estimate is normal when using default settings)",
formatOutputSize(sizeAfter));
long estimatedSaving = sizeBefore - sizeAfter;
LOG.info(
"Prune ran successfully. We estimate you freed up {}! \uD83D\uDE80",
formatOutputSize(estimatedSaving));
spec.commandLine()
.getOut()
.printf(
"Prune ran successfully. We estimate you freed up %s! \uD83D\uDE80\n",
formatOutputSize(estimatedSaving));
}
}
private long estimatedSizeOfTrieLogs() {
final String dbPath =
TrieLogSubCommand.parentCommand
.besuCommand
.dataDir()
.toString()
.concat("/")
.concat(DATABASE_PATH);
AtomicLong estimatedSaving = new AtomicLong(0L);
try {
RocksDbHelper.forEachColumnFamily(
dbPath,
(rocksdb, cfHandle) -> {
try {
if (Arrays.equals(cfHandle.getName(), TRIE_LOG_STORAGE.getId())) {
final long sstSize =
Long.parseLong(rocksdb.getProperty(cfHandle, "rocksdb.total-sst-files-size"));
final long blobSize =
Long.parseLong(rocksdb.getProperty(cfHandle, "rocksdb.total-blob-file-size"));
estimatedSaving.set(sstSize + blobSize);
}
} catch (RocksDBException | NumberFormatException e) {
throw new RuntimeException(e);
}
});
} catch (Exception e) {
LOG.warn("Error while estimating trie log size, returning 0 for estimate", e);
return 0L;
}
return estimatedSaving.get();
} }
} }
@ -169,7 +237,7 @@ public class TrieLogSubCommand implements Runnable {
trieLogFilePath = trieLogFilePath =
Paths.get( Paths.get(
TrieLogSubCommand.parentCommand TrieLogSubCommand.parentCommand
.parentCommand .besuCommand
.dataDir() .dataDir()
.resolve("trie-logs.bin") .resolve("trie-logs.bin")
.toAbsolutePath() .toAbsolutePath()
@ -219,7 +287,7 @@ public class TrieLogSubCommand implements Runnable {
trieLogFilePath = trieLogFilePath =
Paths.get( Paths.get(
TrieLogSubCommand.parentCommand TrieLogSubCommand.parentCommand
.parentCommand .besuCommand
.dataDir() .dataDir()
.resolve("trie-logs.bin") .resolve("trie-logs.bin")
.toAbsolutePath() .toAbsolutePath()

@ -31,6 +31,7 @@ import org.hyperledger.besu.ethereum.GasLimitCalculator;
import org.hyperledger.besu.ethereum.ProtocolContext; import org.hyperledger.besu.ethereum.ProtocolContext;
import org.hyperledger.besu.ethereum.api.jsonrpc.methods.JsonRpcMethods; import org.hyperledger.besu.ethereum.api.jsonrpc.methods.JsonRpcMethods;
import org.hyperledger.besu.ethereum.blockcreation.MiningCoordinator; import org.hyperledger.besu.ethereum.blockcreation.MiningCoordinator;
import org.hyperledger.besu.ethereum.chain.BadBlockManager;
import org.hyperledger.besu.ethereum.chain.Blockchain; import org.hyperledger.besu.ethereum.chain.Blockchain;
import org.hyperledger.besu.ethereum.chain.BlockchainStorage; import org.hyperledger.besu.ethereum.chain.BlockchainStorage;
import org.hyperledger.besu.ethereum.chain.ChainDataPruner; import org.hyperledger.besu.ethereum.chain.ChainDataPruner;
@ -88,11 +89,7 @@ import org.hyperledger.besu.ethereum.trie.diffbased.common.trielog.TrieLogPruner
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.VerkleWorldStateProvider; import org.hyperledger.besu.ethereum.trie.diffbased.verkle.VerkleWorldStateProvider;
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.storage.VerkleWorldStateKeyValueStorage; import org.hyperledger.besu.ethereum.trie.diffbased.verkle.storage.VerkleWorldStateKeyValueStorage;
import org.hyperledger.besu.ethereum.trie.forest.ForestWorldStateArchive; import org.hyperledger.besu.ethereum.trie.forest.ForestWorldStateArchive;
import org.hyperledger.besu.ethereum.trie.forest.pruner.MarkSweepPruner;
import org.hyperledger.besu.ethereum.trie.forest.pruner.Pruner;
import org.hyperledger.besu.ethereum.trie.forest.pruner.PrunerConfiguration;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration; import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.ethereum.worldstate.DataStorageFormat;
import org.hyperledger.besu.ethereum.worldstate.WorldStateArchive; import org.hyperledger.besu.ethereum.worldstate.WorldStateArchive;
import org.hyperledger.besu.ethereum.worldstate.WorldStateKeyValueStorage; import org.hyperledger.besu.ethereum.worldstate.WorldStateKeyValueStorage;
import org.hyperledger.besu.ethereum.worldstate.WorldStatePreimageStorage; import org.hyperledger.besu.ethereum.worldstate.WorldStatePreimageStorage;
@ -101,8 +98,7 @@ import org.hyperledger.besu.evm.internal.EvmConfiguration;
import org.hyperledger.besu.metrics.ObservableMetricsSystem; import org.hyperledger.besu.metrics.ObservableMetricsSystem;
import org.hyperledger.besu.plugin.services.MetricsSystem; import org.hyperledger.besu.plugin.services.MetricsSystem;
import org.hyperledger.besu.plugin.services.permissioning.NodeMessagePermissioningProvider; import org.hyperledger.besu.plugin.services.permissioning.NodeMessagePermissioningProvider;
import org.hyperledger.besu.plugin.services.txselection.PluginTransactionSelectorFactory; import org.hyperledger.besu.plugin.services.storage.DataStorageFormat;
import org.hyperledger.besu.plugin.services.txvalidator.PluginTransactionValidatorFactory;
import java.io.Closeable; import java.io.Closeable;
import java.math.BigInteger; import java.math.BigInteger;
@ -162,10 +158,6 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
GasLimitCalculator gasLimitCalculator; GasLimitCalculator gasLimitCalculator;
/** The Storage provider. */ /** The Storage provider. */
protected StorageProvider storageProvider; protected StorageProvider storageProvider;
/** The Is pruning enabled. */
protected boolean isPruningEnabled;
/** The Pruner configuration. */
protected PrunerConfiguration prunerConfiguration;
/** The Required blocks. */ /** The Required blocks. */
protected Map<Long, Hash> requiredBlocks = Collections.emptyMap(); protected Map<Long, Hash> requiredBlocks = Collections.emptyMap();
/** The Reorg logging threshold. */ /** The Reorg logging threshold. */
@ -180,20 +172,18 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
protected EvmConfiguration evmConfiguration; protected EvmConfiguration evmConfiguration;
/** The Max peers. */ /** The Max peers. */
protected int maxPeers; protected int maxPeers;
/** Manages a cache of bad blocks globally */
protected final BadBlockManager badBlockManager = new BadBlockManager();
private int peerLowerBound;
private int maxRemotelyInitiatedPeers; private int maxRemotelyInitiatedPeers;
/** The Chain pruner configuration. */ /** The Chain pruner configuration. */
protected ChainPrunerConfiguration chainPrunerConfiguration = ChainPrunerConfiguration.DEFAULT; protected ChainPrunerConfiguration chainPrunerConfiguration = ChainPrunerConfiguration.DEFAULT;
private NetworkingConfiguration networkingConfiguration; private NetworkingConfiguration networkingConfiguration;
private Boolean randomPeerPriority; private Boolean randomPeerPriority;
private Optional<PluginTransactionSelectorFactory> transactionSelectorFactory = Optional.empty();
/** the Dagger configured context that can provide dependencies */ /** the Dagger configured context that can provide dependencies */
protected Optional<BesuComponent> besuComponent = Optional.empty(); protected Optional<BesuComponent> besuComponent = Optional.empty();
private PluginTransactionValidatorFactory pluginTransactionValidatorFactory;
private int numberOfBlocksToCache = 0; private int numberOfBlocksToCache = 0;
/** /**
@ -377,28 +367,6 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
return this; return this;
} }
/**
* Is pruning enabled besu controller builder.
*
* @param isPruningEnabled the is pruning enabled
* @return the besu controller builder
*/
public BesuControllerBuilder isPruningEnabled(final boolean isPruningEnabled) {
this.isPruningEnabled = isPruningEnabled;
return this;
}
/**
* Pruning configuration besu controller builder.
*
* @param prunerConfiguration the pruner configuration
* @return the besu controller builder
*/
public BesuControllerBuilder pruningConfiguration(final PrunerConfiguration prunerConfiguration) {
this.prunerConfiguration = prunerConfiguration;
return this;
}
/** /**
* Genesis config overrides besu controller builder. * Genesis config overrides besu controller builder.
* *
@ -478,22 +446,10 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
return this; return this;
} }
/**
* Lower bound of peers where we stop actively trying to initiate new outgoing connections
*
* @param peerLowerBound lower bound of peers where we stop actively trying to initiate new
* outgoing connections
* @return the besu controller builder
*/
public BesuControllerBuilder lowerBoundPeers(final int peerLowerBound) {
this.peerLowerBound = peerLowerBound;
return this;
}
/** /**
* Maximum number of remotely initiated peer connections * Maximum number of remotely initiated peer connections
* *
* @param maxRemotelyInitiatedPeers aximum number of remotely initiated peer connections * @param maxRemotelyInitiatedPeers maximum number of remotely initiated peer connections
* @return the besu controller builder * @return the besu controller builder
*/ */
public BesuControllerBuilder maxRemotelyInitiatedPeers(final int maxRemotelyInitiatedPeers) { public BesuControllerBuilder maxRemotelyInitiatedPeers(final int maxRemotelyInitiatedPeers) {
@ -514,7 +470,7 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
} }
/** /**
* Chain pruning configuration besu controller builder. * Sets the number of blocks to cache.
* *
* @param numberOfBlocksToCache the number of blocks to cache * @param numberOfBlocksToCache the number of blocks to cache
* @return the besu controller builder * @return the besu controller builder
@ -547,30 +503,6 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
return this; return this;
} }
/**
* sets the transactionSelectorFactory in the builder
*
* @param transactionSelectorFactory the optional transaction selector factory
* @return the besu controller builder
*/
public BesuControllerBuilder transactionSelectorFactory(
final Optional<PluginTransactionSelectorFactory> transactionSelectorFactory) {
this.transactionSelectorFactory = transactionSelectorFactory;
return this;
}
/**
* sets the pluginTransactionValidatorFactory
*
* @param pluginTransactionValidatorFactory factory that creates plugin transaction Validators
* @return the besu controller builder
*/
public BesuControllerBuilder pluginTransactionValidatorFactory(
final PluginTransactionValidatorFactory pluginTransactionValidatorFactory) {
this.pluginTransactionValidatorFactory = pluginTransactionValidatorFactory;
return this;
}
/** /**
* Build besu controller. * Build besu controller.
* *
@ -592,12 +524,12 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
checkNotNull(gasLimitCalculator, "Missing gas limit calculator"); checkNotNull(gasLimitCalculator, "Missing gas limit calculator");
checkNotNull(evmConfiguration, "Missing evm config"); checkNotNull(evmConfiguration, "Missing evm config");
checkNotNull(networkingConfiguration, "Missing network configuration"); checkNotNull(networkingConfiguration, "Missing network configuration");
checkNotNull(dataStorageConfiguration, "Missing data storage configuration");
prepForBuild(); prepForBuild();
final ProtocolSchedule protocolSchedule = createProtocolSchedule(); final ProtocolSchedule protocolSchedule = createProtocolSchedule();
final GenesisState genesisState = final GenesisState genesisState =
GenesisState.fromConfig( GenesisState.fromConfig(dataStorageConfiguration, genesisConfig, protocolSchedule);
dataStorageConfiguration.getDataStorageFormat(), genesisConfig, protocolSchedule);
final VariablesStorage variablesStorage = storageProvider.createVariablesStorage(); final VariablesStorage variablesStorage = storageProvider.createVariablesStorage();
@ -605,7 +537,8 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
storageProvider.createWorldStateStorageCoordinator(dataStorageConfiguration); storageProvider.createWorldStateStorageCoordinator(dataStorageConfiguration);
final BlockchainStorage blockchainStorage = final BlockchainStorage blockchainStorage =
storageProvider.createBlockchainStorage(protocolSchedule, variablesStorage); storageProvider.createBlockchainStorage(
protocolSchedule, variablesStorage, dataStorageConfiguration);
final MutableBlockchain blockchain = final MutableBlockchain blockchain =
DefaultBlockchain.createMutable( DefaultBlockchain.createMutable(
@ -616,13 +549,14 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
dataDirectory.toString(), dataDirectory.toString(),
numberOfBlocksToCache); numberOfBlocksToCache);
final BonsaiCachedMerkleTrieLoader cachedMerkleTrieLoader = final BonsaiCachedMerkleTrieLoader bonsaiCachedMerkleTrieLoader =
besuComponent besuComponent
.map(BesuComponent::getCachedMerkleTrieLoader) .map(BesuComponent::getCachedMerkleTrieLoader)
.orElseGet(() -> new BonsaiCachedMerkleTrieLoader(metricsSystem)); .orElseGet(() -> new BonsaiCachedMerkleTrieLoader(metricsSystem));
final WorldStateArchive worldStateArchive = final WorldStateArchive worldStateArchive =
createWorldStateArchive(worldStateStorageCoordinator, blockchain, cachedMerkleTrieLoader); createWorldStateArchive(
worldStateStorageCoordinator, blockchain, bonsaiCachedMerkleTrieLoader);
if (blockchain.getChainHeadBlockNumber() < 1) { if (blockchain.getChainHeadBlockNumber() < 1) {
genesisState.writeStateTo(worldStateArchive.getMutable()); genesisState.writeStateTo(worldStateArchive.getMutable());
@ -630,17 +564,10 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
final ProtocolContext protocolContext = final ProtocolContext protocolContext =
createProtocolContext( createProtocolContext(
blockchain, blockchain, worldStateArchive, protocolSchedule, this::createConsensusContext);
worldStateArchive,
protocolSchedule,
this::createConsensusContext,
transactionSelectorFactory);
validateContext(protocolContext); validateContext(protocolContext);
if (chainPrunerConfiguration.getChainPruningEnabled()) { if (chainPrunerConfiguration.getChainPruningEnabled()) {
protocolContext
.safeConsensusContext(MergeContext.class)
.ifPresent(mergeContext -> mergeContext.setIsChainPruningEnabled(true));
final ChainDataPruner chainDataPruner = createChainPruner(blockchainStorage); final ChainDataPruner chainDataPruner = createChainPruner(blockchainStorage);
blockchain.observeBlockAdded(chainDataPruner); blockchain.observeBlockAdded(chainDataPruner);
LOG.info( LOG.info(
@ -653,25 +580,6 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
protocolSchedule.setPublicWorldStateArchiveForPrivacyBlockProcessor( protocolSchedule.setPublicWorldStateArchiveForPrivacyBlockProcessor(
protocolContext.getWorldStateArchive()); protocolContext.getWorldStateArchive());
Optional<Pruner> maybePruner = Optional.empty();
if (isPruningEnabled) {
if (dataStorageConfiguration.getDataStorageFormat().equals(DataStorageFormat.BONSAI)) {
LOG.warn(
"Cannot enable pruning with Bonsai data storage format. Disabling. Change the data storage format or disable pruning explicitly on the command line to remove this warning.");
} else {
maybePruner =
Optional.of(
new Pruner(
new MarkSweepPruner(
((ForestWorldStateArchive) worldStateArchive).getWorldStateStorage(),
blockchain,
storageProvider.getStorageBySegmentIdentifier(
KeyValueSegmentIdentifier.PRUNING_STATE),
metricsSystem),
blockchain,
prunerConfiguration));
}
}
final int maxMessageSize = ethereumWireProtocolConfiguration.getMaxMessageSize(); final int maxMessageSize = ethereumWireProtocolConfiguration.getMaxMessageSize();
final Supplier<ProtocolSpec> currentProtocolSpecSupplier = final Supplier<ProtocolSpec> currentProtocolSpecSupplier =
() -> protocolSchedule.getByBlockHeader(blockchain.getChainHeadHeader()); () -> protocolSchedule.getByBlockHeader(blockchain.getChainHeadHeader());
@ -684,7 +592,6 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
maxMessageSize, maxMessageSize,
messagePermissioningProviders, messagePermissioningProviders,
nodeKey.getPublicKey().getEncodedBytes(), nodeKey.getPublicKey().getEncodedBytes(),
peerLowerBound,
maxPeers, maxPeers,
maxRemotelyInitiatedPeers, maxRemotelyInitiatedPeers,
randomPeerPriority); randomPeerPriority);
@ -729,8 +636,8 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
metricsSystem, metricsSystem,
syncState, syncState,
transactionPoolConfiguration, transactionPoolConfiguration,
pluginTransactionValidatorFactory, besuComponent.map(BesuComponent::getBlobCache).orElse(new BlobCache()),
besuComponent.map(BesuComponent::getBlobCache).orElse(new BlobCache())); miningParameters);
final List<PeerValidator> peerValidators = createPeerValidators(protocolSchedule); final List<PeerValidator> peerValidators = createPeerValidators(protocolSchedule);
@ -747,9 +654,6 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
peerValidators, peerValidators,
Optional.empty()); Optional.empty());
final Optional<SnapProtocolManager> maybeSnapProtocolManager =
createSnapProtocolManager(peerValidators, ethPeers, snapMessages, worldStateArchive);
final PivotBlockSelector pivotBlockSelector = final PivotBlockSelector pivotBlockSelector =
createPivotSelector( createPivotSelector(
protocolSchedule, protocolContext, ethContext, syncState, metricsSystem); protocolSchedule, protocolContext, ethContext, syncState, metricsSystem);
@ -759,7 +663,6 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
protocolSchedule, protocolSchedule,
worldStateStorageCoordinator, worldStateStorageCoordinator,
protocolContext, protocolContext,
maybePruner,
ethContext, ethContext,
syncState, syncState,
ethProtocolManager, ethProtocolManager,
@ -767,6 +670,10 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
protocolContext.setSynchronizer(Optional.of(synchronizer)); protocolContext.setSynchronizer(Optional.of(synchronizer));
final Optional<SnapProtocolManager> maybeSnapProtocolManager =
createSnapProtocolManager(
protocolContext, worldStateStorageCoordinator, ethPeers, snapMessages);
final MiningCoordinator miningCoordinator = final MiningCoordinator miningCoordinator =
createMiningCoordinator( createMiningCoordinator(
protocolSchedule, protocolSchedule,
@ -850,7 +757,6 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
* @param protocolSchedule the protocol schedule * @param protocolSchedule the protocol schedule
* @param worldStateStorageCoordinator the world state storage * @param worldStateStorageCoordinator the world state storage
* @param protocolContext the protocol context * @param protocolContext the protocol context
* @param maybePruner the maybe pruner
* @param ethContext the eth context * @param ethContext the eth context
* @param syncState the sync state * @param syncState the sync state
* @param ethProtocolManager the eth protocol manager * @param ethProtocolManager the eth protocol manager
@ -861,7 +767,6 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
final ProtocolSchedule protocolSchedule, final ProtocolSchedule protocolSchedule,
final WorldStateStorageCoordinator worldStateStorageCoordinator, final WorldStateStorageCoordinator worldStateStorageCoordinator,
final ProtocolContext protocolContext, final ProtocolContext protocolContext,
final Optional<Pruner> maybePruner,
final EthContext ethContext, final EthContext ethContext,
final SyncState syncState, final SyncState syncState,
final EthProtocolManager ethProtocolManager, final EthProtocolManager ethProtocolManager,
@ -873,7 +778,6 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
protocolContext, protocolContext,
worldStateStorageCoordinator, worldStateStorageCoordinator,
ethProtocolManager.getBlockBroadcaster(), ethProtocolManager.getBlockBroadcaster(),
maybePruner,
ethContext, ethContext,
syncState, syncState,
dataDirectory, dataDirectory,
@ -1073,36 +977,35 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
* @param worldStateArchive the world state archive * @param worldStateArchive the world state archive
* @param protocolSchedule the protocol schedule * @param protocolSchedule the protocol schedule
* @param consensusContextFactory the consensus context factory * @param consensusContextFactory the consensus context factory
* @param transactionSelectorFactory optional transaction selector factory
* @return the protocol context * @return the protocol context
*/ */
protected ProtocolContext createProtocolContext( protected ProtocolContext createProtocolContext(
final MutableBlockchain blockchain, final MutableBlockchain blockchain,
final WorldStateArchive worldStateArchive, final WorldStateArchive worldStateArchive,
final ProtocolSchedule protocolSchedule, final ProtocolSchedule protocolSchedule,
final ConsensusContextFactory consensusContextFactory, final ConsensusContextFactory consensusContextFactory) {
final Optional<PluginTransactionSelectorFactory> transactionSelectorFactory) {
return ProtocolContext.init( return ProtocolContext.init(
blockchain, blockchain, worldStateArchive, protocolSchedule, consensusContextFactory, badBlockManager);
worldStateArchive,
protocolSchedule,
consensusContextFactory,
transactionSelectorFactory);
} }
private Optional<SnapProtocolManager> createSnapProtocolManager( private Optional<SnapProtocolManager> createSnapProtocolManager(
final List<PeerValidator> peerValidators, final ProtocolContext protocolContext,
final WorldStateStorageCoordinator worldStateStorageCoordinator,
final EthPeers ethPeers, final EthPeers ethPeers,
final EthMessages snapMessages, final EthMessages snapMessages) {
final WorldStateArchive worldStateArchive) {
return Optional.of( return Optional.of(
new SnapProtocolManager(peerValidators, ethPeers, snapMessages, worldStateArchive)); new SnapProtocolManager(
worldStateStorageCoordinator,
syncConfig.getSnapSyncConfiguration(),
ethPeers,
snapMessages,
protocolContext));
} }
WorldStateArchive createWorldStateArchive( WorldStateArchive createWorldStateArchive(
final WorldStateStorageCoordinator worldStateStorageCoordinator, final WorldStateStorageCoordinator worldStateStorageCoordinator,
final Blockchain blockchain, final Blockchain blockchain,
final BonsaiCachedMerkleTrieLoader cachedMerkleTrieLoader) { final BonsaiCachedMerkleTrieLoader bonsaiCachedMerkleTrieLoader) {
return switch (dataStorageConfiguration.getDataStorageFormat()) { return switch (dataStorageConfiguration.getDataStorageFormat()) {
case BONSAI -> { case BONSAI -> {
final BonsaiWorldStateKeyValueStorage worldStateKeyValueStorage = final BonsaiWorldStateKeyValueStorage worldStateKeyValueStorage =
@ -1111,7 +1014,7 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
worldStateKeyValueStorage, worldStateKeyValueStorage,
blockchain, blockchain,
Optional.of(dataStorageConfiguration.getBonsaiMaxLayersToLoad()), Optional.of(dataStorageConfiguration.getBonsaiMaxLayersToLoad()),
cachedMerkleTrieLoader, bonsaiCachedMerkleTrieLoader,
besuComponent.map(BesuComponent::getBesuPluginContext).orElse(null), besuComponent.map(BesuComponent::getBesuPluginContext).orElse(null),
evmConfiguration); evmConfiguration);
} }
@ -1121,7 +1024,9 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
yield new VerkleWorldStateProvider( yield new VerkleWorldStateProvider(
worldStateKeyValueStorage, worldStateKeyValueStorage,
blockchain, blockchain,
Optional.of(dataStorageConfiguration.getBonsaiMaxLayersToLoad()), Optional.of(
dataStorageConfiguration
.getBonsaiMaxLayersToLoad()), // TODO having verkle configuration or generic
besuComponent.map(BesuComponent::getBesuPluginContext).orElse(null), besuComponent.map(BesuComponent::getBesuPluginContext).orElse(null),
evmConfiguration); evmConfiguration);
} }
@ -1183,8 +1088,7 @@ public abstract class BesuControllerBuilder implements MiningParameterOverrides
final CheckpointConfigOptions checkpointConfigOptions = final CheckpointConfigOptions checkpointConfigOptions =
genesisConfig.getConfigOptions(genesisConfigOverrides).getCheckpointOptions(); genesisConfig.getConfigOptions(genesisConfigOverrides).getCheckpointOptions();
if (SyncMode.X_CHECKPOINT.equals(syncConfig.getSyncMode()) if (SyncMode.isCheckpointSync(syncConfig.getSyncMode()) && checkpointConfigOptions.isValid()) {
&& checkpointConfigOptions.isValid()) {
validators.add( validators.add(
new CheckpointBlocksPeerValidator( new CheckpointBlocksPeerValidator(
protocolSchedule, protocolSchedule,

@ -19,6 +19,7 @@ import static org.hyperledger.besu.consensus.clique.CliqueHelpers.installCliqueB
import org.hyperledger.besu.config.CliqueConfigOptions; import org.hyperledger.besu.config.CliqueConfigOptions;
import org.hyperledger.besu.consensus.clique.CliqueBlockInterface; import org.hyperledger.besu.consensus.clique.CliqueBlockInterface;
import org.hyperledger.besu.consensus.clique.CliqueContext; import org.hyperledger.besu.consensus.clique.CliqueContext;
import org.hyperledger.besu.consensus.clique.CliqueForksSchedulesFactory;
import org.hyperledger.besu.consensus.clique.CliqueMiningTracker; import org.hyperledger.besu.consensus.clique.CliqueMiningTracker;
import org.hyperledger.besu.consensus.clique.CliqueProtocolSchedule; import org.hyperledger.besu.consensus.clique.CliqueProtocolSchedule;
import org.hyperledger.besu.consensus.clique.blockcreation.CliqueBlockScheduler; import org.hyperledger.besu.consensus.clique.blockcreation.CliqueBlockScheduler;
@ -27,6 +28,7 @@ import org.hyperledger.besu.consensus.clique.blockcreation.CliqueMiningCoordinat
import org.hyperledger.besu.consensus.clique.jsonrpc.CliqueJsonRpcMethods; import org.hyperledger.besu.consensus.clique.jsonrpc.CliqueJsonRpcMethods;
import org.hyperledger.besu.consensus.common.BlockInterface; import org.hyperledger.besu.consensus.common.BlockInterface;
import org.hyperledger.besu.consensus.common.EpochManager; import org.hyperledger.besu.consensus.common.EpochManager;
import org.hyperledger.besu.consensus.common.ForksSchedule;
import org.hyperledger.besu.consensus.common.validator.blockbased.BlockValidatorProvider; import org.hyperledger.besu.consensus.common.validator.blockbased.BlockValidatorProvider;
import org.hyperledger.besu.datatypes.Address; import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.ethereum.ProtocolContext; import org.hyperledger.besu.ethereum.ProtocolContext;
@ -52,19 +54,17 @@ public class CliqueBesuControllerBuilder extends BesuControllerBuilder {
private Address localAddress; private Address localAddress;
private EpochManager epochManager; private EpochManager epochManager;
private long secondsBetweenBlocks;
private boolean createEmptyBlocks = true;
private final BlockInterface blockInterface = new CliqueBlockInterface(); private final BlockInterface blockInterface = new CliqueBlockInterface();
private ForksSchedule<CliqueConfigOptions> forksSchedule;
@Override @Override
protected void prepForBuild() { protected void prepForBuild() {
localAddress = Util.publicKeyToAddress(nodeKey.getPublicKey()); localAddress = Util.publicKeyToAddress(nodeKey.getPublicKey());
final CliqueConfigOptions cliqueConfig = configOptionsSupplier.get().getCliqueConfigOptions(); final CliqueConfigOptions cliqueConfig = configOptionsSupplier.get().getCliqueConfigOptions();
final long blocksPerEpoch = cliqueConfig.getEpochLength(); final long blocksPerEpoch = cliqueConfig.getEpochLength();
secondsBetweenBlocks = cliqueConfig.getBlockPeriodSeconds();
createEmptyBlocks = cliqueConfig.getCreateEmptyBlocks();
epochManager = new EpochManager(blocksPerEpoch); epochManager = new EpochManager(blocksPerEpoch);
forksSchedule = CliqueForksSchedulesFactory.create(configOptionsSupplier.get());
} }
@Override @Override
@ -92,9 +92,9 @@ public class CliqueBesuControllerBuilder extends BesuControllerBuilder {
clock, clock,
protocolContext.getConsensusContext(CliqueContext.class).getValidatorProvider(), protocolContext.getConsensusContext(CliqueContext.class).getValidatorProvider(),
localAddress, localAddress,
secondsBetweenBlocks), forksSchedule),
epochManager, epochManager,
createEmptyBlocks, forksSchedule,
ethProtocolManager.ethContext().getScheduler()); ethProtocolManager.ethContext().getScheduler());
final CliqueMiningCoordinator miningCoordinator = final CliqueMiningCoordinator miningCoordinator =
new CliqueMiningCoordinator( new CliqueMiningCoordinator(
@ -102,6 +102,18 @@ public class CliqueBesuControllerBuilder extends BesuControllerBuilder {
miningExecutor, miningExecutor,
syncState, syncState,
new CliqueMiningTracker(localAddress, protocolContext)); new CliqueMiningTracker(localAddress, protocolContext));
// Update the next block period in seconds according to the transition schedule
protocolContext
.getBlockchain()
.observeBlockAdded(
o ->
miningParameters.setBlockPeriodSeconds(
forksSchedule
.getFork(o.getBlock().getHeader().getNumber() + 1)
.getValue()
.getBlockPeriodSeconds()));
miningCoordinator.addMinedBlockObserver(ethProtocolManager); miningCoordinator.addMinedBlockObserver(ethProtocolManager);
// Clique mining is implicitly enabled. // Clique mining is implicitly enabled.
@ -113,10 +125,13 @@ public class CliqueBesuControllerBuilder extends BesuControllerBuilder {
protected ProtocolSchedule createProtocolSchedule() { protected ProtocolSchedule createProtocolSchedule() {
return CliqueProtocolSchedule.create( return CliqueProtocolSchedule.create(
configOptionsSupplier.get(), configOptionsSupplier.get(),
forksSchedule,
nodeKey, nodeKey,
privacyParameters, privacyParameters,
isRevertReasonEnabled, isRevertReasonEnabled,
evmConfiguration); evmConfiguration,
miningParameters,
badBlockManager);
} }
@Override @Override

@ -56,13 +56,11 @@ import org.hyperledger.besu.ethereum.eth.transactions.TransactionPoolConfigurati
import org.hyperledger.besu.ethereum.mainnet.ProtocolSchedule; import org.hyperledger.besu.ethereum.mainnet.ProtocolSchedule;
import org.hyperledger.besu.ethereum.p2p.config.SubProtocolConfiguration; import org.hyperledger.besu.ethereum.p2p.config.SubProtocolConfiguration;
import org.hyperledger.besu.ethereum.storage.StorageProvider; import org.hyperledger.besu.ethereum.storage.StorageProvider;
import org.hyperledger.besu.ethereum.trie.forest.pruner.PrunerConfiguration;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration; import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.ethereum.worldstate.WorldStateArchive; import org.hyperledger.besu.ethereum.worldstate.WorldStateArchive;
import org.hyperledger.besu.evm.internal.EvmConfiguration; import org.hyperledger.besu.evm.internal.EvmConfiguration;
import org.hyperledger.besu.metrics.ObservableMetricsSystem; import org.hyperledger.besu.metrics.ObservableMetricsSystem;
import org.hyperledger.besu.plugin.services.permissioning.NodeMessagePermissioningProvider; import org.hyperledger.besu.plugin.services.permissioning.NodeMessagePermissioningProvider;
import org.hyperledger.besu.plugin.services.txselection.PluginTransactionSelectorFactory;
import java.math.BigInteger; import java.math.BigInteger;
import java.nio.file.Path; import java.nio.file.Path;
@ -175,14 +173,9 @@ public class ConsensusScheduleBesuControllerBuilder extends BesuControllerBuilde
final MutableBlockchain blockchain, final MutableBlockchain blockchain,
final WorldStateArchive worldStateArchive, final WorldStateArchive worldStateArchive,
final ProtocolSchedule protocolSchedule, final ProtocolSchedule protocolSchedule,
final ConsensusContextFactory consensusContextFactory, final ConsensusContextFactory consensusContextFactory) {
final Optional<PluginTransactionSelectorFactory> transactionSelectorFactory) {
return MigratingProtocolContext.init( return MigratingProtocolContext.init(
blockchain, blockchain, worldStateArchive, protocolSchedule, consensusContextFactory, badBlockManager);
worldStateArchive,
protocolSchedule,
consensusContextFactory,
transactionSelectorFactory);
} }
@Override @Override
@ -374,20 +367,6 @@ public class ConsensusScheduleBesuControllerBuilder extends BesuControllerBuilde
return super.isRevertReasonEnabled(isRevertReasonEnabled); return super.isRevertReasonEnabled(isRevertReasonEnabled);
} }
@Override
public BesuControllerBuilder isPruningEnabled(final boolean isPruningEnabled) {
besuControllerBuilderSchedule.values().forEach(b -> b.isPruningEnabled(isPruningEnabled));
return super.isPruningEnabled(isPruningEnabled);
}
@Override
public BesuControllerBuilder pruningConfiguration(final PrunerConfiguration prunerConfiguration) {
besuControllerBuilderSchedule
.values()
.forEach(b -> b.pruningConfiguration(prunerConfiguration));
return super.pruningConfiguration(prunerConfiguration);
}
@Override @Override
public BesuControllerBuilder genesisConfigOverrides( public BesuControllerBuilder genesisConfigOverrides(
final Map<String, String> genesisConfigOverrides) { final Map<String, String> genesisConfigOverrides) {

@ -234,12 +234,19 @@ public class IbftBesuControllerBuilder extends BftBesuControllerBuilder {
blockchain, blockchain,
bftEventQueue); bftEventQueue);
// Update the next block period in seconds according to the transition schedule
protocolContext
.getBlockchain()
.observeBlockAdded(
o ->
miningParameters.setBlockPeriodSeconds(
forksSchedule
.getFork(o.getBlock().getHeader().getNumber() + 1)
.getValue()
.getBlockPeriodSeconds()));
if (syncState.isInitialSyncPhaseDone()) { if (syncState.isInitialSyncPhaseDone()) {
LOG.info("Starting IBFT mining coordinator");
ibftMiningCoordinator.enable(); ibftMiningCoordinator.enable();
ibftMiningCoordinator.start();
} else {
LOG.info("IBFT mining coordinator not starting while initial sync in progress");
} }
syncState.subscribeCompletionReached( syncState.subscribeCompletionReached(
@ -278,7 +285,9 @@ public class IbftBesuControllerBuilder extends BftBesuControllerBuilder {
privacyParameters, privacyParameters,
isRevertReasonEnabled, isRevertReasonEnabled,
bftExtraDataCodec().get(), bftExtraDataCodec().get(),
evmConfiguration); evmConfiguration,
miningParameters,
badBlockManager);
} }
@Override @Override

@ -91,7 +91,12 @@ public class MainnetBesuControllerBuilder extends BesuControllerBuilder {
@Override @Override
protected ProtocolSchedule createProtocolSchedule() { protected ProtocolSchedule createProtocolSchedule() {
return MainnetProtocolSchedule.fromConfig( return MainnetProtocolSchedule.fromConfig(
configOptionsSupplier.get(), privacyParameters, isRevertReasonEnabled, evmConfiguration); configOptionsSupplier.get(),
privacyParameters,
isRevertReasonEnabled,
evmConfiguration,
miningParameters,
badBlockManager);
} }
@Override @Override

@ -173,7 +173,11 @@ public class MergeBesuControllerBuilder extends BesuControllerBuilder {
@Override @Override
protected ProtocolSchedule createProtocolSchedule() { protected ProtocolSchedule createProtocolSchedule() {
return MergeProtocolSchedule.create( return MergeProtocolSchedule.create(
configOptionsSupplier.get(), privacyParameters, isRevertReasonEnabled); configOptionsSupplier.get(),
privacyParameters,
isRevertReasonEnabled,
miningParameters,
badBlockManager);
} }
@Override @Override
@ -198,7 +202,6 @@ public class MergeBesuControllerBuilder extends BesuControllerBuilder {
.getTerminalTotalDifficulty() .getTerminalTotalDifficulty()
.map(Difficulty::of) .map(Difficulty::of)
.orElse(Difficulty.ZERO)) .orElse(Difficulty.ZERO))
.setCheckpointPostMergeSync(syncConfig.isCheckpointPostMergeEnabled())
.setPostMergeAtGenesis(isPostMergeAtGenesis); .setPostMergeAtGenesis(isPostMergeAtGenesis);
blockchain blockchain

@ -274,12 +274,19 @@ public class QbftBesuControllerBuilder extends BftBesuControllerBuilder {
blockchain, blockchain,
bftEventQueue); bftEventQueue);
// Update the next block period in seconds according to the transition schedule
protocolContext
.getBlockchain()
.observeBlockAdded(
o ->
miningParameters.setBlockPeriodSeconds(
qbftForksSchedule
.getFork(o.getBlock().getHeader().getNumber() + 1)
.getValue()
.getBlockPeriodSeconds()));
if (syncState.isInitialSyncPhaseDone()) { if (syncState.isInitialSyncPhaseDone()) {
LOG.info("Starting QBFT mining coordinator");
miningCoordinator.enable(); miningCoordinator.enable();
miningCoordinator.start();
} else {
LOG.info("QBFT mining coordinator not starting while initial sync in progress");
} }
syncState.subscribeCompletionReached( syncState.subscribeCompletionReached(
@ -318,7 +325,9 @@ public class QbftBesuControllerBuilder extends BftBesuControllerBuilder {
privacyParameters, privacyParameters,
isRevertReasonEnabled, isRevertReasonEnabled,
bftExtraDataCodec().get(), bftExtraDataCodec().get(),
evmConfiguration); evmConfiguration,
miningParameters,
badBlockManager);
} }
@Override @Override

@ -52,15 +52,12 @@ import org.hyperledger.besu.ethereum.eth.transactions.TransactionPool;
import org.hyperledger.besu.ethereum.eth.transactions.TransactionPoolConfiguration; import org.hyperledger.besu.ethereum.eth.transactions.TransactionPoolConfiguration;
import org.hyperledger.besu.ethereum.mainnet.ProtocolSchedule; import org.hyperledger.besu.ethereum.mainnet.ProtocolSchedule;
import org.hyperledger.besu.ethereum.storage.StorageProvider; import org.hyperledger.besu.ethereum.storage.StorageProvider;
import org.hyperledger.besu.ethereum.trie.forest.pruner.Pruner;
import org.hyperledger.besu.ethereum.trie.forest.pruner.PrunerConfiguration;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration; import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.ethereum.worldstate.WorldStateArchive; import org.hyperledger.besu.ethereum.worldstate.WorldStateArchive;
import org.hyperledger.besu.ethereum.worldstate.WorldStateStorageCoordinator; import org.hyperledger.besu.ethereum.worldstate.WorldStateStorageCoordinator;
import org.hyperledger.besu.evm.internal.EvmConfiguration; import org.hyperledger.besu.evm.internal.EvmConfiguration;
import org.hyperledger.besu.metrics.ObservableMetricsSystem; import org.hyperledger.besu.metrics.ObservableMetricsSystem;
import org.hyperledger.besu.plugin.services.permissioning.NodeMessagePermissioningProvider; import org.hyperledger.besu.plugin.services.permissioning.NodeMessagePermissioningProvider;
import org.hyperledger.besu.plugin.services.txselection.PluginTransactionSelectorFactory;
import java.math.BigInteger; import java.math.BigInteger;
import java.nio.file.Path; import java.nio.file.Path;
@ -189,15 +186,10 @@ public class TransitionBesuControllerBuilder extends BesuControllerBuilder {
final MutableBlockchain blockchain, final MutableBlockchain blockchain,
final WorldStateArchive worldStateArchive, final WorldStateArchive worldStateArchive,
final ProtocolSchedule protocolSchedule, final ProtocolSchedule protocolSchedule,
final ConsensusContextFactory consensusContextFactory, final ConsensusContextFactory consensusContextFactory) {
final Optional<PluginTransactionSelectorFactory> transactionSelectorFactory) {
final ProtocolContext protocolContext = final ProtocolContext protocolContext =
super.createProtocolContext( super.createProtocolContext(
blockchain, blockchain, worldStateArchive, protocolSchedule, consensusContextFactory);
worldStateArchive,
protocolSchedule,
consensusContextFactory,
transactionSelectorFactory);
transitionProtocolSchedule.setProtocolContext(protocolContext); transitionProtocolSchedule.setProtocolContext(protocolContext);
return protocolContext; return protocolContext;
} }
@ -225,7 +217,6 @@ public class TransitionBesuControllerBuilder extends BesuControllerBuilder {
final ProtocolSchedule protocolSchedule, final ProtocolSchedule protocolSchedule,
final WorldStateStorageCoordinator worldStateStorageCoordinator, final WorldStateStorageCoordinator worldStateStorageCoordinator,
final ProtocolContext protocolContext, final ProtocolContext protocolContext,
final Optional<Pruner> maybePruner,
final EthContext ethContext, final EthContext ethContext,
final SyncState syncState, final SyncState syncState,
final EthProtocolManager ethProtocolManager, final EthProtocolManager ethProtocolManager,
@ -237,7 +228,6 @@ public class TransitionBesuControllerBuilder extends BesuControllerBuilder {
protocolSchedule, protocolSchedule,
worldStateStorageCoordinator, worldStateStorageCoordinator,
protocolContext, protocolContext,
maybePruner,
ethContext, ethContext,
syncState, syncState,
ethProtocolManager, ethProtocolManager,
@ -255,6 +245,7 @@ public class TransitionBesuControllerBuilder extends BesuControllerBuilder {
return sync; return sync;
} }
@SuppressWarnings("UnusedVariable")
private void initTransitionWatcher( private void initTransitionWatcher(
final ProtocolContext protocolContext, final TransitionCoordinator composedCoordinator) { final ProtocolContext protocolContext, final TransitionCoordinator composedCoordinator) {
@ -392,18 +383,6 @@ public class TransitionBesuControllerBuilder extends BesuControllerBuilder {
return propagateConfig(z -> z.isRevertReasonEnabled(isRevertReasonEnabled)); return propagateConfig(z -> z.isRevertReasonEnabled(isRevertReasonEnabled));
} }
@Override
public BesuControllerBuilder isPruningEnabled(final boolean isPruningEnabled) {
super.isPruningEnabled(isPruningEnabled);
return propagateConfig(z -> z.isPruningEnabled(isPruningEnabled));
}
@Override
public BesuControllerBuilder pruningConfiguration(final PrunerConfiguration prunerConfiguration) {
super.pruningConfiguration(prunerConfiguration);
return propagateConfig(z -> z.pruningConfiguration(prunerConfiguration));
}
@Override @Override
public BesuControllerBuilder genesisConfigOverrides( public BesuControllerBuilder genesisConfigOverrides(
final Map<String, String> genesisConfigOverrides) { final Map<String, String> genesisConfigOverrides) {

@ -14,25 +14,38 @@
*/ */
package org.hyperledger.besu.services; package org.hyperledger.besu.services;
import org.hyperledger.besu.datatypes.Wei;
import org.hyperledger.besu.ethereum.core.MiningParameters;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.plugin.services.BesuConfiguration; import org.hyperledger.besu.plugin.services.BesuConfiguration;
import org.hyperledger.besu.plugin.services.storage.DataStorageFormat;
import java.nio.file.Path; import java.nio.file.Path;
/** A concrete implementation of BesuConfiguration which is used in Besu plugin framework. */ /** A concrete implementation of BesuConfiguration which is used in Besu plugin framework. */
public class BesuConfigurationImpl implements BesuConfiguration { public class BesuConfigurationImpl implements BesuConfiguration {
private Path storagePath;
private final Path storagePath; private Path dataPath;
private final Path dataPath; private DataStorageConfiguration dataStorageConfiguration;
private MiningParameters miningParameters;
/** /**
* BesuConfigurationImpl Constructor. * Post creation initialization
* *
* @param dataPath The Path representing data folder * @param dataPath The Path representing data folder
* @param storagePath The path representing storage folder * @param storagePath The path representing storage folder
* @param dataStorageConfiguration The data storage configuration
* @param miningParameters The mining parameters
*/ */
public BesuConfigurationImpl(final Path dataPath, final Path storagePath) { public void init(
final Path dataPath,
final Path storagePath,
final DataStorageConfiguration dataStorageConfiguration,
final MiningParameters miningParameters) {
this.dataPath = dataPath; this.dataPath = dataPath;
this.storagePath = storagePath; this.storagePath = storagePath;
this.dataStorageConfiguration = dataStorageConfiguration;
this.miningParameters = miningParameters;
} }
@Override @Override
@ -44,4 +57,48 @@ public class BesuConfigurationImpl implements BesuConfiguration {
public Path getDataPath() { public Path getDataPath() {
return dataPath; return dataPath;
} }
@Override
public DataStorageFormat getDatabaseFormat() {
return dataStorageConfiguration.getDataStorageFormat();
}
@Override
public Wei getMinGasPrice() {
return miningParameters.getMinTransactionGasPrice();
}
@Override
public org.hyperledger.besu.plugin.services.storage.DataStorageConfiguration
getDataStorageConfiguration() {
return new DataStoreConfigurationImpl(dataStorageConfiguration);
}
/**
* A concrete implementation of DataStorageConfiguration which is used in Besu plugin framework.
*/
public static class DataStoreConfigurationImpl
implements org.hyperledger.besu.plugin.services.storage.DataStorageConfiguration {
private final DataStorageConfiguration dataStorageConfiguration;
/**
* Instantiate the concrete implementation of the plugin DataStorageConfiguration.
*
* @param dataStorageConfiguration The Ethereum core module data storage configuration
*/
public DataStoreConfigurationImpl(final DataStorageConfiguration dataStorageConfiguration) {
this.dataStorageConfiguration = dataStorageConfiguration;
}
@Override
public DataStorageFormat getDatabaseFormat() {
return dataStorageConfiguration.getDataStorageFormat();
}
@Override
public boolean getReceiptCompactionEnabled() {
return dataStorageConfiguration.getReceiptCompactionEnabled();
}
}
} }

@ -15,8 +15,13 @@
package org.hyperledger.besu.services; package org.hyperledger.besu.services;
import org.hyperledger.besu.ethereum.chain.Blockchain; import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.datatypes.Wei;
import org.hyperledger.besu.ethereum.ProtocolContext;
import org.hyperledger.besu.ethereum.core.BlockBody; import org.hyperledger.besu.ethereum.core.BlockBody;
import org.hyperledger.besu.ethereum.mainnet.ProtocolSchedule;
import org.hyperledger.besu.ethereum.mainnet.feemarket.BaseFeeMarket;
import org.hyperledger.besu.ethereum.mainnet.feemarket.FeeMarket;
import org.hyperledger.besu.plugin.Unstable; import org.hyperledger.besu.plugin.Unstable;
import org.hyperledger.besu.plugin.data.BlockContext; import org.hyperledger.besu.plugin.data.BlockContext;
import org.hyperledger.besu.plugin.data.BlockHeader; import org.hyperledger.besu.plugin.data.BlockHeader;
@ -29,15 +34,21 @@ import java.util.function.Supplier;
@Unstable @Unstable
public class BlockchainServiceImpl implements BlockchainService { public class BlockchainServiceImpl implements BlockchainService {
private final Blockchain blockchain; private ProtocolContext protocolContext;
private ProtocolSchedule protocolSchedule;
/** Create a new instance */
public BlockchainServiceImpl() {}
/** /**
* Instantiates a new Blockchain service. * Instantiates a new Blockchain service.
* *
* @param blockchain the blockchain * @param protocolContext the protocol context
* @param protocolSchedule the protocol schedule
*/ */
public BlockchainServiceImpl(final Blockchain blockchain) { public void init(final ProtocolContext protocolContext, final ProtocolSchedule protocolSchedule) {
this.blockchain = blockchain; this.protocolContext = protocolContext;
this.protocolSchedule = protocolSchedule;
} }
/** /**
@ -48,11 +59,39 @@ public class BlockchainServiceImpl implements BlockchainService {
*/ */
@Override @Override
public Optional<BlockContext> getBlockByNumber(final long number) { public Optional<BlockContext> getBlockByNumber(final long number) {
return blockchain return protocolContext
.getBlockchain()
.getBlockByNumber(number) .getBlockByNumber(number)
.map(block -> blockContext(block::getHeader, block::getBody)); .map(block -> blockContext(block::getHeader, block::getBody));
} }
@Override
public Hash getChainHeadHash() {
return protocolContext.getBlockchain().getChainHeadHash();
}
@Override
public BlockHeader getChainHeadHeader() {
return protocolContext.getBlockchain().getChainHeadHeader();
}
@Override
public Optional<Wei> getNextBlockBaseFee() {
final var chainHeadHeader = protocolContext.getBlockchain().getChainHeadHeader();
final var protocolSpec =
protocolSchedule.getForNextBlockHeader(chainHeadHeader, System.currentTimeMillis());
return Optional.of(protocolSpec.getFeeMarket())
.filter(FeeMarket::implementsBaseFee)
.map(BaseFeeMarket.class::cast)
.map(
feeMarket ->
feeMarket.computeBaseFee(
chainHeadHeader.getNumber() + 1,
chainHeadHeader.getBaseFee().orElse(Wei.ZERO),
chainHeadHeader.getGasUsed(),
feeMarket.targetGasUsed(chainHeadHeader)));
}
private static BlockContext blockContext( private static BlockContext blockContext(
final Supplier<BlockHeader> blockHeaderSupplier, final Supplier<BlockHeader> blockHeaderSupplier,
final Supplier<BlockBody> blockBodySupplier) { final Supplier<BlockBody> blockBodySupplier) {

@ -1,35 +0,0 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.services;
import org.hyperledger.besu.plugin.services.PluginTransactionValidatorService;
import org.hyperledger.besu.plugin.services.txvalidator.PluginTransactionValidatorFactory;
/** The Transaction Validation service implementation. */
public class PluginTransactionValidatorServiceImpl implements PluginTransactionValidatorService {
private PluginTransactionValidatorFactory factory;
@Override
public PluginTransactionValidatorFactory get() {
return factory;
}
@Override
public void registerTransactionValidatorFactory(
final PluginTransactionValidatorFactory transactionValidatorFactory) {
factory = transactionValidatorFactory;
}
}

@ -24,6 +24,7 @@ import org.hyperledger.besu.plugin.services.rpc.PluginRpcRequest;
import java.util.Collection; import java.util.Collection;
import java.util.HashMap; import java.util.HashMap;
import java.util.Locale;
import java.util.Map; import java.util.Map;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@ -58,7 +59,10 @@ public class RpcEndpointServiceImpl implements RpcEndpointService {
namespaces.stream() namespaces.stream()
.anyMatch( .anyMatch(
namespace -> namespace ->
entry.getKey().toUpperCase().startsWith(namespace.toUpperCase()))) entry
.getKey()
.toUpperCase(Locale.ROOT)
.startsWith(namespace.toUpperCase(Locale.ROOT))))
.map(entry -> new PluginJsonRpcMethod(entry.getKey(), entry.getValue())) .map(entry -> new PluginJsonRpcMethod(entry.getKey(), entry.getValue()))
.collect(Collectors.toMap(PluginJsonRpcMethod::getName, e -> e)); .collect(Collectors.toMap(PluginJsonRpcMethod::getName, e -> e));
} }
@ -71,6 +75,7 @@ public class RpcEndpointServiceImpl implements RpcEndpointService {
*/ */
public boolean hasNamespace(final String namespace) { public boolean hasNamespace(final String namespace) {
return rpcMethods.keySet().stream() return rpcMethods.keySet().stream()
.anyMatch(key -> key.toUpperCase().startsWith(namespace.toUpperCase())); .anyMatch(
key -> key.toUpperCase(Locale.ROOT).startsWith(namespace.toUpperCase(Locale.ROOT)));
} }
} }

@ -15,6 +15,7 @@
package org.hyperledger.besu.services; package org.hyperledger.besu.services;
import org.hyperledger.besu.plugin.services.TransactionSelectionService; import org.hyperledger.besu.plugin.services.TransactionSelectionService;
import org.hyperledger.besu.plugin.services.txselection.PluginTransactionSelector;
import org.hyperledger.besu.plugin.services.txselection.PluginTransactionSelectorFactory; import org.hyperledger.besu.plugin.services.txselection.PluginTransactionSelectorFactory;
import java.util.Optional; import java.util.Optional;
@ -25,13 +26,15 @@ public class TransactionSelectionServiceImpl implements TransactionSelectionServ
private Optional<PluginTransactionSelectorFactory> factory = Optional.empty(); private Optional<PluginTransactionSelectorFactory> factory = Optional.empty();
@Override @Override
public Optional<PluginTransactionSelectorFactory> get() { public PluginTransactionSelector createPluginTransactionSelector() {
return factory; return factory
.map(PluginTransactionSelectorFactory::create)
.orElse(PluginTransactionSelector.ACCEPT_ALL);
} }
@Override @Override
public void registerTransactionSelectorFactory( public void registerPluginTransactionSelectorFactory(
final PluginTransactionSelectorFactory transactionSelectorFactory) { final PluginTransactionSelectorFactory pluginTransactionSelectorFactory) {
factory = Optional.ofNullable(transactionSelectorFactory); factory = Optional.ofNullable(pluginTransactionSelectorFactory);
} }
} }

@ -45,6 +45,12 @@
<Logger name="org.apache.tuweni.discovery.DNSResolver"> <Logger name="org.apache.tuweni.discovery.DNSResolver">
<RegexFilter regex="DNS query error with .*" onMatch="DENY" onMismatch="NEUTRAL" /> <RegexFilter regex="DNS query error with .*" onMatch="DENY" onMismatch="NEUTRAL" />
</Logger> </Logger>
<Logger name="io.vertx.core.dns.DnsException">
<RegexFilter regex="DNS query error occurred:.*" onMatch="DENY" onMismatch="NEUTRAL" />
</Logger>
<Logger name="org.hyperledger.besu.ethereum.eth.transactions">
<MarkerFilter marker="INVALID_TX_REMOVED" onMatch="DENY" onMismatch="NEUTRAL" />
</Logger>
<Root level="${sys:root.log.level}"> <Root level="${sys:root.log.level}">
<AppenderRef ref="Router" /> <AppenderRef ref="Router" />
</Root> </Root>

@ -13,12 +13,6 @@
"config-key": "sync-mode", "config-key": "sync-mode",
"available-options": "org.hyperledger.besu.ethereum.eth.sync.SyncMode" "available-options": "org.hyperledger.besu.ethereum.eth.sync.SyncMode"
}, },
{
"prompt-type": "CONFIRM",
"question": "Do you want to enable pruning?",
"config-key": "pruning-enabled",
"default-option": "no"
},
{ {
"prompt-type": "INPUT", "prompt-type": "INPUT",
"question": "What is the data directory ?", "question": "What is the data directory ?",

@ -28,10 +28,12 @@ import org.hyperledger.besu.consensus.merge.MergeProtocolSchedule;
import org.hyperledger.besu.consensus.merge.PostMergeContext; import org.hyperledger.besu.consensus.merge.PostMergeContext;
import org.hyperledger.besu.consensus.merge.TransitionProtocolSchedule; import org.hyperledger.besu.consensus.merge.TransitionProtocolSchedule;
import org.hyperledger.besu.consensus.merge.TransitionUtils; import org.hyperledger.besu.consensus.merge.TransitionUtils;
import org.hyperledger.besu.ethereum.chain.BadBlockManager;
import org.hyperledger.besu.ethereum.chain.Blockchain; import org.hyperledger.besu.ethereum.chain.Blockchain;
import org.hyperledger.besu.ethereum.chain.GenesisState; import org.hyperledger.besu.ethereum.chain.GenesisState;
import org.hyperledger.besu.ethereum.core.BlockHeader; import org.hyperledger.besu.ethereum.core.BlockHeader;
import org.hyperledger.besu.ethereum.core.MilestoneStreamingProtocolSchedule; import org.hyperledger.besu.ethereum.core.MilestoneStreamingProtocolSchedule;
import org.hyperledger.besu.ethereum.core.MiningParameters;
import org.hyperledger.besu.ethereum.forkid.ForkId; import org.hyperledger.besu.ethereum.forkid.ForkId;
import org.hyperledger.besu.ethereum.forkid.ForkIdManager; import org.hyperledger.besu.ethereum.forkid.ForkIdManager;
import org.hyperledger.besu.ethereum.mainnet.DefaultProtocolSchedule; import org.hyperledger.besu.ethereum.mainnet.DefaultProtocolSchedule;
@ -107,8 +109,9 @@ public class ForkIdsNetworkConfigTest {
new ForkId(Bytes.ofUnsignedInt(0xb715077dL), 13773000L), new ForkId(Bytes.ofUnsignedInt(0xb715077dL), 13773000L),
new ForkId(Bytes.ofUnsignedInt(0x20c327fcL), 15050000L), new ForkId(Bytes.ofUnsignedInt(0x20c327fcL), 15050000L),
new ForkId(Bytes.ofUnsignedInt(0xf0afd0e3L), 1681338455L), new ForkId(Bytes.ofUnsignedInt(0xf0afd0e3L), 1681338455L),
new ForkId(Bytes.ofUnsignedInt(0xdce96c2dL), 0L), new ForkId(Bytes.ofUnsignedInt(0xdce96c2dL), 1710338135L),
new ForkId(Bytes.ofUnsignedInt(0xdce96c2dL), 0L)) new ForkId(Bytes.ofUnsignedInt(0x9f3d2254L), 0L),
new ForkId(Bytes.ofUnsignedInt(0x9f3d2254L), 0L))
}, },
new Object[] { new Object[] {
NetworkName.MORDOR, NetworkName.MORDOR,
@ -185,10 +188,14 @@ public class ForkIdsNetworkConfigTest {
final GenesisConfigOptions configOptions = genesisConfigFile.getConfigOptions(); final GenesisConfigOptions configOptions = genesisConfigFile.getConfigOptions();
MilestoneStreamingProtocolSchedule preMergeProtocolSchedule = MilestoneStreamingProtocolSchedule preMergeProtocolSchedule =
new MilestoneStreamingProtocolSchedule( new MilestoneStreamingProtocolSchedule(
(DefaultProtocolSchedule) MainnetProtocolSchedule.fromConfig(configOptions)); (DefaultProtocolSchedule)
MainnetProtocolSchedule.fromConfig(
configOptions, MiningParameters.MINING_DISABLED, new BadBlockManager()));
MilestoneStreamingProtocolSchedule postMergeProtocolSchedule = MilestoneStreamingProtocolSchedule postMergeProtocolSchedule =
new MilestoneStreamingProtocolSchedule( new MilestoneStreamingProtocolSchedule(
(DefaultProtocolSchedule) MergeProtocolSchedule.create(configOptions, false)); (DefaultProtocolSchedule)
MergeProtocolSchedule.create(
configOptions, false, MiningParameters.MINING_DISABLED, new BadBlockManager()));
final MilestoneStreamingTransitionProtocolSchedule schedule = final MilestoneStreamingTransitionProtocolSchedule schedule =
new MilestoneStreamingTransitionProtocolSchedule( new MilestoneStreamingTransitionProtocolSchedule(
preMergeProtocolSchedule, postMergeProtocolSchedule); preMergeProtocolSchedule, postMergeProtocolSchedule);

@ -41,6 +41,7 @@ import org.hyperledger.besu.ethereum.p2p.config.NetworkingConfiguration;
import org.hyperledger.besu.ethereum.privacy.storage.PrivacyStorageProvider; import org.hyperledger.besu.ethereum.privacy.storage.PrivacyStorageProvider;
import org.hyperledger.besu.ethereum.privacy.storage.keyvalue.PrivacyKeyValueStorageProviderBuilder; import org.hyperledger.besu.ethereum.privacy.storage.keyvalue.PrivacyKeyValueStorageProviderBuilder;
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier; import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.evm.internal.EvmConfiguration; import org.hyperledger.besu.evm.internal.EvmConfiguration;
import org.hyperledger.besu.evm.precompile.PrecompiledContract; import org.hyperledger.besu.evm.precompile.PrecompiledContract;
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem; import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
@ -97,11 +98,15 @@ public class PrivacyTest {
private BesuController setUpControllerWithPrivacyEnabled(final boolean flexibleEnabled) private BesuController setUpControllerWithPrivacyEnabled(final boolean flexibleEnabled)
throws IOException, URISyntaxException { throws IOException, URISyntaxException {
final Path dbDir = Files.createTempDirectory(dataDir, "database"); final Path dbDir = Files.createTempDirectory(dataDir, "database");
final var miningParameters = MiningParameters.newDefault();
final var dataStorageConfiguration = DataStorageConfiguration.DEFAULT_FOREST_CONFIG;
final PrivacyParameters privacyParameters = final PrivacyParameters privacyParameters =
new PrivacyParameters.Builder() new PrivacyParameters.Builder()
.setEnabled(true) .setEnabled(true)
.setEnclaveUrl(new URI("http://127.0.0.1:8000")) .setEnclaveUrl(new URI("http://127.0.0.1:8000"))
.setStorageProvider(createKeyValueStorageProvider(dataDir, dbDir)) .setStorageProvider(
createKeyValueStorageProvider(
dataDir, dbDir, dataStorageConfiguration, miningParameters))
.setEnclaveFactory(new EnclaveFactory(vertx)) .setEnclaveFactory(new EnclaveFactory(vertx))
.setFlexiblePrivacyGroupsEnabled(flexibleEnabled) .setFlexiblePrivacyGroupsEnabled(flexibleEnabled)
.build(); .build();
@ -111,7 +116,8 @@ public class PrivacyTest {
.ethProtocolConfiguration(EthProtocolConfiguration.defaultConfig()) .ethProtocolConfiguration(EthProtocolConfiguration.defaultConfig())
.storageProvider(new InMemoryKeyValueStorageProvider()) .storageProvider(new InMemoryKeyValueStorageProvider())
.networkId(BigInteger.ONE) .networkId(BigInteger.ONE)
.miningParameters(MiningParameters.newDefault()) .miningParameters(miningParameters)
.dataStorageConfiguration(dataStorageConfiguration)
.nodeKey(NodeKeyUtils.generate()) .nodeKey(NodeKeyUtils.generate())
.metricsSystem(new NoOpMetricsSystem()) .metricsSystem(new NoOpMetricsSystem())
.dataDirectory(dataDir) .dataDirectory(dataDir)
@ -125,7 +131,12 @@ public class PrivacyTest {
} }
private PrivacyStorageProvider createKeyValueStorageProvider( private PrivacyStorageProvider createKeyValueStorageProvider(
final Path dataDir, final Path dbDir) { final Path dataDir,
final Path dbDir,
final DataStorageConfiguration dataStorageConfiguration,
final MiningParameters miningParameters) {
final var besuConfiguration = new BesuConfigurationImpl();
besuConfiguration.init(dataDir, dbDir, dataStorageConfiguration, miningParameters);
return new PrivacyKeyValueStorageProviderBuilder() return new PrivacyKeyValueStorageProviderBuilder()
.withStorageFactory( .withStorageFactory(
new RocksDBKeyValuePrivacyStorageFactory( new RocksDBKeyValuePrivacyStorageFactory(
@ -138,7 +149,7 @@ public class PrivacyTest {
DEFAULT_IS_HIGH_SPEC), DEFAULT_IS_HIGH_SPEC),
Arrays.asList(KeyValueSegmentIdentifier.values()), Arrays.asList(KeyValueSegmentIdentifier.values()),
RocksDBMetricsFactory.PRIVATE_ROCKS_DB_METRICS))) RocksDBMetricsFactory.PRIVATE_ROCKS_DB_METRICS)))
.withCommonConfiguration(new BesuConfigurationImpl(dataDir, dbDir)) .withCommonConfiguration(besuConfiguration)
.withMetricsSystem(new NoOpMetricsSystem()) .withMetricsSystem(new NoOpMetricsSystem())
.build(); .build();
} }

@ -57,6 +57,7 @@ import org.hyperledger.besu.ethereum.p2p.peers.EnodeURLImpl;
import org.hyperledger.besu.ethereum.storage.StorageProvider; import org.hyperledger.besu.ethereum.storage.StorageProvider;
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier; import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier;
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueStorageProviderBuilder; import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueStorageProviderBuilder;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.evm.internal.EvmConfiguration; import org.hyperledger.besu.evm.internal.EvmConfiguration;
import org.hyperledger.besu.metrics.ObservableMetricsSystem; import org.hyperledger.besu.metrics.ObservableMetricsSystem;
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem; import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
@ -163,12 +164,13 @@ public final class RunnerTest {
final Path dataDirAhead = Files.createTempDirectory(temp, "db-ahead"); final Path dataDirAhead = Files.createTempDirectory(temp, "db-ahead");
final Path dbAhead = dataDirAhead.resolve("database"); final Path dbAhead = dataDirAhead.resolve("database");
final int blockCount = 500; final int blockCount = 500;
final NodeKey aheadDbNodeKey = NodeKeyUtils.createFrom(KeyPairUtil.loadKeyPair(dbAhead)); final NodeKey aheadDbNodeKey = NodeKeyUtils.createFrom(KeyPairUtil.loadKeyPair(dataDirAhead));
final NodeKey behindDbNodeKey = NodeKeyUtils.generate(); final NodeKey behindDbNodeKey = NodeKeyUtils.generate();
final SynchronizerConfiguration syncConfigAhead = final SynchronizerConfiguration syncConfigAhead =
SynchronizerConfiguration.builder().syncMode(SyncMode.FULL).build(); SynchronizerConfiguration.builder().syncMode(SyncMode.FULL).build();
final ObservableMetricsSystem noOpMetricsSystem = new NoOpMetricsSystem(); final ObservableMetricsSystem noOpMetricsSystem = new NoOpMetricsSystem();
final var miningParameters = MiningParameters.newDefault();
final var dataStorageConfiguration = DataStorageConfiguration.DEFAULT_FOREST_CONFIG;
// Setup Runner with blocks // Setup Runner with blocks
final BesuController controllerAhead = final BesuController controllerAhead =
getController( getController(
@ -176,8 +178,10 @@ public final class RunnerTest {
syncConfigAhead, syncConfigAhead,
dataDirAhead, dataDirAhead,
aheadDbNodeKey, aheadDbNodeKey,
createKeyValueStorageProvider(dataDirAhead, dbAhead), createKeyValueStorageProvider(
noOpMetricsSystem); dataDirAhead, dbAhead, dataStorageConfiguration, miningParameters),
noOpMetricsSystem,
miningParameters);
setupState( setupState(
blockCount, controllerAhead.getProtocolSchedule(), controllerAhead.getProtocolContext()); blockCount, controllerAhead.getProtocolSchedule(), controllerAhead.getProtocolContext());
@ -232,7 +236,8 @@ public final class RunnerTest {
dataDirBehind, dataDirBehind,
behindDbNodeKey, behindDbNodeKey,
new InMemoryKeyValueStorageProvider(), new InMemoryKeyValueStorageProvider(),
noOpMetricsSystem); noOpMetricsSystem,
miningParameters);
final EnodeURL aheadEnode = runnerAhead.getLocalEnode().get(); final EnodeURL aheadEnode = runnerAhead.getLocalEnode().get();
final EthNetworkConfig behindEthNetworkConfiguration = final EthNetworkConfig behindEthNetworkConfiguration =
@ -375,7 +380,13 @@ public final class RunnerTest {
return GenesisConfigFile.fromConfig(jsonNode); return GenesisConfigFile.fromConfig(jsonNode);
} }
private StorageProvider createKeyValueStorageProvider(final Path dataDir, final Path dbDir) { private StorageProvider createKeyValueStorageProvider(
final Path dataDir,
final Path dbDir,
final DataStorageConfiguration dataStorageConfiguration,
final MiningParameters miningParameters) {
final var besuConfiguration = new BesuConfigurationImpl();
besuConfiguration.init(dataDir, dbDir, dataStorageConfiguration, miningParameters);
return new KeyValueStorageProviderBuilder() return new KeyValueStorageProviderBuilder()
.withStorageFactory( .withStorageFactory(
new RocksDBKeyValueStorageFactory( new RocksDBKeyValueStorageFactory(
@ -387,7 +398,7 @@ public final class RunnerTest {
DEFAULT_IS_HIGH_SPEC), DEFAULT_IS_HIGH_SPEC),
Arrays.asList(KeyValueSegmentIdentifier.values()), Arrays.asList(KeyValueSegmentIdentifier.values()),
RocksDBMetricsFactory.PUBLIC_ROCKS_DB_METRICS)) RocksDBMetricsFactory.PUBLIC_ROCKS_DB_METRICS))
.withCommonConfiguration(new BesuConfigurationImpl(dataDir, dbDir)) .withCommonConfiguration(besuConfiguration)
.withMetricsSystem(new NoOpMetricsSystem()) .withMetricsSystem(new NoOpMetricsSystem())
.build(); .build();
} }
@ -443,26 +454,27 @@ public final class RunnerTest {
final Path dataDir, final Path dataDir,
final NodeKey nodeKey, final NodeKey nodeKey,
final StorageProvider storageProvider, final StorageProvider storageProvider,
final ObservableMetricsSystem metricsSystem) { final ObservableMetricsSystem metricsSystem,
final MiningParameters miningParameters) {
return new MainnetBesuControllerBuilder() return new MainnetBesuControllerBuilder()
.genesisConfigFile(genesisConfig) .genesisConfigFile(genesisConfig)
.synchronizerConfiguration(syncConfig) .synchronizerConfiguration(syncConfig)
.ethProtocolConfiguration(EthProtocolConfiguration.defaultConfig()) .ethProtocolConfiguration(EthProtocolConfiguration.defaultConfig())
.dataDirectory(dataDir) .dataDirectory(dataDir)
.networkId(NETWORK_ID) .networkId(NETWORK_ID)
.miningParameters(MiningParameters.newDefault()) .miningParameters(miningParameters)
.nodeKey(nodeKey) .nodeKey(nodeKey)
.storageProvider(storageProvider) .storageProvider(storageProvider)
.metricsSystem(metricsSystem) .metricsSystem(metricsSystem)
.privacyParameters(PrivacyParameters.DEFAULT) .privacyParameters(PrivacyParameters.DEFAULT)
.clock(TestClock.fixed()) .clock(TestClock.fixed())
.transactionPoolConfiguration(TransactionPoolConfiguration.DEFAULT) .transactionPoolConfiguration(TransactionPoolConfiguration.DEFAULT)
.dataStorageConfiguration(DataStorageConfiguration.DEFAULT_FOREST_CONFIG)
.gasLimitCalculator(GasLimitCalculator.constant()) .gasLimitCalculator(GasLimitCalculator.constant())
.evmConfiguration(EvmConfiguration.DEFAULT) .evmConfiguration(EvmConfiguration.DEFAULT)
.networkConfiguration(NetworkingConfiguration.create()) .networkConfiguration(NetworkingConfiguration.create())
.randomPeerPriority(Boolean.FALSE) .randomPeerPriority(Boolean.FALSE)
.maxPeers(25) .maxPeers(25)
.lowerBoundPeers(25)
.maxRemotelyInitiatedPeers(15) .maxRemotelyInitiatedPeers(15)
.build(); .build();
} }

@ -432,8 +432,7 @@ public abstract class JsonBlockImporterTest {
return createController(genesisConfigFile); return createController(genesisConfigFile);
} }
protected BesuController createController(final GenesisConfigFile genesisConfigFile) protected BesuController createController(final GenesisConfigFile genesisConfigFile) {
throws IOException {
return new BesuController.Builder() return new BesuController.Builder()
.fromGenesisConfig(genesisConfigFile, SyncMode.FAST) .fromGenesisConfig(genesisConfigFile, SyncMode.FAST)
.synchronizerConfiguration(SynchronizerConfiguration.builder().build()) .synchronizerConfiguration(SynchronizerConfiguration.builder().build())

@ -32,7 +32,7 @@ import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.ethereum.api.graphql.GraphQLConfiguration; import org.hyperledger.besu.ethereum.api.graphql.GraphQLConfiguration;
import org.hyperledger.besu.ethereum.api.jsonrpc.JsonRpcConfiguration; import org.hyperledger.besu.ethereum.api.jsonrpc.JsonRpcConfiguration;
import org.hyperledger.besu.ethereum.api.jsonrpc.websocket.WebSocketConfiguration; import org.hyperledger.besu.ethereum.api.jsonrpc.websocket.WebSocketConfiguration;
import org.hyperledger.besu.ethereum.core.ImmutableMiningParameters; import org.hyperledger.besu.ethereum.core.MiningParameters;
import org.hyperledger.besu.ethereum.eth.sync.SyncMode; import org.hyperledger.besu.ethereum.eth.sync.SyncMode;
import org.hyperledger.besu.ethereum.eth.sync.SynchronizerConfiguration; import org.hyperledger.besu.ethereum.eth.sync.SynchronizerConfiguration;
import org.hyperledger.besu.ethereum.p2p.peers.EnodeURLImpl; import org.hyperledger.besu.ethereum.p2p.peers.EnodeURLImpl;
@ -179,11 +179,9 @@ public class CascadingDefaultProviderTest extends CommandTestAbstract {
verify(mockControllerBuilder).synchronizerConfiguration(syncConfigurationCaptor.capture()); verify(mockControllerBuilder).synchronizerConfiguration(syncConfigurationCaptor.capture());
final SynchronizerConfiguration syncConfig = syncConfigurationCaptor.getValue(); final SynchronizerConfiguration syncConfig = syncConfigurationCaptor.getValue();
assertThat(syncConfig.getSyncMode()).isEqualTo(SyncMode.FAST); assertThat(syncConfig.getSyncMode()).isEqualTo(SyncMode.SNAP);
assertThat(syncConfig.getFastSyncMinimumPeerCount()).isEqualTo(5); assertThat(syncConfig.getFastSyncMinimumPeerCount()).isEqualTo(5);
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty(); assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty(); assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
} }
@ -200,14 +198,11 @@ public class CascadingDefaultProviderTest extends CommandTestAbstract {
setEnvironmentVariable("BESU_MINER_COINBASE", expectedCoinbase); setEnvironmentVariable("BESU_MINER_COINBASE", expectedCoinbase);
parseCommand("--config-file", configFile); parseCommand("--config-file", configFile);
verify(mockControllerBuilder) final var captMiningParameters = ArgumentCaptor.forClass(MiningParameters.class);
.miningParameters( verify(mockControllerBuilder).miningParameters(captMiningParameters.capture());
ImmutableMiningParameters.builder()
.mutableInitValues( assertThat(captMiningParameters.getValue().getCoinbase())
ImmutableMiningParameters.MutableInitValues.builder() .contains(Address.fromHexString(expectedCoinbase));
.coinbase(Address.fromHexString(expectedCoinbase))
.build())
.build());
} }
/** /**
@ -222,14 +217,11 @@ public class CascadingDefaultProviderTest extends CommandTestAbstract {
setEnvironmentVariable("BESU_MINER_COINBASE", "0x0000000000000000000000000000000000000004"); setEnvironmentVariable("BESU_MINER_COINBASE", "0x0000000000000000000000000000000000000004");
parseCommand("--config-file", configFile, "--miner-coinbase", expectedCoinbase); parseCommand("--config-file", configFile, "--miner-coinbase", expectedCoinbase);
verify(mockControllerBuilder) final var captMiningParameters = ArgumentCaptor.forClass(MiningParameters.class);
.miningParameters( verify(mockControllerBuilder).miningParameters(captMiningParameters.capture());
ImmutableMiningParameters.builder()
.mutableInitValues( assertThat(captMiningParameters.getValue().getCoinbase())
ImmutableMiningParameters.MutableInitValues.builder() .contains(Address.fromHexString(expectedCoinbase));
.coinbase(Address.fromHexString(expectedCoinbase))
.build())
.build());
} }
/** /**

@ -75,21 +75,24 @@ import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.ethereum.worldstate.WorldStateArchive; import org.hyperledger.besu.ethereum.worldstate.WorldStateArchive;
import org.hyperledger.besu.metrics.prometheus.MetricsConfiguration; import org.hyperledger.besu.metrics.prometheus.MetricsConfiguration;
import org.hyperledger.besu.pki.config.PkiKeyStoreConfiguration; import org.hyperledger.besu.pki.config.PkiKeyStoreConfiguration;
import org.hyperledger.besu.plugin.services.BesuConfiguration;
import org.hyperledger.besu.plugin.services.PicoCLIOptions; import org.hyperledger.besu.plugin.services.PicoCLIOptions;
import org.hyperledger.besu.plugin.services.StorageService; import org.hyperledger.besu.plugin.services.StorageService;
import org.hyperledger.besu.plugin.services.TransactionSelectionService;
import org.hyperledger.besu.plugin.services.securitymodule.SecurityModule; import org.hyperledger.besu.plugin.services.securitymodule.SecurityModule;
import org.hyperledger.besu.plugin.services.storage.KeyValueStorageFactory; import org.hyperledger.besu.plugin.services.storage.KeyValueStorageFactory;
import org.hyperledger.besu.plugin.services.storage.PrivacyKeyValueStorageFactory; import org.hyperledger.besu.plugin.services.storage.PrivacyKeyValueStorageFactory;
import org.hyperledger.besu.plugin.services.storage.SegmentIdentifier; import org.hyperledger.besu.plugin.services.storage.SegmentIdentifier;
import org.hyperledger.besu.services.BesuConfigurationImpl;
import org.hyperledger.besu.services.BesuPluginContextImpl; import org.hyperledger.besu.services.BesuPluginContextImpl;
import org.hyperledger.besu.services.BlockchainServiceImpl;
import org.hyperledger.besu.services.PermissioningServiceImpl; import org.hyperledger.besu.services.PermissioningServiceImpl;
import org.hyperledger.besu.services.PluginTransactionValidatorServiceImpl;
import org.hyperledger.besu.services.PrivacyPluginServiceImpl; import org.hyperledger.besu.services.PrivacyPluginServiceImpl;
import org.hyperledger.besu.services.RpcEndpointServiceImpl; import org.hyperledger.besu.services.RpcEndpointServiceImpl;
import org.hyperledger.besu.services.SecurityModuleServiceImpl; import org.hyperledger.besu.services.SecurityModuleServiceImpl;
import org.hyperledger.besu.services.StorageServiceImpl; import org.hyperledger.besu.services.StorageServiceImpl;
import org.hyperledger.besu.services.TransactionPoolValidatorServiceImpl;
import org.hyperledger.besu.services.TransactionSelectionServiceImpl; import org.hyperledger.besu.services.TransactionSelectionServiceImpl;
import org.hyperledger.besu.services.TransactionSimulationServiceImpl;
import org.hyperledger.besu.services.kvstore.InMemoryKeyValueStorage; import org.hyperledger.besu.services.kvstore.InMemoryKeyValueStorage;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
@ -165,6 +168,9 @@ public abstract class CommandTestAbstract {
"clique", "clique",
new JsonObject().put("blockperiodseconds", POA_BLOCK_PERIOD_SECONDS))); new JsonObject().put("blockperiodseconds", POA_BLOCK_PERIOD_SECONDS)));
protected static final JsonObject GENESIS_WITH_ZERO_BASE_FEE_MARKET =
new JsonObject().put("config", new JsonObject().put("zeroBaseFee", true));
protected final PrintStream originalOut = System.out; protected final PrintStream originalOut = System.out;
protected final PrintStream originalErr = System.err; protected final PrintStream originalErr = System.err;
protected final ByteArrayOutputStream commandOutput = new ByteArrayOutputStream(); protected final ByteArrayOutputStream commandOutput = new ByteArrayOutputStream();
@ -205,9 +211,10 @@ public abstract class CommandTestAbstract {
@Mock protected JsonBlockImporter jsonBlockImporter; @Mock protected JsonBlockImporter jsonBlockImporter;
@Mock protected RlpBlockImporter rlpBlockImporter; @Mock protected RlpBlockImporter rlpBlockImporter;
@Mock protected StorageServiceImpl storageService; @Mock protected StorageServiceImpl storageService;
@Mock protected TransactionSelectionServiceImpl txSelectionService;
@Mock protected SecurityModuleServiceImpl securityModuleService; @Mock protected SecurityModuleServiceImpl securityModuleService;
@Mock protected SecurityModule securityModule; @Mock protected SecurityModule securityModule;
@Mock protected BesuConfiguration commonPluginConfiguration; @Mock protected BesuConfigurationImpl commonPluginConfiguration;
@Mock protected KeyValueStorageFactory rocksDBStorageFactory; @Mock protected KeyValueStorageFactory rocksDBStorageFactory;
@Mock protected PrivacyKeyValueStorageFactory rocksDBSPrivacyStorageFactory; @Mock protected PrivacyKeyValueStorageFactory rocksDBSPrivacyStorageFactory;
@Mock protected PicoCLIOptions cliOptions; @Mock protected PicoCLIOptions cliOptions;
@ -217,6 +224,7 @@ public abstract class CommandTestAbstract {
@Mock protected WorldStateArchive mockWorldStateArchive; @Mock protected WorldStateArchive mockWorldStateArchive;
@Mock protected TransactionPool mockTransactionPool; @Mock protected TransactionPool mockTransactionPool;
@Mock protected PrivacyPluginServiceImpl privacyPluginService; @Mock protected PrivacyPluginServiceImpl privacyPluginService;
@Mock protected StorageProvider storageProvider;
@SuppressWarnings("PrivateStaticFinalLoggers") // @Mocks are inited by JUnit @SuppressWarnings("PrivateStaticFinalLoggers") // @Mocks are inited by JUnit
@Mock @Mock
@ -275,8 +283,6 @@ public abstract class CommandTestAbstract {
when(mockControllerBuilder.clock(any())).thenReturn(mockControllerBuilder); when(mockControllerBuilder.clock(any())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.isRevertReasonEnabled(false)).thenReturn(mockControllerBuilder); when(mockControllerBuilder.isRevertReasonEnabled(false)).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.storageProvider(any())).thenReturn(mockControllerBuilder); when(mockControllerBuilder.storageProvider(any())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.isPruningEnabled(anyBoolean())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.pruningConfiguration(any())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.genesisConfigOverrides(any())).thenReturn(mockControllerBuilder); when(mockControllerBuilder.genesisConfigOverrides(any())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.gasLimitCalculator(any())).thenReturn(mockControllerBuilder); when(mockControllerBuilder.gasLimitCalculator(any())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.requiredBlocks(any())).thenReturn(mockControllerBuilder); when(mockControllerBuilder.requiredBlocks(any())).thenReturn(mockControllerBuilder);
@ -288,12 +294,8 @@ public abstract class CommandTestAbstract {
when(mockControllerBuilder.maxPeers(anyInt())).thenReturn(mockControllerBuilder); when(mockControllerBuilder.maxPeers(anyInt())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.chainPruningConfiguration(any())).thenReturn(mockControllerBuilder); when(mockControllerBuilder.chainPruningConfiguration(any())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.maxPeers(anyInt())).thenReturn(mockControllerBuilder); when(mockControllerBuilder.maxPeers(anyInt())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.lowerBoundPeers(anyInt())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.maxRemotelyInitiatedPeers(anyInt())) when(mockControllerBuilder.maxRemotelyInitiatedPeers(anyInt()))
.thenReturn(mockControllerBuilder); .thenReturn(mockControllerBuilder);
when(mockControllerBuilder.transactionSelectorFactory(any())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.pluginTransactionValidatorFactory(any()))
.thenReturn(mockControllerBuilder);
when(mockControllerBuilder.besuComponent(any(BesuComponent.class))) when(mockControllerBuilder.besuComponent(any(BesuComponent.class)))
.thenReturn(mockControllerBuilder); .thenReturn(mockControllerBuilder);
when(mockControllerBuilder.cacheLastBlocks(any())).thenReturn(mockControllerBuilder); when(mockControllerBuilder.cacheLastBlocks(any())).thenReturn(mockControllerBuilder);
@ -313,6 +315,7 @@ public abstract class CommandTestAbstract {
when(mockProtocolContext.getBlockchain()).thenReturn(mockMutableBlockchain); when(mockProtocolContext.getBlockchain()).thenReturn(mockMutableBlockchain);
lenient().when(mockProtocolContext.getWorldStateArchive()).thenReturn(mockWorldStateArchive); lenient().when(mockProtocolContext.getWorldStateArchive()).thenReturn(mockWorldStateArchive);
when(mockController.getTransactionPool()).thenReturn(mockTransactionPool); when(mockController.getTransactionPool()).thenReturn(mockTransactionPool);
when(mockController.getStorageProvider()).thenReturn(storageProvider);
when(mockRunnerBuilder.vertx(any())).thenReturn(mockRunnerBuilder); when(mockRunnerBuilder.vertx(any())).thenReturn(mockRunnerBuilder);
when(mockRunnerBuilder.besuController(any())).thenReturn(mockRunnerBuilder); when(mockRunnerBuilder.besuController(any())).thenReturn(mockRunnerBuilder);
@ -378,6 +381,9 @@ public abstract class CommandTestAbstract {
lenient() lenient()
.when(mockBesuPluginContext.getService(StorageService.class)) .when(mockBesuPluginContext.getService(StorageService.class))
.thenReturn(Optional.of(storageService)); .thenReturn(Optional.of(storageService));
lenient()
.when(mockBesuPluginContext.getService(TransactionSelectionService.class))
.thenReturn(Optional.of(txSelectionService));
lenient() lenient()
.doReturn(mockPkiBlockCreationConfiguration) .doReturn(mockPkiBlockCreationConfiguration)
@ -565,7 +571,9 @@ public abstract class CommandTestAbstract {
pkiBlockCreationConfigProvider, pkiBlockCreationConfigProvider,
rpcEndpointServiceImpl, rpcEndpointServiceImpl,
new TransactionSelectionServiceImpl(), new TransactionSelectionServiceImpl(),
new PluginTransactionValidatorServiceImpl()); new TransactionPoolValidatorServiceImpl(),
new TransactionSimulationServiceImpl(),
new BlockchainServiceImpl());
} }
@Override @Override
@ -739,4 +747,15 @@ public abstract class CommandTestAbstract {
assertThat(stringArgumentCaptor.getAllValues().get(2)).isEqualTo(mainOption); assertThat(stringArgumentCaptor.getAllValues().get(2)).isEqualTo(mainOption);
} }
/**
* Check logger calls
*
* <p>Here we check the calls to logger and not the result of the log line as we don't test the
* logger itself but the fact that we call it.
*
* @param stringToLog the string that is logged
*/
void verifyMultiOptionsConstraintLoggerCall(final String stringToLog) {
verify(mockLogger, atLeast(1)).warn(stringToLog);
}
} }

@ -22,6 +22,7 @@ import org.hyperledger.besu.cli.CommandTestAbstract;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Consumer; import java.util.function.Consumer;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
@ -113,10 +114,18 @@ public abstract class AbstractCLIOptionsTest<D, T extends CLIOptions<D>>
protected abstract T getOptionsFromBesuCommand(final TestBesuCommand besuCommand); protected abstract T getOptionsFromBesuCommand(final TestBesuCommand besuCommand);
protected void internalTestSuccess(final Consumer<D> assertion, final String... args) { protected void internalTestSuccess(final Consumer<D> assertion, final String... args) {
internalTestSuccess((bc, conf) -> conf, assertion, args);
}
protected void internalTestSuccess(
final BiFunction<TestBesuCommand, D, D> runtimeConf,
final Consumer<D> assertion,
final String... args) {
final TestBesuCommand cmd = parseCommand(args); final TestBesuCommand cmd = parseCommand(args);
final T options = getOptionsFromBesuCommand(cmd); final T options = getOptionsFromBesuCommand(cmd);
final D config = options.toDomainObject(); final D config = runtimeConf.apply(cmd, options.toDomainObject());
assertion.accept(config); assertion.accept(config);
assertThat(commandOutput.toString(UTF_8)).isEmpty(); assertThat(commandOutput.toString(UTF_8)).isEmpty();

@ -34,7 +34,6 @@ import java.io.IOException;
import java.nio.file.Path; import java.nio.file.Path;
import java.time.Duration; import java.time.Duration;
import java.util.Optional; import java.util.Optional;
import java.util.OptionalInt;
import org.apache.tuweni.bytes.Bytes; import org.apache.tuweni.bytes.Bytes;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
@ -316,6 +315,7 @@ public class MiningOptionsTest extends AbstractCLIOptionsTest<MiningParameters,
@Test @Test
public void blockTxsSelectionMaxTimeDefaultValue() { public void blockTxsSelectionMaxTimeDefaultValue() {
internalTestSuccess( internalTestSuccess(
this::runtimeConfiguration,
miningParams -> miningParams ->
assertThat(miningParams.getNonPoaBlockTxsSelectionMaxTime()) assertThat(miningParams.getNonPoaBlockTxsSelectionMaxTime())
.isEqualTo(DEFAULT_NON_POA_BLOCK_TXS_SELECTION_MAX_TIME)); .isEqualTo(DEFAULT_NON_POA_BLOCK_TXS_SELECTION_MAX_TIME));
@ -324,6 +324,7 @@ public class MiningOptionsTest extends AbstractCLIOptionsTest<MiningParameters,
@Test @Test
public void blockTxsSelectionMaxTimeOption() { public void blockTxsSelectionMaxTimeOption() {
internalTestSuccess( internalTestSuccess(
this::runtimeConfiguration,
miningParams -> assertThat(miningParams.getBlockTxsSelectionMaxTime()).isEqualTo(1700L), miningParams -> assertThat(miningParams.getBlockTxsSelectionMaxTime()).isEqualTo(1700L),
"--block-txs-selection-max-time", "--block-txs-selection-max-time",
"1700"); "1700");
@ -343,6 +344,7 @@ public class MiningOptionsTest extends AbstractCLIOptionsTest<MiningParameters,
@Test @Test
public void poaBlockTxsSelectionMaxTimeDefaultValue() { public void poaBlockTxsSelectionMaxTimeDefaultValue() {
internalTestSuccess( internalTestSuccess(
this::runtimeConfiguration,
miningParams -> miningParams ->
assertThat(miningParams.getPoaBlockTxsSelectionMaxTime()) assertThat(miningParams.getPoaBlockTxsSelectionMaxTime())
.isEqualTo(DEFAULT_POA_BLOCK_TXS_SELECTION_MAX_TIME)); .isEqualTo(DEFAULT_POA_BLOCK_TXS_SELECTION_MAX_TIME));
@ -352,6 +354,7 @@ public class MiningOptionsTest extends AbstractCLIOptionsTest<MiningParameters,
public void poaBlockTxsSelectionMaxTimeOption() throws IOException { public void poaBlockTxsSelectionMaxTimeOption() throws IOException {
final Path genesisFileIBFT2 = createFakeGenesisFile(VALID_GENESIS_IBFT2_POST_LONDON); final Path genesisFileIBFT2 = createFakeGenesisFile(VALID_GENESIS_IBFT2_POST_LONDON);
internalTestSuccess( internalTestSuccess(
this::runtimeConfiguration,
miningParams -> miningParams ->
assertThat(miningParams.getPoaBlockTxsSelectionMaxTime()) assertThat(miningParams.getPoaBlockTxsSelectionMaxTime())
.isEqualTo(PositiveNumber.fromInt(80)), .isEqualTo(PositiveNumber.fromInt(80)),
@ -365,6 +368,7 @@ public class MiningOptionsTest extends AbstractCLIOptionsTest<MiningParameters,
public void poaBlockTxsSelectionMaxTimeOptionOver100Percent() throws IOException { public void poaBlockTxsSelectionMaxTimeOptionOver100Percent() throws IOException {
final Path genesisFileClique = createFakeGenesisFile(VALID_GENESIS_CLIQUE_POST_LONDON); final Path genesisFileClique = createFakeGenesisFile(VALID_GENESIS_CLIQUE_POST_LONDON);
internalTestSuccess( internalTestSuccess(
this::runtimeConfiguration,
miningParams -> { miningParams -> {
assertThat(miningParams.getPoaBlockTxsSelectionMaxTime()) assertThat(miningParams.getPoaBlockTxsSelectionMaxTime())
.isEqualTo(PositiveNumber.fromInt(200)); .isEqualTo(PositiveNumber.fromInt(200));
@ -412,16 +416,19 @@ public class MiningOptionsTest extends AbstractCLIOptionsTest<MiningParameters,
@Override @Override
protected MiningOptions getOptionsFromBesuCommand(final TestBesuCommand besuCommand) { protected MiningOptions getOptionsFromBesuCommand(final TestBesuCommand besuCommand) {
final var miningOptions = besuCommand.getMiningOptions(); return besuCommand.getMiningOptions();
miningOptions.setGenesisBlockPeriodSeconds(
besuCommand.getActualGenesisConfigOptions().isPoa()
? OptionalInt.of(POA_BLOCK_PERIOD_SECONDS)
: OptionalInt.empty());
return miningOptions;
} }
@Override @Override
protected String[] getNonOptionFields() { protected String[] getNonOptionFields() {
return new String[] {"maybeGenesisBlockPeriodSeconds"}; return new String[] {"transactionSelectionService"};
}
private MiningParameters runtimeConfiguration(
final TestBesuCommand besuCommand, final MiningParameters miningParameters) {
if (besuCommand.getActualGenesisConfigOptions().isPoa()) {
miningParameters.setBlockPeriodSeconds(POA_BLOCK_PERIOD_SECONDS);
}
return miningParameters;
} }
} }

@ -176,7 +176,6 @@ public class NetworkingOptionsTest
NetworkingConfiguration.DEFAULT_INITIATE_CONNECTIONS_FREQUENCY_SEC + 10); NetworkingConfiguration.DEFAULT_INITIATE_CONNECTIONS_FREQUENCY_SEC + 10);
config.setCheckMaintainedConnectionsFrequency( config.setCheckMaintainedConnectionsFrequency(
NetworkingConfiguration.DEFAULT_CHECK_MAINTAINED_CONNECTIONS_FREQUENCY_SEC + 10); NetworkingConfiguration.DEFAULT_CHECK_MAINTAINED_CONNECTIONS_FREQUENCY_SEC + 10);
config.setPeerLowerBound(NetworkingConfiguration.DEFAULT_PEER_LOWER_BOUND - 10);
return config; return config;
} }

@ -168,6 +168,31 @@ public class TransactionPoolOptionsTest
"101"); "101");
} }
@Test
public void blobPriceBump() {
final Percentage blobPriceBump = Percentage.fromInt(50);
internalTestSuccess(
config -> assertThat(config.getBlobPriceBump()).isEqualTo(blobPriceBump),
"--tx-pool-blob-price-bump",
blobPriceBump.toString());
}
@Test
public void invalidBlobPriceBumpShouldFail() {
internalTestFailure(
"Invalid value: 101, should be a number between 0 and 100 inclusive",
"--tx-pool-blob-price-bump",
"101");
}
@Test
public void defaultBlobPriceBump() {
internalTestSuccess(
config ->
assertThat(config.getBlobPriceBump())
.isEqualTo(TransactionPoolConfiguration.DEFAULT_BLOB_PRICE_BUMP));
}
@Test @Test
public void txFeeCap() { public void txFeeCap() {
final Wei txFeeCap = Wei.fromEth(2); final Wei txFeeCap = Wei.fromEth(2);
@ -370,4 +395,9 @@ public class TransactionPoolOptionsTest
protected TransactionPoolOptions getOptionsFromBesuCommand(final TestBesuCommand besuCommand) { protected TransactionPoolOptions getOptionsFromBesuCommand(final TestBesuCommand besuCommand) {
return besuCommand.getTransactionPoolOptions(); return besuCommand.getTransactionPoolOptions();
} }
@Override
protected String[] getNonOptionFields() {
return new String[] {"transactionPoolValidatorService"};
}
} }

@ -20,8 +20,8 @@ import static org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration.
import org.hyperledger.besu.cli.options.AbstractCLIOptionsTest; import org.hyperledger.besu.cli.options.AbstractCLIOptionsTest;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration; import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.ethereum.worldstate.DataStorageFormat;
import org.hyperledger.besu.ethereum.worldstate.ImmutableDataStorageConfiguration; import org.hyperledger.besu.ethereum.worldstate.ImmutableDataStorageConfiguration;
import org.hyperledger.besu.plugin.services.storage.DataStorageFormat;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
@ -88,6 +88,46 @@ public class DataStorageOptionsTest
"511"); "511");
} }
@Test
public void bonsaiCodeUsingCodeHashEnabledCanBeEnabled() {
internalTestSuccess(
dataStorageConfiguration ->
assertThat(
dataStorageConfiguration.getUnstable().getBonsaiCodeStoredByCodeHashEnabled())
.isEqualTo(true),
"--Xbonsai-code-using-code-hash-enabled",
"true");
}
@Test
public void bonsaiCodeUsingCodeHashEnabledCanBeDisabled() {
internalTestSuccess(
dataStorageConfiguration ->
assertThat(
dataStorageConfiguration.getUnstable().getBonsaiCodeStoredByCodeHashEnabled())
.isEqualTo(false),
"--Xbonsai-code-using-code-hash-enabled",
"false");
}
@Test
public void receiptCompactionCanBeEnabled() {
internalTestSuccess(
dataStorageConfiguration ->
assertThat(dataStorageConfiguration.getReceiptCompactionEnabled()).isEqualTo(true),
"--receipt-compaction-enabled",
"true");
}
@Test
public void receiptCompactionCanBeDisabled() {
internalTestSuccess(
dataStorageConfiguration ->
assertThat(dataStorageConfiguration.getReceiptCompactionEnabled()).isEqualTo(false),
"--receipt-compaction-enabled",
"false");
}
@Override @Override
protected DataStorageConfiguration createDefaultDomainObject() { protected DataStorageConfiguration createDefaultDomainObject() {
return DataStorageConfiguration.DEFAULT_CONFIG; return DataStorageConfiguration.DEFAULT_CONFIG;

@ -25,17 +25,14 @@ import static org.hyperledger.besu.ethereum.core.VariablesStorageHelper.populate
import static org.hyperledger.besu.ethereum.core.VariablesStorageHelper.populateVariablesStorage; import static org.hyperledger.besu.ethereum.core.VariablesStorageHelper.populateVariablesStorage;
import static org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.BLOCKCHAIN; import static org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.BLOCKCHAIN;
import static org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.VARIABLES; import static org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.VARIABLES;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
import org.hyperledger.besu.cli.CommandTestAbstract; import org.hyperledger.besu.cli.CommandTestAbstract;
import org.hyperledger.besu.ethereum.storage.keyvalue.VariablesKeyValueStorage;
import org.hyperledger.besu.services.kvstore.InMemoryKeyValueStorage; import org.hyperledger.besu.services.kvstore.InMemoryKeyValueStorage;
import org.hyperledger.besu.services.kvstore.SegmentedInMemoryKeyValueStorage; import org.hyperledger.besu.services.kvstore.SegmentedInMemoryKeyValueStorage;
import org.hyperledger.besu.services.kvstore.SegmentedKeyValueStorageAdapter; import org.hyperledger.besu.services.kvstore.SegmentedKeyValueStorageAdapter;
import java.util.List;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoExtension;
@ -67,10 +64,10 @@ public class StorageSubCommandTest extends CommandTestAbstract {
final var kvVariables = new SegmentedKeyValueStorageAdapter(VARIABLES, kvVariablesSeg); final var kvVariables = new SegmentedKeyValueStorageAdapter(VARIABLES, kvVariablesSeg);
final var kvBlockchainSeg = new SegmentedInMemoryKeyValueStorage(); final var kvBlockchainSeg = new SegmentedInMemoryKeyValueStorage();
final var kvBlockchain = new SegmentedKeyValueStorageAdapter(BLOCKCHAIN, kvBlockchainSeg); final var kvBlockchain = new SegmentedKeyValueStorageAdapter(BLOCKCHAIN, kvBlockchainSeg);
when(rocksDBStorageFactory.create(eq(List.of(VARIABLES)), any(), any())) when(storageProvider.createVariablesStorage())
.thenReturn(kvVariablesSeg); .thenReturn(new VariablesKeyValueStorage(kvVariables));
when(rocksDBStorageFactory.create(eq(List.of(BLOCKCHAIN)), any(), any())) when(storageProvider.getStorageBySegmentIdentifier(BLOCKCHAIN)).thenReturn(kvBlockchain);
.thenReturn(kvBlockchainSeg);
final var variableValues = getSampleVariableValues(); final var variableValues = getSampleVariableValues();
assertNoVariablesInStorage(kvBlockchain); assertNoVariablesInStorage(kvBlockchain);
populateVariablesStorage(kvVariables, variableValues); populateVariablesStorage(kvVariables, variableValues);
@ -87,10 +84,9 @@ public class StorageSubCommandTest extends CommandTestAbstract {
final var kvVariables = new SegmentedKeyValueStorageAdapter(VARIABLES, kvVariablesSeg); final var kvVariables = new SegmentedKeyValueStorageAdapter(VARIABLES, kvVariablesSeg);
final var kvBlockchainSeg = new SegmentedInMemoryKeyValueStorage(); final var kvBlockchainSeg = new SegmentedInMemoryKeyValueStorage();
final var kvBlockchain = new SegmentedKeyValueStorageAdapter(BLOCKCHAIN, kvBlockchainSeg); final var kvBlockchain = new SegmentedKeyValueStorageAdapter(BLOCKCHAIN, kvBlockchainSeg);
when(rocksDBStorageFactory.create(eq(List.of(VARIABLES)), any(), any())) when(storageProvider.createVariablesStorage())
.thenReturn(kvVariablesSeg); .thenReturn(new VariablesKeyValueStorage(kvVariables));
when(rocksDBStorageFactory.create(eq(List.of(BLOCKCHAIN)), any(), any())) when(storageProvider.getStorageBySegmentIdentifier(BLOCKCHAIN)).thenReturn(kvBlockchain);
.thenReturn(kvBlockchainSeg);
final var variableValues = getSampleVariableValues(); final var variableValues = getSampleVariableValues();
variableValues.remove(FINALIZED_BLOCK_HASH); variableValues.remove(FINALIZED_BLOCK_HASH);
@ -108,8 +104,9 @@ public class StorageSubCommandTest extends CommandTestAbstract {
public void doesNothingWhenVariablesAlreadyReverted() { public void doesNothingWhenVariablesAlreadyReverted() {
final var kvVariables = new InMemoryKeyValueStorage(); final var kvVariables = new InMemoryKeyValueStorage();
final var kvBlockchain = new InMemoryKeyValueStorage(); final var kvBlockchain = new InMemoryKeyValueStorage();
when(rocksDBStorageFactory.create(eq(VARIABLES), any(), any())).thenReturn(kvVariables); when(storageProvider.createVariablesStorage())
when(rocksDBStorageFactory.create(eq(BLOCKCHAIN), any(), any())).thenReturn(kvBlockchain); .thenReturn(new VariablesKeyValueStorage(kvVariables));
when(storageProvider.getStorageBySegmentIdentifier(BLOCKCHAIN)).thenReturn(kvBlockchain);
final var variableValues = getSampleVariableValues(); final var variableValues = getSampleVariableValues();
assertNoVariablesInStorage(kvVariables); assertNoVariablesInStorage(kvVariables);

@ -19,7 +19,7 @@ import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration.Unstable.DEFAULT_BONSAI_TRIE_LOG_PRUNING_WINDOW_SIZE; import static org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration.Unstable.DEFAULT_BONSAI_TRIE_LOG_PRUNING_WINDOW_SIZE;
import static org.hyperledger.besu.ethereum.worldstate.DataStorageFormat.BONSAI; import static org.hyperledger.besu.plugin.services.storage.DataStorageFormat.BONSAI;
import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.spy; import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
@ -87,7 +87,9 @@ class TrieLogHelperTest {
inMemoryWorldState = inMemoryWorldState =
new BonsaiWorldStateKeyValueStorage( new BonsaiWorldStateKeyValueStorage(
storageProvider, new NoOpMetricsSystem(), DataStorageConfiguration.DEFAULT_CONFIG); storageProvider,
new NoOpMetricsSystem(),
DataStorageConfiguration.DEFAULT_BONSAI_CONFIG);
createTrieLog(blockHeader1); createTrieLog(blockHeader1);
@ -291,8 +293,8 @@ class TrieLogHelperTest {
nonValidatingTrieLogHelper.prune( nonValidatingTrieLogHelper.prune(
dataStorageConfiguration, inMemoryWorldStateSpy, blockchain, dataDir)) dataStorageConfiguration, inMemoryWorldStateSpy, blockchain, dataDir))
.isInstanceOf(RuntimeException.class) .isInstanceOf(RuntimeException.class)
.hasMessage( .hasMessageContaining(
"Remaining trie logs (0) did not match --bonsai-historical-block-limit (3). Trie logs backup files have not been deleted, it is safe to rerun the subcommand."); "Remaining trie logs (0) did not match --bonsai-historical-block-limit (3)");
} }
@Test @Test

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save