Merge branch 'main' into feature/worldstate-refactor

pull/6209/head
Karim TAAM 10 months ago committed by GitHub
commit 1ca33624f4
GPG Key ID: B5690EEEBB952194
Changed files (changed line counts in parentheses):
1. .github/workflows/acceptance-tests.yml (114)
2. .github/workflows/artifacts.yml (76)
3. .github/workflows/checks.yml (36)
4. .github/workflows/codeql.yml (41)
5. .github/workflows/dco-merge-group.yml (10)
6. .github/workflows/dco.yml (20)
7. .github/workflows/docker.yml (113)
8. .github/workflows/gradle-wrapper-validation.yml (11)
9. .github/workflows/integration-tests.yml (73)
10. .github/workflows/nightly.yml (121)
11. .github/workflows/parallel-unit-tests.yml (49)
12. .github/workflows/pr-checklist-on-open.yml (8)
13. .github/workflows/pre-review.yml (103)
14. .github/workflows/reference-tests.yml (147)
15. .github/workflows/release.yml (11)
16. .github/workflows/repolinter.yml (24)
17. .github/workflows/sonarcloud.yml (12)
18. CHANGELOG.md (14)
19. acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/condition/priv/PrivConditions.java (5)
20. acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/condition/priv/PrivateSyncingStatusCondition.java (40)
21. acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/transaction/privacy/PrivSyncingTransactions.java (40)
22. acceptance-tests/dsl/src/main/java/org/hyperledger/besu/tests/acceptance/dsl/transaction/privacy/PrivacyTransactions.java (4)
23. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/BftPrivacyClusterAcceptanceTest.java (3)
24. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/DeployPrivateSmartContractAcceptanceTest.java (1)
25. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/EnclaveErrorAcceptanceTest.java (3)
26. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/FlexiblePrivacyAcceptanceTest.java (4)
27. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/PluginPrivacySigningAcceptanceTest.java (2)
28. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/PrivCallAcceptanceTest.java (1)
29. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/PrivDebugGetStateRootFlexibleGroupAcceptanceTest.java (3)
30. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/PrivDebugGetStateRootOffchainGroupAcceptanceTest.java (3)
31. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/PrivGetCodeAcceptanceTest.java (1)
32. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/PrivGetLogsAcceptanceTest.java (1)
33. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/PrivGetPrivateTransactionAcceptanceTest.java (3)
34. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/PrivacyClusterAcceptanceTest.java (4)
35. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/PrivacyGroupAcceptanceTest.java (4)
36. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/PrivacyReceiptAcceptanceTest.java (2)
37. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/PrivateContractPublicStateAcceptanceTest.java (3)
38. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/PrivateGenesisAcceptanceTest.java (1)
39. acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/privacy/PrivateLogFilterAcceptanceTest.java (1)
40. acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/01_cancun_prepare_payload.json (10)
41. acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/02_cancun_getPayloadV3.json (8)
42. acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/03_cancun_newPayloadV3.json (6)
43. acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/04_cancun_forkchoiceUpdatedV3.json (8)
44. acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/05_eip6110_forkchoiceUpdatedV3.json (8)
45. acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/06_eip6110_getPayloadV6110.json (10)
46. acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/09_eip6110_newPayloadV6110.json (6)
47. acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/10_eip6110_forkchoiceUpdatedV3.json (8)
48. acceptance-tests/tests/src/test/resources/jsonrpc/engine/eip6110/test-cases/11_eip6110_getPayloadV6110.json (6)
49. besu/src/main/java/org/hyperledger/besu/RunnerBuilder.java (2)
50. besu/src/main/java/org/hyperledger/besu/cli/BesuCommand.java (259)
51. besu/src/main/java/org/hyperledger/besu/cli/options/stable/GraphQlOptions.java (112)
52. besu/src/main/java/org/hyperledger/besu/cli/options/stable/PermissionsOptions.java (206)
53. besu/src/main/java/org/hyperledger/besu/cli/options/unstable/EvmOptions.java (2)
54. besu/src/main/java/org/hyperledger/besu/cli/options/unstable/MetricsCLIOptions.java (1)
55. besu/src/main/java/org/hyperledger/besu/cli/options/unstable/NetworkingOptions.java (4)
56. besu/src/test/java/org/hyperledger/besu/cli/BesuCommandTest.java (477)
57. besu/src/test/java/org/hyperledger/besu/cli/options/GraphQlOptionsTest.java (93)
58. besu/src/test/java/org/hyperledger/besu/cli/options/PermissionsOptionsTest.java (453)
59. build.gradle (15)
60. config/src/main/java/org/hyperledger/besu/config/GenesisConfigOptions.java (7)
61. config/src/main/java/org/hyperledger/besu/config/JsonGenesisConfigOptions.java (22)
62. config/src/main/java/org/hyperledger/besu/config/StubGenesisConfigOptions.java (18)
63. config/src/test/java/org/hyperledger/besu/config/GenesisConfigFileTest.java (88)
64. config/src/test/java/org/hyperledger/besu/config/GenesisConfigOptionsTest.java (83)
65. config/src/test/resources/all_forks.json (1)
66. consensus/qbft/src/main/java/org/hyperledger/besu/consensus/qbft/jsonrpc/methods/QbftGetValidatorsByBlockNumber.java (9)
67. consensus/qbft/src/test/java/org/hyperledger/besu/consensus/qbft/jsonrpc/methods/QbftGetValidatorsByBlockNumberTest.java (30)
68. ethereum/api/build.gradle (26)
69. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/chain/GenesisState.java (20)
70. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/mainnet/MainnetPrecompiledContractRegistries.java (8)
71. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/mainnet/MainnetProtocolSpecFactory.java (10)
72. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/mainnet/MainnetProtocolSpecs.java (44)
73. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/mainnet/ProtocolScheduleBuilder.java (2)
74. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/storage/keyvalue/KeyValueSegmentIdentifier.java (24)
75. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/trie/bonsai/worldview/BonsaiWorldStateUpdateAccumulator.java (27)
76. ethereum/core/src/main/java/org/hyperledger/besu/ethereum/util/LogUtil.java (39)
77. ethereum/core/src/test/java/org/hyperledger/besu/ethereum/chain/GenesisStateTest.java (79)
78. ethereum/core/src/test/java/org/hyperledger/besu/ethereum/mainnet/ProtocolScheduleBuilderTest.java (27)
79. ethereum/core/src/test/resources/org/hyperledger/besu/ethereum/chain/genesis_prague.json (4077)
80. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/manager/EthPeer.java (15)
81. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/manager/EthPeers.java (12)
82. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/manager/EthProtocolManager.java (15)
83. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/manager/task/AbstractGetHeadersFromPeerTask.java (14)
84. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/manager/task/AbstractPeerRequestTask.java (2)
85. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/manager/task/AbstractRetryingSwitchingPeerTask.java (11)
86. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/manager/task/BufferedGetPooledTransactionsFromPeerFetcher.java (4)
87. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/manager/task/GetHeadersFromPeerByHashTask.java (2)
88. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/manager/task/GetHeadersFromPeerByNumberTask.java (2)
89. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/manager/task/GetNodeDataFromPeerTask.java (2)
90. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/manager/task/GetPooledTransactionsFromPeerTask.java (2)
91. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/manager/task/GetReceiptsFromPeerTask.java (2)
92. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/manager/task/RetryingGetBlocksFromPeersTask.java (2)
93. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/manager/task/RetryingGetHeadersEndingAtFromPeerByHashTask.java (2)
94. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/sync/ChainHeadTracker.java (6)
95. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/sync/TrailingPeerLimiter.java (2)
96. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/sync/backwardsync/BackwardSyncContext.java (2)
97. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/sync/fastsync/PivotSelectorFromPeers.java (2)
98. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/sync/fastsync/SyncTargetManager.java (6)
99. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/sync/tasks/RetryingGetHeaderFromPeerByHashTask.java (2)
100. ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/transactions/NewPooledTransactionHashesMessageProcessor.java (2)
Some files were not shown because too many files have changed in this diff.

@ -0,0 +1,114 @@
name: acceptance-tests
on:
pull_request:
pull_request_review:
types: [submitted]
env:
GRADLE_OPTS: "-Xmx6g -Dorg.gradle.daemon=false"
total-runners: 16
jobs:
shouldRun:
name: checks to ensure we should run
# necessary because there is no single PR approved event, need to check all comments/approvals/denials
runs-on: ubuntu-22.04
outputs:
shouldRun: ${{steps.shouldRun.outputs.result}}
steps:
- name: required check
id: shouldRun
uses: actions/github-script@v7.0.1
env:
# fun fact, this changes based on incoming event, it will be different when we run this on pushes to main
RELEVANT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
with:
script: |
const { RELEVANT_SHA } = process.env;
const { data: { statuses } } = await github.rest.repos.getCombinedStatusForRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: RELEVANT_SHA,
});
const acceptanceTested = statuses && statuses.filter(({ context }) => context === 'acceptance-tests');
const alreadyRun = acceptanceTested && acceptanceTested.find(({ state }) => state === 'success') > 0;
const { data: reviews } = await github.rest.pulls.listReviews({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: context.issue.number,
});
const approvingReviews = reviews && reviews.filter(review => review.state === 'APPROVED');
const shouldRun = !alreadyRun && github.actor != 'dependabot[bot]' && (approvingReviews.length > 0);
console.log("tests should be run = %j", shouldRun);
console.log("alreadyRun = %j", alreadyRun);
console.log("approvingReviews = %j", approvingReviews.length);
return shouldRun;
acceptanceTestEthereum:
runs-on: ubuntu-22.04
name: "Acceptance Runner"
needs: shouldRun
permissions:
statuses: write
checks: write
if: ${{ needs.shouldRun.outputs.shouldRun == 'true'}}
strategy:
fail-fast: true
matrix:
runner_index: [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15]
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: get acceptance test report
uses: dawidd6/action-download-artifact@v2
with:
branch: main
name_is_regexp: true
name: 'acceptance-node-\d*\d-test-results'
path: tmp/junit-xml-reports-downloaded
if_no_artifact_found: true
- name: setup gradle
uses: gradle/gradle-build-action@v2.12.0
- name: Split tests
id: split-tests
uses: r7kamura/split-tests-by-timings@v0
with:
reports: tmp/junit-xml-reports-downloaded
glob: 'acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/**/*Test.java'
total: ${{env.total-runners}}
index: ${{ matrix.runner_index }}
- name: write out test list
run: echo "${{ steps.split-tests.outputs.paths }}" >> testList.txt
- name: format gradle args
#regex means: first truncate file paths to align with package name, then swap path delimiter with package delimiter,
#then drop file extension, then insert --tests option between each.
run: cat testList.txt | sed -e 's@acceptance-tests/tests/src/test/java/@--tests\ @g;s@/@.@g;s/\.java//g' > gradleArgs.txt
- name: run acceptance tests
run: ./gradlew acceptanceTest `cat gradleArgs.txt` -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
- name: cleanup tempfiles
run: rm testList.txt gradleArgs.txt
- name: Upload Acceptance Test Results
uses: actions/upload-artifact@v3.1.0
with:
name: acceptance-node-${{matrix.runner_index}}-test-results
path: 'acceptance-tests/tests/build/test-results/acceptanceTest/TEST-*.xml'
- name: Publish Test Report
uses: mikepenz/action-junit-report@v4
if: (success() || failure()) # always run even if the build step fails
with:
report_paths: 'acceptance-tests/tests/build/test-results/acceptanceTest/TEST-*.xml'
acceptance-tests:
runs-on: ubuntu-22.04
needs: [ acceptanceTestEthereum ]
permissions:
checks: write
statuses: write
steps:
- name: consolidation
run: echo "consolidating statuses"
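To see what the `format gradle args` step above produces, the sed expression can be run on a single test path in isolation; a minimal sketch, using an invented test file name:

echo "acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/ExampleAcceptanceTest.java" \
| sed -e 's@acceptance-tests/tests/src/test/java/@--tests\ @g;s@/@.@g;s/\.java//g'
# prints: --tests org.hyperledger.besu.tests.acceptance.ExampleAcceptanceTest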

@ -0,0 +1,76 @@
name: artifacts
on:
release:
types:
- prereleased
jobs:
artifacts:
runs-on: ubuntu-22.04
permissions:
contents: write
steps:
- name: checkout
uses: actions/checkout@v4.1.1
- name: Set up JDK 17
uses: actions/setup-java@v4.0.0
with:
distribution: 'temurin'
java-version: '17'
- name: setup gradle
uses: gradle/gradle-build-action@v2.12.0
- name: assemble distributions
run:
./gradlew -Prelease.releaseVersion=${{github.ref_name}} assemble -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
- name: hashes
id: hashes
run: |
cd build/distributions
echo "zipSha=$(shasum -a 256 besu*.zip)" >> $GITHUB_OUTPUT
echo "tarSha=$(shasum -a 256 besu*.tar.gz)" >> $GITHUB_OUTPUT
- name: upload tarball
uses: actions/upload-artifact@v3
with:
path: 'build/distributions/besu*.tar.gz'
name: besu-${{ github.ref_name }}.tar.gz
- name: upload zipfile
uses: actions/upload-artifact@v3
with:
path: 'build/distributions/besu*.zip'
name: besu-${{ github.ref_name }}.zip
- name: Upload Release assets
uses: softprops/action-gh-release@v1
with:
append_body: true
files: |
build/distributions/besu*.tar.gz
build/distributions/besu*.zip
body: |
${{steps.hashes.outputs.tarSha}}
${{steps.hashes.outputs.zipSha}}
testWindows:
runs-on: windows-2022
needs: artifacts
timeout-minutes: 10
if: ${{ github.actor != 'dependabot[bot]' }}
steps:
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: adopt
java-version: 17
- name: Download zip
uses: actions/download-artifact@v3
with:
name: besu-${{ github.ref_name }}.zip
- name: test Besu
run: |
unzip besu-*.zip -d besu-tmp
cd besu-tmp
mv besu-* ../besu
cd ..
besu\bin\besu.bat --help
besu\bin\besu.bat --version
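The `hashes` step above appends the `shasum -a 256` output for both archives to the release notes, so a downloaded archive can be checked with the same tool; the file name below is illustrative, not taken from the workflow:

shasum -a 256 besu-<release-version>.tar.gz
# compare the printed digest with the tarSha value appended to the GitHub release body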

@ -1,36 +0,0 @@
name: checks
on:
push:
branches: [ main ]
pull_request:
workflow_dispatch:
jobs:
spotless:
runs-on: [besu-research-ubuntu-16]
if: ${{ github.actor != 'dependabot[bot]' }}
steps:
- name: Checkout Repo
uses: actions/checkout@v4
- name: Set up Java
uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: 17
cache: gradle
- name: spotless
run: ./gradlew --no-daemon --parallel clean spotlessCheck
javadoc_17:
runs-on: [besu-research-ubuntu-8]
if: ${{ github.actor != 'dependabot[bot]' }}
steps:
- name: Checkout Repo
uses: actions/checkout@v4
- name: Set up Java 17
uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: 17
cache: gradle
- name: javadoc (JDK 17)
run: ./gradlew --no-daemon clean javadoc

@ -14,40 +14,29 @@ name: "CodeQL"
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
paths-ignore:
- '**/*.json'
- '**/*.md'
- '**/*.properties'
- '**/*.txt'
jobs:
analyze:
name: Analyze
- runs-on: [besu-research-ubuntu-16]
+ runs-on: ubuntu-22.04
permissions:
actions: read
contents: read
security-events: write
- strategy:
- fail-fast: false
- matrix:
- language: [ 'java' ]
- # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
- # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@v4.1.1
- name: Set up Java
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v4.0.0
with:
distribution: 'temurin'
java-version: 17
- cache: gradle
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
@ -58,10 +47,10 @@ jobs:
# Prefix the list here with "+" to use these queries and those in the config file.
queries: security-and-quality,security-extended
- # Autobuild failed (OOM)
- # Hence, supply memory args for gradle build
- - run: |
-     JAVA_OPTS="-Xmx1000M" ./gradlew --no-scan compileJava
+ - name: setup gradle
+   uses: gradle/gradle-build-action@v2.12.0
+ - name: compileJava noscan
+   run: |
+     JAVA_OPTS="-Xmx2048M" ./gradlew --no-scan compileJava
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2

@ -1,10 +0,0 @@
name: dco
on:
merge_group:
jobs:
dco:
runs-on: [besu-research-ubuntu-8]
if: ${{ github.actor != 'dependabot[bot]' }}
steps:
- run: echo "This DCO job runs on merge_queue event and doesn't check PR contents"

@ -1,20 +0,0 @@
name: dco
on:
pull_request:
workflow_dispatch:
jobs:
dco:
runs-on: [besu-research-ubuntu-8]
if: ${{ github.actor != 'dependabot[bot]' }}
steps:
- run: echo "This DCO job runs on pull_request event and workflow_dispatch"
- name: Get PR Commits
id: 'get-pr-commits'
uses: tim-actions/get-pr-commits@v1.2.0
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: DCO Check
uses: tim-actions/dco@v1.1.0
with:
commits: ${{ steps.get-pr-commits.outputs.commits }}

@ -0,0 +1,113 @@
name: docker
on:
release:
types:
- prereleased
env:
registry: ghcr.io
jobs:
hadolint:
runs-on: ubuntu-22.04
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: setup gradle
uses: gradle/gradle-build-action@v2.12.0
- name: hadoLint_openj9-jdk_17
run: docker run --rm -i hadolint/hadolint < docker/openj9-jdk-17/Dockerfile
- name: hadoLint_openjdk_17
run: docker run --rm -i hadolint/hadolint < docker/openjdk-17/Dockerfile
- name: hadoLint_openjdk_17_debug
run: docker run --rm -i hadolint/hadolint < docker/openjdk-17-debug/Dockerfile
- name: hadoLint_openjdk_latest
run: docker run --rm -i hadolint/hadolint < docker/openjdk-latest/Dockerfile
- name: hadoLint_graalvm
run: docker run --rm -i hadolint/hadolint < docker/graalvm/Dockerfile
buildDocker:
needs: hadolint
permissions:
contents: read
packages: write
strategy:
fail-fast: false
matrix:
platform:
- ubuntu-22.04
- [self-hosted, ARM64]
runs-on: ${{ matrix.platform }}
steps:
- name: Prepare
id: prep
run: |
platform=${{ matrix.platform }}
if [ "$platform" = 'ubuntu-22.04' ]; then
echo "PLATFORM_PAIR=linux-amd64" >> $GITHUB_OUTPUT
echo "ARCH=amd64" >> $GITHUB_OUTPUT
else
echo "PLATFORM_PAIR=linux-arm64" >> $GITHUB_OUTPUT
echo "ARCH=arm64" >> $GITHUB_OUTPUT
fi
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: short sha
id: shortSha
run: echo "sha=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: setup gradle
uses: gradle/gradle-build-action@v2.12.0
- name: install goss
run: |
mkdir -p docker/reports
curl -L https://github.com/aelsabbahy/goss/releases/download/v0.4.4/goss-${{ steps.prep.outputs.PLATFORM_PAIR }} -o ./docker/tests/goss-${{ steps.prep.outputs.PLATFORM_PAIR }}
- name: build and test docker
uses: gradle/gradle-build-action@v2.12.0
env:
architecture: ${{ steps.prep.outputs.ARCH }}
with:
arguments: testDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Prelease.releaseVersion=${{ github.ref_name }}
- name: login to ghcr
uses: docker/login-action@v3.0.0
with:
registry: ${{ env.registry }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: publish
env:
architecture: ${{ steps.prep.outputs.ARCH }}
run: ./gradlew --no-daemon dockerUpload -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Prelease.releaseVersion=${{ github.ref_name }}
multiArch:
needs: buildDocker
runs-on: ubuntu-22.04
permissions:
contents: read
packages: write
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: setup gradle
uses: gradle/gradle-build-action@v2.12.0
- name: login to ghcr
uses: docker/login-action@v3.0.0
with:
registry: ${{ env.registry }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: multi-arch docker
run: ./gradlew manifestDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Prelease.releaseVersion=${{ github.ref_name }}
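After the `multiArch` job publishes the manifest, the image lives in the registry configured by `env.registry` (ghcr.io here). A rough local check, with the organisation and tag as placeholders rather than values taken from this workflow:

docker pull ghcr.io/<org>/besu:<release-tag>
docker manifest inspect ghcr.io/<org>/besu:<release-tag>
# the manifest should list both linux/amd64 and linux/arm64 entries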

@ -1,11 +0,0 @@
# SPDX-License-Identifier: Apache-2.0
name: "Validate Gradle Wrapper"
on: [push, pull_request]
jobs:
validation:
name: "Gradle Wrapper Validation"
runs-on: [besu-research-ubuntu-8]
steps:
- uses: actions/checkout@v4
- uses: gradle/wrapper-validation-action@v1

@ -0,0 +1,73 @@
name: integration-tests
on:
pull_request:
pull_request_review:
types:
- submitted
env:
GRADLE_OPTS: "-Xmx6g -Dorg.gradle.daemon=false"
jobs:
shouldRun:
name: checks to ensure we should run
runs-on: ubuntu-22.04
outputs:
shouldRun: ${{steps.shouldRun.outputs.result}}
steps:
- name: required check
id: shouldRun
uses: actions/github-script@v7.0.1
env:
# fun fact, this changes based on incoming event, it will be different when we run this on pushes to main
RELEVANT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
with:
script: |
const { RELEVANT_SHA } = process.env;
const { data: { statuses } } = await github.rest.repos.getCombinedStatusForRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: RELEVANT_SHA,
});
const intTested = statuses && statuses.filter(({ context }) => context === 'integration-tests');
const alreadyRun = intTested && intTested.find(({ state }) => state === 'success') > 0;
const { data: reviews } = await github.rest.pulls.listReviews({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: context.issue.number,
});
const approvingReviews = reviews && reviews.filter(review => review.state === 'APPROVED');
const shouldRun = !alreadyRun && github.actor != 'dependabot[bot]' && (approvingReviews.length > 0);
console.log("tests should be run = %j", shouldRun);
console.log("alreadyRun = %j", alreadyRun);
console.log("approvingReviews = %j", approvingReviews.length);
return shouldRun;
integration-tests:
runs-on: ubuntu-22.04
needs: shouldRun
if: ${{ needs.shouldRun.outputs.shouldRun == 'true' }}
permissions:
statuses: write
checks: write
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: setup gradle
uses: gradle/gradle-build-action@v2.12.0
- name: run integration tests
run: ./gradlew integrationTest compileJmh -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
- name: Publish Test Report
uses: mikepenz/action-junit-report@v4
if: (success() || failure())
with:
report_paths: '**/build/test-results/integrationTest/TEST-*.xml'

@ -0,0 +1,121 @@
name: nightly
on:
workflow_dispatch:
schedule:
# * is a special character in YAML so you have to quote this string
# expression evaluates to midnight every night
- cron: '0 0 * * *'
env:
nightly-tag: develop
registry: ghcr.io
jobs:
hadolint:
runs-on: ubuntu-22.04
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: setup gradle
uses: gradle/gradle-build-action@v2.12.0
- name: hadoLint_openj9-jdk_17
run: docker run --rm -i hadolint/hadolint < docker/openj9-jdk-17/Dockerfile
- name: hadoLint_openjdk_17
run: docker run --rm -i hadolint/hadolint < docker/openjdk-17/Dockerfile
- name: hadoLint_openjdk_17_debug
run: docker run --rm -i hadolint/hadolint < docker/openjdk-17-debug/Dockerfile
- name: hadoLint_openjdk_latest
run: docker run --rm -i hadolint/hadolint < docker/openjdk-latest/Dockerfile
- name: hadoLint_graalvm
run: docker run --rm -i hadolint/hadolint < docker/graalvm/Dockerfile
buildDocker:
needs: hadolint
permissions:
contents: read
packages: write
strategy:
fail-fast: false
matrix:
platform:
- ubuntu-22.04
- [self-hosted, ARM64]
runs-on: ${{ matrix.platform }}
steps:
- name: Prepare
id: prep
run: |
platform=${{ matrix.platform }}
if [ "$platform" = 'ubuntu-22.04' ]; then
echo "PLATFORM_PAIR=linux-amd64" >> $GITHUB_OUTPUT
echo "ARCH=amd64" >> $GITHUB_OUTPUT
else
echo "PLATFORM_PAIR=linux-arm64" >> $GITHUB_OUTPUT
echo "ARCH=arm64" >> $GITHUB_OUTPUT
fi
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: short sha
id: shortSha
run: echo "sha=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: setup gradle
uses: gradle/gradle-build-action@v2.12.0
- name: build image
uses: gradle/gradle-build-action@v2.12.0
with:
arguments: distDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Pbranch=main
- name: install goss
run: |
mkdir -p docker/reports
curl -L https://github.com/aelsabbahy/goss/releases/download/v0.4.4/goss-${{ steps.prep.outputs.PLATFORM_PAIR }} -o ./docker/tests/goss-${{ steps.prep.outputs.PLATFORM_PAIR }}
- name: test docker
uses: gradle/gradle-build-action@v2.12.0
env:
architecture: ${{ steps.prep.outputs.ARCH }}
with:
arguments: testDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Pbranch=main
- name: login to ghcr
uses: docker/login-action@v3.0.0
with:
registry: ${{ env.registry }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: publish
env:
architecture: ${{ steps.prep.outputs.ARCH }}
run: ./gradlew --no-daemon dockerUpload -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Pbranch=main
multiArch:
permissions:
contents: read
packages: write
needs: buildDocker
runs-on: ubuntu-22.04
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: setup gradle
uses: gradle/gradle-build-action@v2.12.0
- name: Login to DockerHub
uses: docker/login-action@v3.0.0
with:
registry: ${{ env.registry }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: multi-arch docker
run: ./gradlew manifestDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Pbranch=main

@ -0,0 +1,49 @@
name: parallel-unit-tests
#experimental work in progress - trying to figure out how to split tests across multi-modules by runtime
on:
workflow_dispatch:
env:
GRADLE_OPTS: "-Dorg.gradle.daemon=false"
total-runners: 4
jobs:
junit:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
runner_index:
- 0
- 1
- 2
- 3
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: Split tests
id: split-tests
uses: chaosaffe/split-tests@v1-alpha.1
with:
glob: '**/src/test/java/**/*.java'
split-total: ${{ env.total-runners }}
split-index: ${{ matrix.runner_index }}
line-count: true
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: adopt
java-version: 17
cache: gradle
- name: write out test list
run: echo "${{ steps.split-tests.outputs.test-suite }}" >> testList.txt
- name: debug testfile paths
run: cat testList.txt
- name: format gradle args
# regex means: truncate file paths to align with package name, replacing with tests switch, then drop file extension,
# then swap path delimiter with package delimiter
run: cat testList.txt | sed -e 's/[^ ]*src\/test\/java\//--tests\ /g' -e 's/\.java//g' -e 's/\//\./g' >> gradleArgs.txt
- name: debug test class list
run: cat gradleArgs.txt
- name: run unit tests
run: ./gradlew test `cat gradleArgs.txt`
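The `format gradle args` regex above can likewise be checked in isolation; a sketch using an invented module path:

echo "ethereum/core/src/test/java/org/hyperledger/besu/ethereum/core/ExampleTest.java" \
| sed -e 's/[^ ]*src\/test\/java\//--tests\ /g' -e 's/\.java//g' -e 's/\//\./g'
# prints: --tests org.hyperledger.besu.ethereum.core.ExampleTest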

@ -6,9 +6,11 @@ on:
jobs:
checklist:
name: "add checklist as a comment on newly opened PRs"
- runs-on: [besu-research-ubuntu-8]
+ runs-on: ubuntu-22.04
+ permissions:
+ pull-requests: write
steps:
- - uses: actions/github-script@v5
+ - uses: actions/github-script@v7.0.1
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
@ -16,5 +18,5 @@ jobs:
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
- body: '- [ ] I thought about documentation and added the `doc-change-required` label to this PR if [updates are required](https://wiki.hyperledger.org/display/BESU/Documentation).\n- [ ] I thought about the changelog and included a [changelog update if required](https://wiki.hyperledger.org/display/BESU/Changelog).\n- [ ] If my PR includes database changes (e.g. KeyValueSegmentIdentifier) I have thought about compatibility and performed forwards and backwards compatibility tests'
+ body: '- [ ] I thought about documentation and added the `doc-change-required` label to this PR if [updates are required](https://wiki.hyperledger.org/display/BESU/Documentation).\n- [ ] I thought about the changelog and included a [changelog update if required](https://wiki.hyperledger.org/display/BESU/Changelog).\n- [ ] If my PR includes database changes (e.g. KeyValueSegmentIdentifier) I have thought about compatibility and performed forwards and backwards compatibility tests\n- [ ] I thought about running CI.\n- [ ] If I did not run CI, I ran as much locally as possible before pushing.\n-'
})

@ -0,0 +1,103 @@
name: pre-review
on:
pull_request:
workflow_dispatch:
permissions:
statuses: write
checks: write
jobs:
repolint:
name: "Repository Linting"
runs-on: ubuntu-22.04
container: ghcr.io/todogroup/repolinter:v0.11.2
steps:
- name: Checkout Code
uses: actions/checkout@v4.1.1
- name: Lint Repo
run: bundle exec /app/bin/repolinter.js --rulesetUrl https://raw.githubusercontent.com/hyperledger-labs/hyperledger-community-management-tools/main/repo_structure/repolint.json --format markdown
gradle-wrapper:
name: "Gradle Wrapper Validation"
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4.1.1
- uses: gradle/wrapper-validation-action@v1.1.0
spotless:
runs-on: ubuntu-22.04
if: ${{ github.actor != 'dependabot[bot]' }}
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: Setup Gradle
uses: gradle/gradle-build-action@v2.12.0
- name: run spotless
run: ./gradlew spotlessCheck -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
compile:
runs-on: ubuntu-22.04
timeout-minutes: 30
needs: [spotless, gradle-wrapper, repolint]
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: Setup Gradle
uses: gradle/gradle-build-action@v2.12.0
- name: Gradle Compile
run: ./gradlew build -x test -x spotlessCheck -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
unitTests:
env:
GRADLEW_UNIT_TEST_ARGS: ${{matrix.gradle_args}}
runs-on: ubuntu-22.04
needs: [ compile ]
permissions:
checks: write
statuses: write
strategy:
fail-fast: true
matrix:
gradle_args:
- "test -x besu:test -x consensus:test -x crypto:test -x ethereum:eth:test -x ethereum:api:test -x ethereum:core:test"
- "besu:test consensus:test crypto:test"
- "ethereum:api:testBonsai"
- "ethereum:api:testForest"
- "ethereum:api:testRemainder"
- "ethereum:core:test"
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: Setup Gradle
uses: gradle/gradle-build-action@v2.12.0
- name: run unit tests
id: unitTest
run: ./gradlew $GRADLEW_UNIT_TEST_ARGS -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
- name: Publish Test Report
uses: mikepenz/action-junit-report@v4
if: success() || failure() # always run even if the build step fails
with:
report_paths: '**/test-results/**/TEST-*.xml'
annotate_only: true
pre-review:
runs-on: ubuntu-22.04
needs: [unitTests]
permissions:
checks: write
statuses: write
steps:
- name: consolidation
run: echo "consolidating statuses"
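Each `gradle_args` matrix entry above is just a list of Gradle tasks, so a single unit-test shard can be approximated locally by assigning the same value the job would receive:

GRADLEW_UNIT_TEST_ARGS="besu:test consensus:test crypto:test"
./gradlew $GRADLEW_UNIT_TEST_ARGS -Dorg.gradle.parallel=true -Dorg.gradle.caching=true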

@ -0,0 +1,147 @@
name: reference-tests
on:
pull_request:
pull_request_review:
types:
- submitted
env:
GRADLE_OPTS: "-Xmx6g -Dorg.gradle.daemon=false"
total-runners: 6
jobs:
shouldRun:
name: checks to ensure we should run
# necessary because there is no single PR approved event, need to check all comments/approvals/denials
# might also be a job running, and additional approvals
runs-on: ubuntu-22.04
outputs:
shouldRun: ${{steps.shouldRun.outputs.result}}
steps:
- name: required check
id: shouldRun
uses: actions/github-script@v7.0.1
env:
# fun fact, this changes based on incoming event, it will be different when we run this on pushes to main
RELEVANT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
with:
script: |
const { RELEVANT_SHA } = process.env;
const { data: { statuses } } = await github.rest.repos.getCombinedStatusForRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: RELEVANT_SHA,
});
const refTested = statuses && statuses.filter(({ context }) => context === 'reference-tests');
const alreadyRun = refTested && refTested.find(({ state }) => state === 'success') > 0;
const { data: reviews } = await github.rest.pulls.listReviews({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: context.issue.number,
});
const approvingReviews = reviews && reviews.filter(review => review.state === 'APPROVED');
const shouldRun = !alreadyRun && github.actor != 'dependabot[bot]' && (approvingReviews.length > 0);
console.log("tests should be run = %j", shouldRun);
console.log("alreadyRun = %j", alreadyRun);
console.log("approvingReviews = %j", approvingReviews.length);
return shouldRun;
prepareReferenceTestEthereum:
runs-on: ubuntu-22.04
needs: shouldRun
if: ${{ needs.shouldRun.outputs.shouldRun == 'true' }}
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
with:
submodules: recursive
set-safe-directory: true
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: temurin
java-version: 17
- name: setup gradle
uses: gradle/gradle-build-action@v2.12.0
- name: execute generate reference tests
run: ./gradlew ethereum:referencetests:blockchainReferenceTests ethereum:referencetests:generalstateReferenceTests ethereum:referencetests:generalstateRegressionReferenceTests -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
- name: store generated tests
uses: actions/upload-artifact@v3
with:
name: 'reference-tests'
path: 'ethereum/referencetests/build/generated/sources/reference-test/**/*.java'
referenceTestEthereum:
runs-on: ubuntu-22.04
permissions:
statuses: write
checks: write
needs:
- prepareReferenceTestEthereum
if: ${{ needs.shouldRun.outputs.shouldRun == 'true' }}
strategy:
fail-fast: true
matrix:
runner_index: [0,1,2,3,4,5]
steps:
- name: Checkout Repo
uses: actions/checkout@v4.1.1
with:
submodules: recursive
- name: Set up Java
uses: actions/setup-java@v4.0.0
with:
distribution: adopt-openj9
java-version: 17
- name: retrieve generated tests
uses: actions/download-artifact@v3.0.2
with:
name: 'reference-tests'
path: 'ethereum/referencetests/build/generated/sources/reference-test/'
- name: get reference test report
uses: dawidd6/action-download-artifact@v2
with:
branch: main
name_is_regexp: true
name: 'reference-test-node-\d*\d-results'
path: tmp/ref-xml-reports-downloaded
if_no_artifact_found: true
- name: setup gradle
uses: gradle/gradle-build-action@v2.12.0
- name: Split tests
id: split-tests
uses: r7kamura/split-tests-by-timings@v0
with:
reports: tmp/ref-xml-reports-downloaded
glob: 'ethereum/referencetests/build/generated/sources/reference-test/**/*.java'
total: ${{env.total-runners}}
index: ${{ matrix.runner_index }}
- name: compose gradle args
run: echo ${{ steps.split-tests.outputs.paths }} | sed -e 's/^.*java\///' -e 's@/@.@g' -e 's/\.java//' -e 's/^/--tests /' > refTestArgs.txt
- name: run reference tests
run: ./gradlew ethereum:referenceTests:referenceTests `cat refTestArgs.txt` -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
- name: Upload Test Report
uses: actions/upload-artifact@v3
if: always() # always run even if the previous step fails
with:
name: reference-test-node-${{matrix.runner_index}}-results
path: '**/build/test-results/referenceTests/TEST-*.xml'
- name: Publish Test Report
uses: mikepenz/action-junit-report@v4
if: success() || failure() # always run even if the build step fails
with:
report_paths: '**/build/test-results/referenceTest/TEST-*.xml'
reference-tests:
runs-on: ubuntu-22.04
needs: [ referenceTestEthereum ]
permissions:
checks: write
statuses: write
steps:
- name: consolidation
run: echo "consolidating statuses"
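The `compose gradle args` step above implies generated test paths of the form .../reference-test/java/org/..., which its leading 's/^.*java\///' substitution strips; a sketch with an invented path of that shape:

echo "ethereum/referencetests/build/generated/sources/reference-test/java/org/hyperledger/besu/ethereum/vm/ExampleReferenceTest.java" \
| sed -e 's/^.*java\///' -e 's@/@.@g' -e 's/\.java//' -e 's/^/--tests /'
# prints: --tests org.hyperledger.besu.ethereum.vm.ExampleReferenceTest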

@ -1,13 +1,14 @@
name: release besu
on:
+ workflow_dispatch:
release:
- types: released
+ types: [released]
jobs:
dockerPromoteX64:
- runs-on: [besu-research-ubuntu-16]
+ runs-on: ubuntu-22.04
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v4.1.1
- - uses: actions/setup-java@v4
+ - uses: actions/setup-java@v4.0.0
with:
distribution: 'temurin' # See 'Supported distributions' for available options
java-version: '17'
@ -15,7 +16,7 @@ jobs:
- name: Login to DockerHub
run: echo '${{ secrets.DOCKER_PASSWORD_RW }}' | docker login -u '${{ secrets.DOCKER_USER_RW }}' --password-stdin
- name: Setup Gradle
- uses: gradle/gradle-build-action@v2
+ uses: gradle/gradle-build-action@v2.12.0
- name: Docker upload
run: ./gradlew "-Prelease.releaseVersion=${{ github.ref_name }}" "-PdockerOrgName=${{ secrets.DOCKER_ORG }}" dockerUploadRelease
- name: Docker manifest

@ -1,24 +0,0 @@
# SPDX-License-Identifier: Apache-2.0
# Hyperledger Repolinter Action
name: Repolinter
on:
workflow_dispatch:
push:
branches:
- master
- main
pull_request:
branches:
- master
- main
jobs:
build:
runs-on: [besu-research-ubuntu-16]
container: ghcr.io/todogroup/repolinter:v0.10.1
steps:
- name: Checkout Code
uses: actions/checkout@v4
- name: Lint Repo
run: bundle exec /app/bin/repolinter.js --rulesetUrl https://raw.githubusercontent.com/hyperledger-labs/hyperledger-community-management-tools/main/repo_structure/repolint.json --format markdown

@ -14,24 +14,26 @@ permissions:
jobs:
Analysis:
runs-on: ubuntu-latest
- if: github.repository == 'hyperledger/besu'
steps:
- name: checkout
- uses: actions/checkout@v4
+ uses: actions/checkout@v4.1.1
- name: Set up JDK 17
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v4.0.0
with:
distribution: 'temurin'
java-version: '17'
- cache: gradle
- name: Cache SonarCloud packages
uses: actions/cache@v3
with:
path: ~/.sonar/cache
key: ${{ runner.os }}-sonar
restore-keys: ${{ runner.os }}-sonar
+ - name: setup gradle
+ uses: gradle/gradle-build-action@v2.12.0
- name: Build and analyze
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
+ SONAR_ORGANIZATION: ${{ env.SONAR_ORGANIZATION }}
+ SONAR_PROJECT_KEY: $ {{ env.SONAR_PROJECT_KEY }}
- run: ./gradlew build sonarqube --continue --info
+ run: ./gradlew build sonarqube --continue --info -Dorg.gradle.parallel=true -Dorg.gradle.caching=true

@ -3,18 +3,26 @@
## 24.1.2-SNAPSHOT
### Breaking Changes
+ - Following the OpenMetrics convention, the updated Prometheus client adds the `_total` suffix to every metric of type counter, with the effect that some existing metrics have been renamed to have this suffix. If you are using the official Besu Grafana dashboard [(available here)](https://grafana.com/grafana/dashboards/16455-besu-full/), just update it to the latest revision, which accepts both the old and the new names of the affected metrics. If you have custom dashboards or use the metrics in other ways, then you need to manually update them to support the new naming.
- The `trace-filter` method in JSON-RPC API now has a default block range limit of 1000, adjustable with `--rpc-max-trace-filter-range` (thanks @alyokaz) [#6446](https://github.com/hyperledger/besu/pull/6446)
- Requesting the Ethereum Node Record (ENR) to acquire the fork id from bonded peers is now enabled by default, so the following change has been made [#5628](https://github.com/hyperledger/besu/pull/5628):
- `--Xfilter-on-enr-fork-id` has been removed. To disable the feature use `--filter-on-enr-fork-id=false`.
+ - `--engine-jwt-enabled` has been removed. Use `--engine-jwt-disabled` instead. [#6491](https://github.com/hyperledger/besu/pull/6491)
### Deprecations
### Additions and Improvements
+ - Upgrade Prometheus and Opentelemetry dependencies [#6422](https://github.com/hyperledger/besu/pull/6422)
- Add `OperationTracer.tracePrepareTransaction`, where the sender account has not yet been altered [#6453](https://github.com/hyperledger/besu/pull/6453)
- Improve the high spec flag by limiting it to a few column families [#6354](https://github.com/hyperledger/besu/pull/6354)
- Log blob count when importing a block via Engine API [#6466](https://github.com/hyperledger/besu/pull/6466)
- Introduce `--Xbonsai-limit-trie-logs-enabled` experimental feature which by default will only retain the latest 512 trie logs, saving about 3GB per week in database growth [#5390](https://github.com/hyperledger/besu/issues/5390)
- Introduce `besu storage x-trie-log prune` experimental offline subcommand which will prune all redundant trie logs except the latest 512 [#6303](https://github.com/hyperledger/besu/pull/6303)
+ - Github Actions based build.
+ - Introduce caching mechanism to optimize Keccak hash calculations for account storage slots during block processing [#6452](https://github.com/hyperledger/besu/pull/6452)
+ - Added configuration options for `pragueTime` to genesis file for Prague fork development [#6473](https://github.com/hyperledger/besu/pull/6473)
+ - Moving trielog storage to RocksDB's blobdb to improve write amplification [#6289](https://github.com/hyperledger/besu/pull/6289)
### Bug fixes
- Fix the way an advertised host configured with `--p2p-host` is treated when communicating with the originator of a PING packet [#6225](https://github.com/hyperledger/besu/pull/6225)
@ -28,7 +36,7 @@
- New `EXECUTION_HALTED` error returned if there is an error executing or simulating a transaction, with the reason for execution being halted. Replaces the generic `INTERNAL_ERROR` return code in certain cases which some applications may be checking for [#6343](https://github.com/hyperledger/besu/pull/6343)
- The Besu Docker images with `openjdk-latest` tags since 23.10.3 were incorrectly using UID 1001 instead of 1000 for the container's `besu` user. The user now uses 1000 again. Containers created from or migrated to images using UID 1001 will need to chown their persistent database files to UID 1000 (thanks @h4l) [#6360](https://github.com/hyperledger/besu/pull/6360)
- The deprecated `--privacy-onchain-groups-enabled` option has now been removed. Use the `--privacy-flexible-groups-enabled` option instead. [#6411](https://github.com/hyperledger/besu/pull/6411)
- - The time that can be spent selecting transactions during block creation is now capped at 5 seconds for PoS and PoW networks, and for PoA networks, at 75% of the block period specified in the genesis, this to prevent possible DoS in case a single transaction is taking too long to execute, and to have a stable block production rate, but it could be a breaking change if an existing network used to have transactions that takes more time to executed that the newly introduced limit, if it is mandatory for these network to keep processing these long processing transaction, then the default value of `block-txs-selection-max-time` or `poa-block-txs-selection-max-time` needs to be tuned accordingly. [#6423](https://github.com/hyperledger/besu/pull/6423)
+ - The time that can be spent selecting transactions during block creation is now capped at 5 seconds for PoS and PoW networks, and for PoA networks, at 75% of the block period specified in the genesis. This is to prevent possible DoS attacks in case a single transaction is taking too long to execute, and to have a stable block production rate. This could be a breaking change if an existing network needs to accept transactions that take more time to execute than the newly introduced limit. If it is mandatory for these networks to keep processing these long-processing transactions, then the default value of `block-txs-selection-max-time` or `poa-block-txs-selection-max-time` needs to be tuned accordingly. [#6423](https://github.com/hyperledger/besu/pull/6423)
### Deprecations
@ -42,6 +50,7 @@
- Upgrade Mockito [#6397](https://github.com/hyperledger/besu/pull/6397)
- Upgrade `tech.pegasys.discovery:discovery` [#6414](https://github.com/hyperledger/besu/pull/6414)
- Options to tune the max allowed time that can be spent selecting transactions during block creation are now stable [#6423](https://github.com/hyperledger/besu/pull/6423)
+ - Support for "pending" in `qbft_getValidatorsByBlockNumber` [#6436](https://github.com/hyperledger/besu/pull/6436)
### Bug fixes
- INTERNAL_ERROR from `eth_estimateGas` JSON/RPC calls [#6344](https://github.com/hyperledger/besu/issues/6344)
@ -61,6 +70,7 @@ Note, due to a CI race with the release job, the initial published version of 24
~~https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/24.1.1/besu-24.1.1.zip / sha256 b6b64f939e0bb4937ce90fc647e0a7073ce3e359c10352b502059955070a60c6
https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/24.1.1/besu-24.1.1.tar.gz / sha256 cfcae04c30769bf338b0740ac65870f9346d3469931bb46cdba3b2f65d311e7a~~
## 24.1.0
### Breaking Changes
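Several of the entries above surface as command-line options or subcommands; a purely illustrative invocation, with example values not taken from the changelog, could look like:

# flags named in the 24.1.2-SNAPSHOT notes above; values are examples only
besu --rpc-max-trace-filter-range=2000 --filter-on-enr-fork-id=false --Xbonsai-limit-trie-logs-enabled=true
# experimental offline subcommand, also from the notes above
besu storage x-trie-log prune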

@ -19,6 +19,7 @@ import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.RpcErrorType;
import org.hyperledger.besu.ethereum.privacy.PrivateTransaction;
import org.hyperledger.besu.tests.acceptance.dsl.condition.Condition;
+ import org.hyperledger.besu.tests.acceptance.dsl.privacy.condition.PrivateCondition;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.Transaction;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.privacy.PrivacyTransactions;
@ -112,4 +113,8 @@ public class PrivConditions {
final Transaction<?> transaction, final RpcErrorType error) {
return new ExpectJsonRpcError(transaction, error);
}
+ public PrivateCondition syncingStatus(final boolean isSyncing) {
+ return new PrivateSyncingStatusCondition(transactions.syncing(), isSyncing);
+ }
}

@ -0,0 +1,40 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.tests.acceptance.dsl.condition.priv;
import static org.assertj.core.api.Assertions.assertThat;
import org.hyperledger.besu.tests.acceptance.dsl.WaitUtils;
import org.hyperledger.besu.tests.acceptance.dsl.privacy.PrivacyNode;
import org.hyperledger.besu.tests.acceptance.dsl.privacy.condition.PrivateCondition;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.privacy.PrivSyncingTransactions;
public class PrivateSyncingStatusCondition implements PrivateCondition {
private final PrivSyncingTransactions transaction;
private final boolean syncingMiningStatus;
public PrivateSyncingStatusCondition(
final PrivSyncingTransactions transaction, final boolean syncingStatus) {
this.transaction = transaction;
this.syncingMiningStatus = syncingStatus;
}
@Override
public void verify(final PrivacyNode node) {
WaitUtils.waitFor(
10, () -> assertThat(node.execute(transaction)).isEqualTo(syncingMiningStatus));
}
}

@ -0,0 +1,40 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.tests.acceptance.dsl.transaction.privacy;
import static org.assertj.core.api.Assertions.assertThat;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.NodeRequests;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.Transaction;
import java.io.IOException;
import org.web3j.protocol.core.methods.response.EthSyncing;
public class PrivSyncingTransactions implements Transaction<Boolean> {
PrivSyncingTransactions() {}
@Override
public Boolean execute(final NodeRequests node) {
try {
EthSyncing response = node.eth().ethSyncing().send();
assertThat(response).isNotNull();
return response.isSyncing();
} catch (final IOException e) {
throw new RuntimeException(e);
}
}
}
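PrivSyncingTransactions above wraps web3j's `ethSyncing()` call, which issues the standard `eth_syncing` JSON-RPC request; the same check can be made directly against a node's RPC endpoint (the URL is illustrative):

curl -s -X POST -H 'Content-Type: application/json' \
--data '{"jsonrpc":"2.0","method":"eth_syncing","params":[],"id":1}' \
http://localhost:8545
# "result" is false when the node is not syncing; otherwise it is a syncing-status object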

@ -62,4 +62,8 @@ public class PrivacyTransactions {
public PrivGetTransactionReceiptTransaction getTransactionReceipt(final Hash transactionHash) {
return new PrivGetTransactionReceiptTransaction(transactionHash);
}
+ public PrivSyncingTransactions syncing() {
+ return new PrivSyncingTransactions();
+ }
}

@ -111,6 +111,9 @@ public class BftPrivacyClusterAcceptanceTest extends PrivacyAcceptanceTestBase {
charlie = createNode(containerNetwork, "node3", 2);
privacyCluster.start(alice, bob, charlie);
+ alice.verify(priv.syncingStatus(false));
+ bob.verify(priv.syncingStatus(false));
+ charlie.verify(priv.syncingStatus(false));
}
private PrivacyNode createNode(

@ -51,6 +51,7 @@ public class DeployPrivateSmartContractAcceptanceTest extends ParameterizedEncla
        restriction == UNRESTRICTED);
    privacyCluster.start(minerNode);
    minerNode.verify(priv.syncingStatus(false));
  }

  @Test

@ -94,6 +94,9 @@ public class EnclaveErrorAcceptanceTest extends PrivacyAcceptanceTestBase {
"0xBB"); "0xBB");
privacyCluster.start(alice, bob); privacyCluster.start(alice, bob);
alice.verify(priv.syncingStatus(false));
bob.verify(priv.syncingStatus(false));
final byte[] wrongPublicKeyBytes = final byte[] wrongPublicKeyBytes =
EnclaveEncryptorType.EC.equals(enclaveEncryptorType) EnclaveEncryptorType.EC.equals(enclaveEncryptorType)
? getSECP256r1PublicKeyByteArray() ? getSECP256r1PublicKeyByteArray()

@ -111,6 +111,10 @@ public class FlexiblePrivacyAcceptanceTest extends FlexiblePrivacyAcceptanceTest
            enclaveType,
            Optional.of(containerNetwork));
    privacyCluster.start(alice, bob, charlie);
    alice.verify(priv.syncingStatus(false));
    bob.verify(priv.syncingStatus(false));
    charlie.verify(priv.syncingStatus(false));
  }

  @Test

@ -93,6 +93,8 @@ public class PluginPrivacySigningAcceptanceTest extends PrivacyAcceptanceTestBas
            Optional.empty());
    privacyCluster.start(minerNode);
    minerNode.verify(priv.syncingStatus(false));
  }

  @Test

@ -76,6 +76,7 @@ public class PrivCallAcceptanceTest extends ParameterizedEnclaveTestBase {
        restriction == UNRESTRICTED);
    privacyCluster.start(minerNode);
    minerNode.verify(priv.syncingStatus(false));
  }

  @Test

@ -76,6 +76,9 @@ public class PrivDebugGetStateRootFlexibleGroupAcceptanceTest
            Optional.of(containerNetwork));
    privacyCluster.start(aliceNode, bobNode);
    aliceNode.verify(priv.syncingStatus(false));
    bobNode.verify(priv.syncingStatus(false));
  }

  @Test

@ -72,6 +72,9 @@ public class PrivDebugGetStateRootOffchainGroupAcceptanceTest extends Parameteri
"0xBB"); "0xBB");
privacyCluster.start(aliceNode, bobNode); privacyCluster.start(aliceNode, bobNode);
aliceNode.verify(priv.syncingStatus(false));
bobNode.verify(priv.syncingStatus(false));
} }
@Test @Test

@ -55,6 +55,7 @@ public class PrivGetCodeAcceptanceTest extends ParameterizedEnclaveTestBase {
        restriction == UNRESTRICTED);
    privacyCluster.start(alice);
    alice.verify(priv.syncingStatus(false));
  }

  @Test

@ -65,6 +65,7 @@ public class PrivGetLogsAcceptanceTest extends ParameterizedEnclaveTestBase {
        restriction == UNRESTRICTED);
    privacyCluster.start(node);
    node.verify(priv.syncingStatus(false));
  }

  @Test

@ -75,6 +75,9 @@ public class PrivGetPrivateTransactionAcceptanceTest extends ParameterizedEnclav
"0xBB"); "0xBB");
privacyCluster.start(alice, bob); privacyCluster.start(alice, bob);
alice.verify(priv.syncingStatus(false));
bob.verify(priv.syncingStatus(false));
} }
@Test @Test

@ -108,6 +108,10 @@ public class PrivacyClusterAcceptanceTest extends PrivacyAcceptanceTestBase {
            false,
            false);
    privacyCluster.start(alice, bob, charlie);
    alice.verify(priv.syncingStatus(false));
    bob.verify(priv.syncingStatus(false));
    charlie.verify(priv.syncingStatus(false));
  }

  @After

@ -94,6 +94,10 @@ public class PrivacyGroupAcceptanceTest extends PrivacyAcceptanceTestBase {
            false,
            false);
    privacyCluster.start(alice, bob, charlie);
    alice.verify(priv.syncingStatus(false));
    bob.verify(priv.syncingStatus(false));
    charlie.verify(priv.syncingStatus(false));
  }

  @Test

@ -61,6 +61,8 @@ public class PrivacyReceiptAcceptanceTest extends ParameterizedEnclaveTestBase {
            restriction == UNRESTRICTED,
            "0xAA");
    privacyCluster.start(alice);
    alice.verify(priv.syncingStatus(false));
  }

  @Test

@ -75,6 +75,9 @@ public class PrivateContractPublicStateAcceptanceTest extends ParameterizedEncla
            restriction == UNRESTRICTED);
    privacyCluster.start(minerNode, transactionNode);
    minerNode.verify(priv.syncingStatus(false));
    transactionNode.verify(priv.syncingStatus(false));
  }

  @Test

@ -60,6 +60,7 @@ public class PrivateGenesisAcceptanceTest extends ParameterizedEnclaveTestBase {
"AA"); "AA");
privacyCluster.start(alice); privacyCluster.start(alice);
alice.verify(priv.syncingStatus(false));
} }
@Test @Test

@ -60,6 +60,7 @@ public class PrivateLogFilterAcceptanceTest extends ParameterizedEnclaveTestBase
        restriction == UNRESTRICTED);
    privacyCluster.start(node);
    node.verify(priv.syncingStatus(false));
  }

  @Test

@ -4,8 +4,8 @@
"method": "engine_forkchoiceUpdatedV3", "method": "engine_forkchoiceUpdatedV3",
"params": [ "params": [
{ {
"headBlockHash": "0x26118cf71453320edcebbc4ebb34af5b578087a32385b80108bf691fa23efc42", "headBlockHash": "0x78a301e0d846bd169889c9755c9aa4ce2972dfc4bd63de61f3303887d3e81f98",
"safeBlockHash": "0x26118cf71453320edcebbc4ebb34af5b578087a32385b80108bf691fa23efc42", "safeBlockHash": "0x78a301e0d846bd169889c9755c9aa4ce2972dfc4bd63de61f3303887d3e81f98",
"finalizedBlockHash": "0x0000000000000000000000000000000000000000000000000000000000000000" "finalizedBlockHash": "0x0000000000000000000000000000000000000000000000000000000000000000"
}, },
{ {
@ -24,11 +24,11 @@
"result": { "result": {
"payloadStatus": { "payloadStatus": {
"status": "VALID", "status": "VALID",
"latestValidHash": "0x26118cf71453320edcebbc4ebb34af5b578087a32385b80108bf691fa23efc42", "latestValidHash": "0x78a301e0d846bd169889c9755c9aa4ce2972dfc4bd63de61f3303887d3e81f98",
"validationError": null "validationError": null
}, },
"payloadId": "0x282643c14de2dfef" "payloadId": "0x282643d459a6f711"
} }
}, },
"statusCode" : 200 "statusCode": 200
} }

@ -3,7 +3,7 @@
"jsonrpc": "2.0", "jsonrpc": "2.0",
"method": "engine_getPayloadV3", "method": "engine_getPayloadV3",
"params": [ "params": [
"0x282643c14de2dfef" "0x282643d459a6f711"
], ],
"id": 67 "id": 67
}, },
@ -12,7 +12,7 @@
"id": 67, "id": 67,
"result": { "result": {
"executionPayload": { "executionPayload": {
"parentHash": "0x26118cf71453320edcebbc4ebb34af5b578087a32385b80108bf691fa23efc42", "parentHash": "0x78a301e0d846bd169889c9755c9aa4ce2972dfc4bd63de61f3303887d3e81f98",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", "feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x9b8c4a9a86cb49252075c0db2f0e72fb1e49350a0f70ea36f26f700201961e62", "stateRoot": "0x9b8c4a9a86cb49252075c0db2f0e72fb1e49350a0f70ea36f26f700201961e62",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", "logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
@ -28,8 +28,8 @@
"withdrawals": [], "withdrawals": [],
"blockNumber": "0x1", "blockNumber": "0x1",
"receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
"blockHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356", "blobGasUsed": "0x0",
"blobGasUsed": "0x0" "blockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315"
}, },
"blockValue": "0x0", "blockValue": "0x0",
"blobsBundle": { "blobsBundle": {

@ -4,7 +4,7 @@
"method": "engine_newPayloadV3", "method": "engine_newPayloadV3",
"params": [ "params": [
{ {
"parentHash": "0x26118cf71453320edcebbc4ebb34af5b578087a32385b80108bf691fa23efc42", "parentHash": "0x78a301e0d846bd169889c9755c9aa4ce2972dfc4bd63de61f3303887d3e81f98",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", "feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x9b8c4a9a86cb49252075c0db2f0e72fb1e49350a0f70ea36f26f700201961e62", "stateRoot": "0x9b8c4a9a86cb49252075c0db2f0e72fb1e49350a0f70ea36f26f700201961e62",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", "logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
@ -17,7 +17,7 @@
"transactions": [], "transactions": [],
"withdrawals": [], "withdrawals": [],
"blockNumber": "0x1", "blockNumber": "0x1",
"blockHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356", "blockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
"excessBlobGas": "0x0", "excessBlobGas": "0x0",
"blobGasUsed": "0x0" "blobGasUsed": "0x0"
@ -32,7 +32,7 @@
"id": 67, "id": 67,
"result": { "result": {
"status": "VALID", "status": "VALID",
"latestValidHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356", "latestValidHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"validationError": null "validationError": null
} }
}, },

@ -4,9 +4,9 @@
"method": "engine_forkchoiceUpdatedV3", "method": "engine_forkchoiceUpdatedV3",
"params": [ "params": [
{ {
"headBlockHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356", "headBlockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"safeBlockHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356", "safeBlockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"finalizedBlockHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356" "finalizedBlockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315"
}, },
null null
], ],
@ -18,7 +18,7 @@
"result": { "result": {
"payloadStatus": { "payloadStatus": {
"status": "VALID", "status": "VALID",
"latestValidHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356", "latestValidHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"validationError": null "validationError": null
}, },
"payloadId": null "payloadId": null

@ -4,8 +4,8 @@
"method": "engine_forkchoiceUpdatedV3", "method": "engine_forkchoiceUpdatedV3",
"params": [ "params": [
{ {
"headBlockHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356", "headBlockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"safeBlockHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356", "safeBlockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"finalizedBlockHash": "0x0000000000000000000000000000000000000000000000000000000000000000" "finalizedBlockHash": "0x0000000000000000000000000000000000000000000000000000000000000000"
}, },
{ {
@ -24,10 +24,10 @@
"result": { "result": {
"payloadStatus": { "payloadStatus": {
"status": "VALID", "status": "VALID",
"latestValidHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356", "latestValidHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"validationError": null "validationError": null
}, },
"payloadId": "0x282643b9c2d2a4df" "payloadId": "0x282643b909febddf"
} }
}, },
"statusCode": 200 "statusCode": 200

@ -3,7 +3,7 @@
"jsonrpc": "2.0", "jsonrpc": "2.0",
"method": "engine_getPayloadV6110", "method": "engine_getPayloadV6110",
"params": [ "params": [
"0x282643b9c2d2a4df" "0x282643b909febddf"
], ],
"id": 67 "id": 67
}, },
@ -12,7 +12,7 @@
"id": 67, "id": 67,
"result": { "result": {
"executionPayload": { "executionPayload": {
"parentHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356", "parentHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", "feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x9b8c4a9a86cb49252075c0db2f0e72fb1e49350a0f70ea36f26f700201961e62", "stateRoot": "0x9b8c4a9a86cb49252075c0db2f0e72fb1e49350a0f70ea36f26f700201961e62",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", "logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
@ -28,9 +28,9 @@
"withdrawals": [], "withdrawals": [],
"depositReceipts": [], "depositReceipts": [],
"blockNumber": "0x2", "blockNumber": "0x2",
"blockHash": "0xf6c3f1180ba58d6ea4c69c9328c7afb1fda41df06c368741c1f8310567879de7", "blockHash": "0xc8255831601171a628ef17f6601d3d1d30ff9b382e77592ed1af32354f6dafbb",
"receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "blobGasUsed": "0x0",
"blobGasUsed": "0x0" "receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"
}, },
"blockValue": "0x0", "blockValue": "0x0",
"blobsBundle": { "blobsBundle": {

@ -4,7 +4,7 @@
"method": "engine_newPayloadV6110", "method": "engine_newPayloadV6110",
"params": [ "params": [
{ {
"parentHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356", "parentHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", "feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x14208ac0e218167936e220b72d5d5887a963cb858ea2f2d268518f014a3da3fa", "stateRoot": "0x14208ac0e218167936e220b72d5d5887a963cb858ea2f2d268518f014a3da3fa",
"logsBloom": "0x10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000", "logsBloom": "0x10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000",
@ -23,7 +23,7 @@
{"amount":"0x773594000","index":"0x0","pubkey":"0x96a96086cff07df17668f35f7418ef8798079167e3f4f9b72ecde17b28226137cf454ab1dd20ef5d924786ab3483c2f9","signature":"0xb1acdb2c4d3df3f1b8d3bfd33421660df358d84d78d16c4603551935f4b67643373e7eb63dcb16ec359be0ec41fee33b03a16e80745f2374ff1d3c352508ac5d857c6476d3c3bcf7e6ca37427c9209f17be3af5264c0e2132b3dd1156c28b4e9","withdrawalCredentials":"0x003f5102dabe0a27b1746098d1dc17a5d3fbd478759fea9287e4e419b3c3cef2"} {"amount":"0x773594000","index":"0x0","pubkey":"0x96a96086cff07df17668f35f7418ef8798079167e3f4f9b72ecde17b28226137cf454ab1dd20ef5d924786ab3483c2f9","signature":"0xb1acdb2c4d3df3f1b8d3bfd33421660df358d84d78d16c4603551935f4b67643373e7eb63dcb16ec359be0ec41fee33b03a16e80745f2374ff1d3c352508ac5d857c6476d3c3bcf7e6ca37427c9209f17be3af5264c0e2132b3dd1156c28b4e9","withdrawalCredentials":"0x003f5102dabe0a27b1746098d1dc17a5d3fbd478759fea9287e4e419b3c3cef2"}
], ],
"blockNumber": "0x2", "blockNumber": "0x2",
"blockHash": "0xb3b483867217b83b1e4a2f95c84d2da30cbff12eb8636f2becbcc05f4507fa7a", "blockHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"receiptsRoot": "0x79ee3424eb720a3ad4b1c5a372bb8160580cbe4d893778660f34213c685627a9", "receiptsRoot": "0x79ee3424eb720a3ad4b1c5a372bb8160580cbe4d893778660f34213c685627a9",
"blobGasUsed": "0x0" "blobGasUsed": "0x0"
}, },
@ -37,7 +37,7 @@
"id": 67, "id": 67,
"result": { "result": {
"status": "VALID", "status": "VALID",
"latestValidHash": "0xb3b483867217b83b1e4a2f95c84d2da30cbff12eb8636f2becbcc05f4507fa7a", "latestValidHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"validationError": null "validationError": null
} }
}, },

@ -4,8 +4,8 @@
"method": "engine_forkchoiceUpdatedV3", "method": "engine_forkchoiceUpdatedV3",
"params": [ "params": [
{ {
"headBlockHash": "0xb3b483867217b83b1e4a2f95c84d2da30cbff12eb8636f2becbcc05f4507fa7a", "headBlockHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"safeBlockHash": "0xb3b483867217b83b1e4a2f95c84d2da30cbff12eb8636f2becbcc05f4507fa7a", "safeBlockHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"finalizedBlockHash": "0x0000000000000000000000000000000000000000000000000000000000000000" "finalizedBlockHash": "0x0000000000000000000000000000000000000000000000000000000000000000"
}, },
{ {
@ -24,10 +24,10 @@
"result": { "result": {
"payloadStatus": { "payloadStatus": {
"status": "VALID", "status": "VALID",
"latestValidHash": "0xb3b483867217b83b1e4a2f95c84d2da30cbff12eb8636f2becbcc05f4507fa7a", "latestValidHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"validationError": null "validationError": null
}, },
"payloadId": "0x282643daa04b7631" "payloadId": "0x282643db882670cf"
} }
}, },
"statusCode" : 200 "statusCode" : 200

@ -3,7 +3,7 @@
"jsonrpc": "2.0", "jsonrpc": "2.0",
"method": "engine_getPayloadV6110", "method": "engine_getPayloadV6110",
"params": [ "params": [
"0x282643daa04b7631" "0x282643db882670cf"
], ],
"id": 67 "id": 67
}, },
@ -12,7 +12,7 @@
"id": 67, "id": 67,
"result": { "result": {
"executionPayload": { "executionPayload": {
"parentHash": "0xb3b483867217b83b1e4a2f95c84d2da30cbff12eb8636f2becbcc05f4507fa7a", "parentHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", "feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x14208ac0e218167936e220b72d5d5887a963cb858ea2f2d268518f014a3da3fa", "stateRoot": "0x14208ac0e218167936e220b72d5d5887a963cb858ea2f2d268518f014a3da3fa",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", "logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
@ -28,7 +28,7 @@
"withdrawals": [], "withdrawals": [],
"depositReceipts": [], "depositReceipts": [],
"blockNumber": "0x3", "blockNumber": "0x3",
"blockHash": "0xa28bf4db3363ce5b67848eb2ad52dbfead62ddb2287ae7eed36daa002528d1af", "blockHash": "0xf1e7093b5d229885caab11a3acb95412af80f9077b742020a8014cf81c8c75f2",
"receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", "receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
"blobGasUsed": "0x0" "blobGasUsed": "0x0"
}, },

@ -690,7 +690,7 @@ public class RunnerBuilder {
.timestampForks(besuController.getGenesisConfigOptions().getForkBlockTimestamps()) .timestampForks(besuController.getGenesisConfigOptions().getForkBlockTimestamps())
.allConnectionsSupplier(ethPeers::getAllConnections) .allConnectionsSupplier(ethPeers::getAllConnections)
.allActiveConnectionsSupplier(ethPeers::getAllActiveConnections) .allActiveConnectionsSupplier(ethPeers::getAllActiveConnections)
.peersLowerBound(ethPeers.getPeerLowerBound()) .maxPeers(ethPeers.getMaxPeers())
.build(); .build();
}; };

@ -24,7 +24,6 @@ import static org.hyperledger.besu.cli.config.NetworkName.MAINNET;
import static org.hyperledger.besu.cli.util.CommandLineUtils.DEPENDENCY_WARNING_MSG; import static org.hyperledger.besu.cli.util.CommandLineUtils.DEPENDENCY_WARNING_MSG;
import static org.hyperledger.besu.cli.util.CommandLineUtils.isOptionSet; import static org.hyperledger.besu.cli.util.CommandLineUtils.isOptionSet;
import static org.hyperledger.besu.controller.BesuController.DATABASE_PATH; import static org.hyperledger.besu.controller.BesuController.DATABASE_PATH;
import static org.hyperledger.besu.ethereum.api.graphql.GraphQLConfiguration.DEFAULT_GRAPHQL_HTTP_PORT;
import static org.hyperledger.besu.ethereum.api.jsonrpc.JsonRpcConfiguration.DEFAULT_ENGINE_JSON_RPC_PORT; import static org.hyperledger.besu.ethereum.api.jsonrpc.JsonRpcConfiguration.DEFAULT_ENGINE_JSON_RPC_PORT;
import static org.hyperledger.besu.ethereum.api.jsonrpc.authentication.EngineAuthService.EPHEMERAL_JWT_FILE; import static org.hyperledger.besu.ethereum.api.jsonrpc.authentication.EngineAuthService.EPHEMERAL_JWT_FILE;
import static org.hyperledger.besu.metrics.BesuMetricCategory.DEFAULT_METRIC_CATEGORIES; import static org.hyperledger.besu.metrics.BesuMetricCategory.DEFAULT_METRIC_CATEGORIES;
@ -44,7 +43,6 @@ import org.hyperledger.besu.cli.config.NetworkName;
import org.hyperledger.besu.cli.config.ProfileName; import org.hyperledger.besu.cli.config.ProfileName;
import org.hyperledger.besu.cli.converter.MetricCategoryConverter; import org.hyperledger.besu.cli.converter.MetricCategoryConverter;
import org.hyperledger.besu.cli.converter.PercentageConverter; import org.hyperledger.besu.cli.converter.PercentageConverter;
import org.hyperledger.besu.cli.custom.CorsAllowedOriginsProperty;
import org.hyperledger.besu.cli.custom.JsonRPCAllowlistHostsProperty; import org.hyperledger.besu.cli.custom.JsonRPCAllowlistHostsProperty;
import org.hyperledger.besu.cli.error.BesuExecutionExceptionHandler; import org.hyperledger.besu.cli.error.BesuExecutionExceptionHandler;
import org.hyperledger.besu.cli.error.BesuParameterExceptionHandler; import org.hyperledger.besu.cli.error.BesuParameterExceptionHandler;
@ -52,10 +50,12 @@ import org.hyperledger.besu.cli.options.MiningOptions;
import org.hyperledger.besu.cli.options.TransactionPoolOptions; import org.hyperledger.besu.cli.options.TransactionPoolOptions;
import org.hyperledger.besu.cli.options.stable.DataStorageOptions; import org.hyperledger.besu.cli.options.stable.DataStorageOptions;
import org.hyperledger.besu.cli.options.stable.EthstatsOptions; import org.hyperledger.besu.cli.options.stable.EthstatsOptions;
import org.hyperledger.besu.cli.options.stable.GraphQlOptions;
import org.hyperledger.besu.cli.options.stable.JsonRpcHttpOptions; import org.hyperledger.besu.cli.options.stable.JsonRpcHttpOptions;
import org.hyperledger.besu.cli.options.stable.LoggingLevelOption; import org.hyperledger.besu.cli.options.stable.LoggingLevelOption;
import org.hyperledger.besu.cli.options.stable.NodePrivateKeyFileOption; import org.hyperledger.besu.cli.options.stable.NodePrivateKeyFileOption;
import org.hyperledger.besu.cli.options.stable.P2PTLSConfigOptions; import org.hyperledger.besu.cli.options.stable.P2PTLSConfigOptions;
import org.hyperledger.besu.cli.options.stable.PermissionsOptions;
import org.hyperledger.besu.cli.options.stable.RpcWebsocketOptions; import org.hyperledger.besu.cli.options.stable.RpcWebsocketOptions;
import org.hyperledger.besu.cli.options.unstable.ChainPruningOptions; import org.hyperledger.besu.cli.options.unstable.ChainPruningOptions;
import org.hyperledger.besu.cli.options.unstable.DnsOptions; import org.hyperledger.besu.cli.options.unstable.DnsOptions;
@ -130,8 +130,6 @@ import org.hyperledger.besu.ethereum.p2p.peers.StaticNodesParser;
import org.hyperledger.besu.ethereum.p2p.rlpx.connections.netty.TLSConfiguration; import org.hyperledger.besu.ethereum.p2p.rlpx.connections.netty.TLSConfiguration;
import org.hyperledger.besu.ethereum.permissioning.LocalPermissioningConfiguration; import org.hyperledger.besu.ethereum.permissioning.LocalPermissioningConfiguration;
import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration; import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration;
import org.hyperledger.besu.ethereum.permissioning.PermissioningConfigurationBuilder;
import org.hyperledger.besu.ethereum.permissioning.SmartContractPermissioningConfiguration;
import org.hyperledger.besu.ethereum.privacy.storage.keyvalue.PrivacyKeyValueStorageProvider; import org.hyperledger.besu.ethereum.privacy.storage.keyvalue.PrivacyKeyValueStorageProvider;
import org.hyperledger.besu.ethereum.privacy.storage.keyvalue.PrivacyKeyValueStorageProviderBuilder; import org.hyperledger.besu.ethereum.privacy.storage.keyvalue.PrivacyKeyValueStorageProviderBuilder;
import org.hyperledger.besu.ethereum.storage.StorageProvider; import org.hyperledger.besu.ethereum.storage.StorageProvider;
@ -360,6 +358,7 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
description = "Identification for this node in the Client ID", description = "Identification for this node in the Client ID",
arity = "1") arity = "1")
private final Optional<String> identityString = Optional.empty(); private final Optional<String> identityString = Optional.empty();
// P2P Discovery Option Group // P2P Discovery Option Group
@CommandLine.ArgGroup(validate = false, heading = "@|bold P2P Discovery Options|@%n") @CommandLine.ArgGroup(validate = false, heading = "@|bold P2P Discovery Options|@%n")
P2PDiscoveryOptionGroup p2PDiscoveryOptionGroup = new P2PDiscoveryOptionGroup(); P2PDiscoveryOptionGroup p2PDiscoveryOptionGroup = new P2PDiscoveryOptionGroup();
@ -557,35 +556,7 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
private final Path kzgTrustedSetupFile = null; private final Path kzgTrustedSetupFile = null;
@CommandLine.ArgGroup(validate = false, heading = "@|bold GraphQL Options|@%n") @CommandLine.ArgGroup(validate = false, heading = "@|bold GraphQL Options|@%n")
GraphQlOptionGroup graphQlOptionGroup = new GraphQlOptionGroup(); GraphQlOptions graphQlOptions = new GraphQlOptions();
static class GraphQlOptionGroup {
@Option(
names = {"--graphql-http-enabled"},
description = "Set to start the GraphQL HTTP service (default: ${DEFAULT-VALUE})")
private final Boolean isGraphQLHttpEnabled = false;
@SuppressWarnings({"FieldCanBeFinal", "FieldMayBeFinal"}) // PicoCLI requires non-final Strings.
@Option(
names = {"--graphql-http-host"},
paramLabel = MANDATORY_HOST_FORMAT_HELP,
description = "Host for GraphQL HTTP to listen on (default: ${DEFAULT-VALUE})",
arity = "1")
private String graphQLHttpHost;
@Option(
names = {"--graphql-http-port"},
paramLabel = MANDATORY_PORT_FORMAT_HELP,
description = "Port for GraphQL HTTP to listen on (default: ${DEFAULT-VALUE})",
arity = "1")
private final Integer graphQLHttpPort = DEFAULT_GRAPHQL_HTTP_PORT;
@Option(
names = {"--graphql-http-cors-origins"},
description = "Comma separated origin domain URLs for CORS validation (default: none)")
protected final CorsAllowedOriginsProperty graphQLHttpCorsAllowedOrigins =
new CorsAllowedOriginsProperty();
}
// Engine JSON-RPC Options // Engine JSON-RPC Options
@CommandLine.ArgGroup(validate = false, heading = "@|bold Engine JSON-RPC Options|@%n") @CommandLine.ArgGroup(validate = false, heading = "@|bold Engine JSON-RPC Options|@%n")
@ -611,13 +582,6 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
description = "Path to file containing shared secret key for JWT signature verification") description = "Path to file containing shared secret key for JWT signature verification")
private final Path engineJwtKeyFile = null; private final Path engineJwtKeyFile = null;
@Option(
names = {"--engine-jwt-enabled"},
description = "deprecated option, engine jwt auth is enabled by default",
hidden = true)
@SuppressWarnings({"FieldCanBeFinal", "UnusedVariable"})
private final Boolean deprecatedIsEngineAuthEnabled = true;
@Option( @Option(
names = {"--engine-jwt-disabled"}, names = {"--engine-jwt-disabled"},
description = "Disable authentication for Engine APIs (default: ${DEFAULT-VALUE})") description = "Disable authentication for Engine APIs (default: ${DEFAULT-VALUE})")
@ -825,62 +789,7 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
// Permission Option Group // Permission Option Group
@CommandLine.ArgGroup(validate = false, heading = "@|bold Permissions Options|@%n") @CommandLine.ArgGroup(validate = false, heading = "@|bold Permissions Options|@%n")
PermissionsOptionGroup permissionsOptionGroup = new PermissionsOptionGroup(); PermissionsOptions permissionsOptions = new PermissionsOptions();
static class PermissionsOptionGroup {
@Option(
names = {"--permissions-nodes-config-file-enabled"},
description = "Enable node level permissions (default: ${DEFAULT-VALUE})")
private final Boolean permissionsNodesEnabled = false;
@SuppressWarnings({"FieldCanBeFinal", "FieldMayBeFinal"}) // PicoCLI requires non-final Strings.
@CommandLine.Option(
names = {"--permissions-nodes-config-file"},
description =
"Node permissioning config TOML file (default: a file named \"permissions_config.toml\" in the Besu data folder)")
private String nodePermissionsConfigFile = null;
@Option(
names = {"--permissions-accounts-config-file-enabled"},
description = "Enable account level permissions (default: ${DEFAULT-VALUE})")
private final Boolean permissionsAccountsEnabled = false;
@SuppressWarnings({"FieldCanBeFinal", "FieldMayBeFinal"}) // PicoCLI requires non-final Strings.
@CommandLine.Option(
names = {"--permissions-accounts-config-file"},
description =
"Account permissioning config TOML file (default: a file named \"permissions_config.toml\" in the Besu data folder)")
private String accountPermissionsConfigFile = null;
@Option(
names = {"--permissions-nodes-contract-address"},
description = "Address of the node permissioning smart contract",
arity = "1")
private final Address permissionsNodesContractAddress = null;
@Option(
names = {"--permissions-nodes-contract-version"},
description = "Version of the EEA Node Permissioning interface (default: ${DEFAULT-VALUE})")
private final Integer permissionsNodesContractVersion = 1;
@Option(
names = {"--permissions-nodes-contract-enabled"},
description =
"Enable node level permissions via smart contract (default: ${DEFAULT-VALUE})")
private final Boolean permissionsNodesContractEnabled = false;
@Option(
names = {"--permissions-accounts-contract-address"},
description = "Address of the account permissioning smart contract",
arity = "1")
private final Address permissionsAccountsContractAddress = null;
@Option(
names = {"--permissions-accounts-contract-enabled"},
description =
"Enable account level permissions via smart contract (default: ${DEFAULT-VALUE})")
private final Boolean permissionsAccountsContractEnabled = false;
}
@Option( @Option(
names = {"--revert-reason-enabled"}, names = {"--revert-reason-enabled"},
@ -929,7 +838,6 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
@Option( @Option(
names = {"--pruning-blocks-retained"}, names = {"--pruning-blocks-retained"},
defaultValue = "1024",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = description =
"Minimum number of recent blocks for which to keep entire world state (default: ${DEFAULT-VALUE})", "Minimum number of recent blocks for which to keep entire world state (default: ${DEFAULT-VALUE})",
@ -938,7 +846,6 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
@Option( @Option(
names = {"--pruning-block-confirmations"}, names = {"--pruning-block-confirmations"},
defaultValue = "10",
paramLabel = "<INTEGER>", paramLabel = "<INTEGER>",
description = description =
"Minimum number of confirmations on a block before marking begins (default: ${DEFAULT-VALUE})", "Minimum number of confirmations on a block before marking begins (default: ${DEFAULT-VALUE})",
@ -1544,7 +1451,8 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
logger.info("Using the Java implementation of the blake2bf algorithm"); logger.info("Using the Java implementation of the blake2bf algorithm");
} }
if (getActualGenesisConfigOptions().getCancunTime().isPresent()) { if (getActualGenesisConfigOptions().getCancunTime().isPresent()
|| getActualGenesisConfigOptions().getPragueTime().isPresent()) {
if (kzgTrustedSetupFile != null) { if (kzgTrustedSetupFile != null) {
KZGPointEvalPrecompiledContract.init(kzgTrustedSetupFile); KZGPointEvalPrecompiledContract.init(kzgTrustedSetupFile);
} else { } else {
@ -1572,6 +1480,7 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
validatePostMergeCheckpointBlockRequirements(); validatePostMergeCheckpointBlockRequirements();
validateTransactionPoolOptions(); validateTransactionPoolOptions();
validateDataStorageOptions(); validateDataStorageOptions();
validateGraphQlOptions();
p2pTLSConfigOptions.checkP2PTLSOptionsDependencies(logger, commandLine); p2pTLSConfigOptions.checkP2PTLSOptionsDependencies(logger, commandLine);
pkiBlockCreationOptions.checkPkiBlockCreationOptionsDependencies(logger, commandLine); pkiBlockCreationOptions.checkPkiBlockCreationOptionsDependencies(logger, commandLine);
} }
@ -1617,6 +1526,10 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
} }
} }
private void validateGraphQlOptions() {
graphQlOptions.validate(logger, commandLine);
}
@SuppressWarnings("ConstantConditions") @SuppressWarnings("ConstantConditions")
private void validateNatParams() { private void validateNatParams() {
if (!(natMethod.equals(NatMethod.AUTO) || natMethod.equals(NatMethod.KUBERNETES)) if (!(natMethod.equals(NatMethod.AUTO) || natMethod.equals(NatMethod.KUBERNETES))
@ -1811,7 +1724,11 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
engineRPCOptionGroup.engineRpcPort, engineRPCOptionGroup.engineHostsAllowlist); engineRPCOptionGroup.engineRpcPort, engineRPCOptionGroup.engineHostsAllowlist);
} }
p2pTLSConfiguration = p2pTLSConfigOptions.p2pTLSConfiguration(commandLine); p2pTLSConfiguration = p2pTLSConfigOptions.p2pTLSConfiguration(commandLine);
graphQLConfiguration = graphQLConfiguration(); graphQLConfiguration =
graphQlOptions.graphQLConfiguration(
hostsAllowlist,
p2PDiscoveryOptionGroup.autoDiscoverDefaultIP().getHostAddress(),
unstableRPCOptions.getHttpTimeoutSec());
webSocketConfiguration = webSocketConfiguration =
rpcWebsocketOptions.webSocketConfiguration( rpcWebsocketOptions.webSocketConfiguration(
hostsAllowlist, hostsAllowlist,
@ -1852,6 +1769,16 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
logger.info("Security Module: {}", securityModuleName); logger.info("Security Module: {}", securityModuleName);
} }
private Optional<PermissioningConfiguration> permissioningConfiguration() throws Exception {
return permissionsOptions.permissioningConfiguration(
jsonRpcHttpOptions,
rpcWebsocketOptions,
getEnodeDnsConfiguration(),
dataDir(),
logger,
commandLine);
}
private JsonRpcIpcConfiguration jsonRpcIpcConfiguration( private JsonRpcIpcConfiguration jsonRpcIpcConfiguration(
final Boolean enabled, final Path ipcPath, final List<String> rpcIpcApis) { final Boolean enabled, final Path ipcPath, final List<String> rpcIpcApis) {
final Path actualPath; final Path actualPath;
@ -1953,28 +1880,6 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
return txSValidatorService.map(PluginTransactionValidatorService::get).orElse(null); return txSValidatorService.map(PluginTransactionValidatorService::get).orElse(null);
} }
private GraphQLConfiguration graphQLConfiguration() {
CommandLineUtils.checkOptionDependencies(
logger,
commandLine,
"--graphql-http-enabled",
!graphQlOptionGroup.isGraphQLHttpEnabled,
asList("--graphql-http-cors-origins", "--graphql-http-host", "--graphql-http-port"));
final GraphQLConfiguration graphQLConfiguration = GraphQLConfiguration.createDefault();
graphQLConfiguration.setEnabled(graphQlOptionGroup.isGraphQLHttpEnabled);
graphQLConfiguration.setHost(
Strings.isNullOrEmpty(graphQlOptionGroup.graphQLHttpHost)
? p2PDiscoveryOptionGroup.autoDiscoverDefaultIP().getHostAddress()
: graphQlOptionGroup.graphQLHttpHost);
graphQLConfiguration.setPort(graphQlOptionGroup.graphQLHttpPort);
graphQLConfiguration.setHostsAllowlist(hostsAllowlist);
graphQLConfiguration.setCorsAllowedDomains(graphQlOptionGroup.graphQLHttpCorsAllowedOrigins);
graphQLConfiguration.setHttpTimeoutSec(unstableRPCOptions.getHttpTimeoutSec());
return graphQLConfiguration;
}
private JsonRpcConfiguration createEngineJsonRpcConfiguration( private JsonRpcConfiguration createEngineJsonRpcConfiguration(
final Integer engineListenPort, final List<String> allowCallsFrom) { final Integer engineListenPort, final List<String> allowCallsFrom) {
jsonRpcHttpOptions.checkDependencies(logger, commandLine); jsonRpcHttpOptions.checkDependencies(logger, commandLine);
@ -2091,106 +1996,6 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
.build(); .build();
} }
private Optional<PermissioningConfiguration> permissioningConfiguration() throws Exception {
if (!(localPermissionsEnabled() || contractPermissionsEnabled())) {
if (jsonRpcHttpOptions.getRpcHttpApis().contains(RpcApis.PERM.name())
|| rpcWebsocketOptions.getRpcWsApis().contains(RpcApis.PERM.name())) {
logger.warn(
"Permissions are disabled. Cannot enable PERM APIs when not using Permissions.");
}
return Optional.empty();
}
final Optional<LocalPermissioningConfiguration> localPermissioningConfigurationOptional;
if (localPermissionsEnabled()) {
final Optional<String> nodePermissioningConfigFile =
Optional.ofNullable(permissionsOptionGroup.nodePermissionsConfigFile);
final Optional<String> accountPermissioningConfigFile =
Optional.ofNullable(permissionsOptionGroup.accountPermissionsConfigFile);
final LocalPermissioningConfiguration localPermissioningConfiguration =
PermissioningConfigurationBuilder.permissioningConfiguration(
permissionsOptionGroup.permissionsNodesEnabled,
getEnodeDnsConfiguration(),
nodePermissioningConfigFile.orElse(getDefaultPermissioningFilePath()),
permissionsOptionGroup.permissionsAccountsEnabled,
accountPermissioningConfigFile.orElse(getDefaultPermissioningFilePath()));
localPermissioningConfigurationOptional = Optional.of(localPermissioningConfiguration);
} else {
if (permissionsOptionGroup.nodePermissionsConfigFile != null
&& !permissionsOptionGroup.permissionsNodesEnabled) {
logger.warn(
"Node permissioning config file set {} but no permissions enabled",
permissionsOptionGroup.nodePermissionsConfigFile);
}
if (permissionsOptionGroup.accountPermissionsConfigFile != null
&& !permissionsOptionGroup.permissionsAccountsEnabled) {
logger.warn(
"Account permissioning config file set {} but no permissions enabled",
permissionsOptionGroup.accountPermissionsConfigFile);
}
localPermissioningConfigurationOptional = Optional.empty();
}
final SmartContractPermissioningConfiguration smartContractPermissioningConfiguration =
SmartContractPermissioningConfiguration.createDefault();
if (Boolean.TRUE.equals(permissionsOptionGroup.permissionsNodesContractEnabled)) {
if (permissionsOptionGroup.permissionsNodesContractAddress == null) {
throw new ParameterException(
this.commandLine,
"No node permissioning contract address specified. Cannot enable smart contract based node permissioning.");
} else {
smartContractPermissioningConfiguration.setSmartContractNodeAllowlistEnabled(
permissionsOptionGroup.permissionsNodesContractEnabled);
smartContractPermissioningConfiguration.setNodeSmartContractAddress(
permissionsOptionGroup.permissionsNodesContractAddress);
smartContractPermissioningConfiguration.setNodeSmartContractInterfaceVersion(
permissionsOptionGroup.permissionsNodesContractVersion);
}
} else if (permissionsOptionGroup.permissionsNodesContractAddress != null) {
logger.warn(
"Node permissioning smart contract address set {} but smart contract node permissioning is disabled.",
permissionsOptionGroup.permissionsNodesContractAddress);
}
if (Boolean.TRUE.equals(permissionsOptionGroup.permissionsAccountsContractEnabled)) {
if (permissionsOptionGroup.permissionsAccountsContractAddress == null) {
throw new ParameterException(
this.commandLine,
"No account permissioning contract address specified. Cannot enable smart contract based account permissioning.");
} else {
smartContractPermissioningConfiguration.setSmartContractAccountAllowlistEnabled(
permissionsOptionGroup.permissionsAccountsContractEnabled);
smartContractPermissioningConfiguration.setAccountSmartContractAddress(
permissionsOptionGroup.permissionsAccountsContractAddress);
}
} else if (permissionsOptionGroup.permissionsAccountsContractAddress != null) {
logger.warn(
"Account permissioning smart contract address set {} but smart contract account permissioning is disabled.",
permissionsOptionGroup.permissionsAccountsContractAddress);
}
final PermissioningConfiguration permissioningConfiguration =
new PermissioningConfiguration(
localPermissioningConfigurationOptional,
Optional.of(smartContractPermissioningConfiguration));
return Optional.of(permissioningConfiguration);
}
private boolean localPermissionsEnabled() {
return permissionsOptionGroup.permissionsAccountsEnabled
|| permissionsOptionGroup.permissionsNodesEnabled;
}
private boolean contractPermissionsEnabled() {
return permissionsOptionGroup.permissionsNodesContractEnabled
|| permissionsOptionGroup.permissionsAccountsContractEnabled;
}
private PrivacyParameters privacyParameters() { private PrivacyParameters privacyParameters() {
CommandLineUtils.checkOptionDependencies( CommandLineUtils.checkOptionDependencies(
@ -2661,12 +2466,6 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
.orElseGet(() -> KeyPairUtil.getDefaultKeyFile(dataDir())); .orElseGet(() -> KeyPairUtil.getDefaultKeyFile(dataDir()));
} }
private String getDefaultPermissioningFilePath() {
return dataDir()
+ System.getProperty("file.separator")
+ DefaultCommandValues.PERMISSIONING_CONFIG_LOCATION;
}
/** /**
* Metrics System used by Besu * Metrics System used by Besu
* *
@ -2790,9 +2589,7 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
addPortIfEnabled( addPortIfEnabled(
effectivePorts, p2PDiscoveryOptionGroup.p2pPort, p2PDiscoveryOptionGroup.p2pEnabled); effectivePorts, p2PDiscoveryOptionGroup.p2pPort, p2PDiscoveryOptionGroup.p2pEnabled);
addPortIfEnabled( addPortIfEnabled(
effectivePorts, effectivePorts, graphQlOptions.getGraphQLHttpPort(), graphQlOptions.isGraphQLHttpEnabled());
graphQlOptionGroup.graphQLHttpPort,
graphQlOptionGroup.isGraphQLHttpEnabled);
addPortIfEnabled( addPortIfEnabled(
effectivePorts, jsonRpcHttpOptions.getRpcHttpPort(), jsonRpcHttpOptions.isRpcHttpEnabled()); effectivePorts, jsonRpcHttpOptions.getRpcHttpPort(), jsonRpcHttpOptions.isRpcHttpEnabled());
addPortIfEnabled( addPortIfEnabled(

@ -0,0 +1,112 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.options.stable;
import static java.util.Arrays.asList;
import static org.hyperledger.besu.ethereum.api.graphql.GraphQLConfiguration.DEFAULT_GRAPHQL_HTTP_PORT;
import org.hyperledger.besu.cli.DefaultCommandValues;
import org.hyperledger.besu.cli.custom.CorsAllowedOriginsProperty;
import org.hyperledger.besu.cli.util.CommandLineUtils;
import org.hyperledger.besu.ethereum.api.graphql.GraphQLConfiguration;
import java.util.List;
import com.google.common.base.Strings;
import org.slf4j.Logger;
import picocli.CommandLine;
/** Handles configuration options for the GraphQL HTTP service in Besu. */
public class GraphQlOptions {
@CommandLine.Option(
names = {"--graphql-http-enabled"},
description = "Set to start the GraphQL HTTP service (default: ${DEFAULT-VALUE})")
private final Boolean isGraphQLHttpEnabled = false;
@SuppressWarnings({"FieldCanBeFinal", "FieldMayBeFinal"}) // PicoCLI requires non-final Strings.
@CommandLine.Option(
names = {"--graphql-http-host"},
paramLabel = DefaultCommandValues.MANDATORY_HOST_FORMAT_HELP,
description = "Host for GraphQL HTTP to listen on (default: ${DEFAULT-VALUE})",
arity = "1")
private String graphQLHttpHost;
@CommandLine.Option(
names = {"--graphql-http-port"},
paramLabel = DefaultCommandValues.MANDATORY_PORT_FORMAT_HELP,
description = "Port for GraphQL HTTP to listen on (default: ${DEFAULT-VALUE})",
arity = "1")
private final Integer graphQLHttpPort = DEFAULT_GRAPHQL_HTTP_PORT;
@CommandLine.Option(
names = {"--graphql-http-cors-origins"},
description = "Comma separated origin domain URLs for CORS validation (default: none)")
private final CorsAllowedOriginsProperty graphQLHttpCorsAllowedOrigins =
new CorsAllowedOriginsProperty();
/**
* Validates the GraphQL HTTP options.
*
* @param logger Logger instance
* @param commandLine CommandLine instance
*/
public void validate(final Logger logger, final CommandLine commandLine) {
CommandLineUtils.checkOptionDependencies(
logger,
commandLine,
"--graphql-http-enabled",
!isGraphQLHttpEnabled,
asList("--graphql-http-cors-origins", "--graphql-http-host", "--graphql-http-port"));
}
/**
* Creates a GraphQLConfiguration based on the provided options.
*
* @param hostsAllowlist List of hosts allowed
* @param defaultHostAddress Default host address
* @param timeoutSec Timeout in seconds
* @return A GraphQLConfiguration instance
*/
public GraphQLConfiguration graphQLConfiguration(
final List<String> hostsAllowlist, final String defaultHostAddress, final Long timeoutSec) {
final GraphQLConfiguration graphQLConfiguration = GraphQLConfiguration.createDefault();
graphQLConfiguration.setEnabled(isGraphQLHttpEnabled);
graphQLConfiguration.setHost(
Strings.isNullOrEmpty(graphQLHttpHost) ? defaultHostAddress : graphQLHttpHost);
graphQLConfiguration.setPort(graphQLHttpPort);
graphQLConfiguration.setHostsAllowlist(hostsAllowlist);
graphQLConfiguration.setCorsAllowedDomains(graphQLHttpCorsAllowedOrigins);
graphQLConfiguration.setHttpTimeoutSec(timeoutSec);
return graphQLConfiguration;
}
/**
* Checks if GraphQL over HTTP is enabled.
*
* @return true if enabled, false otherwise
*/
public Boolean isGraphQLHttpEnabled() {
return isGraphQLHttpEnabled;
}
/**
* Returns the port for GraphQL over HTTP.
*
* @return The port number
*/
public Integer getGraphQLHttpPort() {
return graphQLHttpPort;
}
}
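For readers unfamiliar with how these extracted option classes are consumed: BesuCommand (earlier in this diff) declares GraphQlOptions as a picocli @ArgGroup, validates it, and asks it for a GraphQLConfiguration. The standalone sketch below mirrors that flow; the flag values, allowlist, fallback host and timeout are illustrative assumptions, only the GraphQlOptions API itself comes from the file above.

import java.util.List;

import org.hyperledger.besu.cli.options.stable.GraphQlOptions;
import org.hyperledger.besu.ethereum.api.graphql.GraphQLConfiguration;
import org.slf4j.LoggerFactory;
import picocli.CommandLine;

public class GraphQlOptionsSketch {
  public static void main(final String[] args) {
    final GraphQlOptions graphQlOptions = new GraphQlOptions();
    final CommandLine commandLine = new CommandLine(graphQlOptions);

    // Parse only the GraphQL flags (values here are illustrative).
    commandLine.parseArgs("--graphql-http-enabled", "--graphql-http-port", "8547");

    // Warns if GraphQL sub-options are set while --graphql-http-enabled is off.
    graphQlOptions.validate(LoggerFactory.getLogger(GraphQlOptionsSketch.class), commandLine);

    // In Besu the allowlist, fallback host and timeout come from BesuCommand.
    final GraphQLConfiguration configuration =
        graphQlOptions.graphQLConfiguration(List.of("localhost", "127.0.0.1"), "127.0.0.1", 60L);

    System.out.println("GraphQL enabled on port " + graphQlOptions.getGraphQLHttpPort());
  }
}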

@ -0,0 +1,206 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.options.stable;
import org.hyperledger.besu.cli.DefaultCommandValues;
import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.ethereum.api.jsonrpc.RpcApis;
import org.hyperledger.besu.ethereum.p2p.peers.EnodeDnsConfiguration;
import org.hyperledger.besu.ethereum.permissioning.LocalPermissioningConfiguration;
import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration;
import org.hyperledger.besu.ethereum.permissioning.PermissioningConfigurationBuilder;
import org.hyperledger.besu.ethereum.permissioning.SmartContractPermissioningConfiguration;
import java.nio.file.Path;
import java.util.Optional;
import org.slf4j.Logger;
import picocli.CommandLine;
/** Handles configuration options for permissions in Besu. */
public class PermissionsOptions {
@CommandLine.Option(
names = {"--permissions-nodes-config-file-enabled"},
description = "Enable node level permissions (default: ${DEFAULT-VALUE})")
private final Boolean permissionsNodesEnabled = false;
@SuppressWarnings({"FieldCanBeFinal", "FieldMayBeFinal"}) // PicoCLI requires non-final Strings.
@CommandLine.Option(
names = {"--permissions-nodes-config-file"},
description =
"Node permissioning config TOML file (default: a file named \"permissions_config.toml\" in the Besu data folder)")
private String nodePermissionsConfigFile = null;
@CommandLine.Option(
names = {"--permissions-accounts-config-file-enabled"},
description = "Enable account level permissions (default: ${DEFAULT-VALUE})")
private final Boolean permissionsAccountsEnabled = false;
@SuppressWarnings({"FieldCanBeFinal", "FieldMayBeFinal"}) // PicoCLI requires non-final Strings.
@CommandLine.Option(
names = {"--permissions-accounts-config-file"},
description =
"Account permissioning config TOML file (default: a file named \"permissions_config.toml\" in the Besu data folder)")
private String accountPermissionsConfigFile = null;
@CommandLine.Option(
names = {"--permissions-nodes-contract-address"},
description = "Address of the node permissioning smart contract",
arity = "1")
private final Address permissionsNodesContractAddress = null;
@CommandLine.Option(
names = {"--permissions-nodes-contract-version"},
description = "Version of the EEA Node Permissioning interface (default: ${DEFAULT-VALUE})")
private final Integer permissionsNodesContractVersion = 1;
@CommandLine.Option(
names = {"--permissions-nodes-contract-enabled"},
description = "Enable node level permissions via smart contract (default: ${DEFAULT-VALUE})")
private final Boolean permissionsNodesContractEnabled = false;
@CommandLine.Option(
names = {"--permissions-accounts-contract-address"},
description = "Address of the account permissioning smart contract",
arity = "1")
private final Address permissionsAccountsContractAddress = null;
@CommandLine.Option(
names = {"--permissions-accounts-contract-enabled"},
description =
"Enable account level permissions via smart contract (default: ${DEFAULT-VALUE})")
private final Boolean permissionsAccountsContractEnabled = false;
/**
* Creates a PermissioningConfiguration based on the provided options.
*
* @param jsonRpcHttpOptions The JSON-RPC HTTP options
* @param rpcWebsocketOptions The RPC websocket options
* @param enodeDnsConfiguration The enode DNS configuration
* @param dataPath The data path
* @param logger The logger
* @param commandLine The command line
* @return An Optional PermissioningConfiguration instance
* @throws Exception If an error occurs while creating the configuration
*/
public Optional<PermissioningConfiguration> permissioningConfiguration(
final JsonRpcHttpOptions jsonRpcHttpOptions,
final RpcWebsocketOptions rpcWebsocketOptions,
final EnodeDnsConfiguration enodeDnsConfiguration,
final Path dataPath,
final Logger logger,
final CommandLine commandLine)
throws Exception {
if (!(localPermissionsEnabled() || contractPermissionsEnabled())) {
if (jsonRpcHttpOptions.getRpcHttpApis().contains(RpcApis.PERM.name())
|| rpcWebsocketOptions.getRpcWsApis().contains(RpcApis.PERM.name())) {
logger.warn(
"Permissions are disabled. Cannot enable PERM APIs when not using Permissions.");
}
return Optional.empty();
}
final Optional<LocalPermissioningConfiguration> localPermissioningConfigurationOptional;
if (localPermissionsEnabled()) {
final Optional<String> nodePermissioningConfigFile =
Optional.ofNullable(nodePermissionsConfigFile);
final Optional<String> accountPermissioningConfigFile =
Optional.ofNullable(accountPermissionsConfigFile);
final LocalPermissioningConfiguration localPermissioningConfiguration =
PermissioningConfigurationBuilder.permissioningConfiguration(
permissionsNodesEnabled,
enodeDnsConfiguration,
nodePermissioningConfigFile.orElse(getDefaultPermissioningFilePath(dataPath)),
permissionsAccountsEnabled,
accountPermissioningConfigFile.orElse(getDefaultPermissioningFilePath(dataPath)));
localPermissioningConfigurationOptional = Optional.of(localPermissioningConfiguration);
} else {
if (nodePermissionsConfigFile != null && !permissionsNodesEnabled) {
logger.warn(
"Node permissioning config file set {} but no permissions enabled",
nodePermissionsConfigFile);
}
if (accountPermissionsConfigFile != null && !permissionsAccountsEnabled) {
logger.warn(
"Account permissioning config file set {} but no permissions enabled",
accountPermissionsConfigFile);
}
localPermissioningConfigurationOptional = Optional.empty();
}
final SmartContractPermissioningConfiguration smartContractPermissioningConfiguration =
SmartContractPermissioningConfiguration.createDefault();
if (Boolean.TRUE.equals(permissionsNodesContractEnabled)) {
if (permissionsNodesContractAddress == null) {
throw new CommandLine.ParameterException(
commandLine,
"No node permissioning contract address specified. Cannot enable smart contract based node permissioning.");
} else {
smartContractPermissioningConfiguration.setSmartContractNodeAllowlistEnabled(
permissionsNodesContractEnabled);
smartContractPermissioningConfiguration.setNodeSmartContractAddress(
permissionsNodesContractAddress);
smartContractPermissioningConfiguration.setNodeSmartContractInterfaceVersion(
permissionsNodesContractVersion);
}
} else if (permissionsNodesContractAddress != null) {
logger.warn(
"Node permissioning smart contract address set {} but smart contract node permissioning is disabled.",
permissionsNodesContractAddress);
}
if (Boolean.TRUE.equals(permissionsAccountsContractEnabled)) {
if (permissionsAccountsContractAddress == null) {
throw new CommandLine.ParameterException(
commandLine,
"No account permissioning contract address specified. Cannot enable smart contract based account permissioning.");
} else {
smartContractPermissioningConfiguration.setSmartContractAccountAllowlistEnabled(
permissionsAccountsContractEnabled);
smartContractPermissioningConfiguration.setAccountSmartContractAddress(
permissionsAccountsContractAddress);
}
} else if (permissionsAccountsContractAddress != null) {
logger.warn(
"Account permissioning smart contract address set {} but smart contract account permissioning is disabled.",
permissionsAccountsContractAddress);
}
final PermissioningConfiguration permissioningConfiguration =
new PermissioningConfiguration(
localPermissioningConfigurationOptional,
Optional.of(smartContractPermissioningConfiguration));
return Optional.of(permissioningConfiguration);
}
private boolean localPermissionsEnabled() {
return permissionsAccountsEnabled || permissionsNodesEnabled;
}
private boolean contractPermissionsEnabled() {
return permissionsNodesContractEnabled || permissionsAccountsContractEnabled;
}
private String getDefaultPermissioningFilePath(final Path dataPath) {
return dataPath
+ System.getProperty("file.separator")
+ DefaultCommandValues.PERMISSIONING_CONFIG_LOCATION;
}
}
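For orientation, here is a minimal sketch of the configuration the contract-enabled branch above assembles when both --permissions-nodes-contract-enabled and --permissions-nodes-contract-address are supplied. The address and the wrapper class name are illustrative placeholders, not part of this change; only the setters and constructor already used above are assumed.

import java.util.Optional;

import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration;
import org.hyperledger.besu.ethereum.permissioning.SmartContractPermissioningConfiguration;

// Sketch only: mirrors the permissionsNodesContractEnabled branch with a placeholder address.
class NodeContractPermissioningSketch {
  public static void main(final String[] args) {
    final SmartContractPermissioningConfiguration contractConfig =
        SmartContractPermissioningConfiguration.createDefault();
    contractConfig.setSmartContractNodeAllowlistEnabled(true);
    contractConfig.setNodeSmartContractAddress(
        Address.fromHexString("0x0000000000000000000000000000000000001234"));
    contractConfig.setNodeSmartContractInterfaceVersion(1);
    // No local (file-based) permissioning in this sketch, only the contract config.
    final PermissioningConfiguration permissioningConfiguration =
        new PermissioningConfiguration(Optional.empty(), Optional.of(contractConfig));
    System.out.println(permissioningConfiguration.getSmartContractConfig().isPresent());
  }
}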

@ -47,7 +47,6 @@ public class EvmOptions implements CLIOptions<EvmConfiguration> {
"size in kilobytes to allow the cache "
+ "of valid jump destinations to grow to before evicting the least recently used entry",
fallbackValue = "32000",
defaultValue = "32000",
hidden = true,
arity = "1")
private Long jumpDestCacheWeightKilobytes =
@ -57,7 +56,6 @@ public class EvmOptions implements CLIOptions<EvmConfiguration> {
names = {WORLDSTATE_UPDATE_MODE},
description = "How to handle worldstate updates within a transaction",
fallbackValue = "STACKED",
defaultValue = "STACKED",
hidden = true,
arity = "1")
private EvmConfiguration.WorldUpdaterMode worldstateUpdateMode =

@ -30,7 +30,6 @@ public class MetricsCLIOptions implements CLIOptions<MetricsConfiguration.Builde
@CommandLine.Option(
names = TIMERS_ENABLED_FLAG,
hidden = true,
defaultValue = "true",
description = "Whether to enable timer metrics (default: ${DEFAULT-VALUE}).")
private Boolean timersEnabled = MetricsConfiguration.DEFAULT_METRICS_TIMERS_ENABLED;

@ -42,7 +42,6 @@ public class NetworkingOptions implements CLIOptions<NetworkingConfiguration> {
@CommandLine.Option(
names = INITIATE_CONNECTIONS_FREQUENCY_FLAG,
hidden = true,
defaultValue = "30",
paramLabel = "<INTEGER>",
description =
"The frequency (in seconds) at which to initiate new outgoing connections (default: ${DEFAULT-VALUE})")
@ -52,7 +51,6 @@ public class NetworkingOptions implements CLIOptions<NetworkingConfiguration> {
@CommandLine.Option(
names = CHECK_MAINTAINED_CONNECTIONS_FREQUENCY_FLAG,
hidden = true,
defaultValue = "60",
paramLabel = "<INTEGER>",
description =
"The frequency (in seconds) at which to check maintained connections (default: ${DEFAULT-VALUE})")
@ -69,14 +67,12 @@ public class NetworkingOptions implements CLIOptions<NetworkingConfiguration> {
@CommandLine.Option(
names = DISCOVERY_PROTOCOL_V5_ENABLED,
hidden = true,
defaultValue = "false",
description = "Whether to enable P2P Discovery Protocol v5 (default: ${DEFAULT-VALUE})")
private final Boolean isPeerDiscoveryV5Enabled = false;
@CommandLine.Option(
names = FILTER_ON_ENR_FORK_ID,
hidden = true,
defaultValue = "true",
description = "Whether to enable filtering of peers based on the ENR field ForkId)")
private final Boolean filterOnEnrForkId = NetworkingConfiguration.DEFAULT_FILTER_ON_ENR_FORK_ID;
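A side note on the hunks above: the removed defaultValue attributes duplicated what the field initializers already provide. Assuming picocli's documented behavior, the initial field value is taken as the option default and is what ${DEFAULT-VALUE} renders in the description, so dropping the attribute should not change help output. A minimal standalone sketch of the pattern (the option and class names here are invented for illustration):

import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;

// Illustrative only: the default comes from the field initializer, not a defaultValue attribute.
@Command(name = "example-options", mixinStandardHelpOptions = true)
class ExampleOptions implements Runnable {
  @Option(
      names = "--example-frequency",
      paramLabel = "<INTEGER>",
      description = "Example frequency in seconds (default: ${DEFAULT-VALUE})")
  private Integer exampleFrequency = 30; // picocli picks this up as the default

  @Override
  public void run() {
    System.out.println("frequency=" + exampleFrequency);
  }

  public static void main(final String[] args) {
    new CommandLine(new ExampleOptions()).execute(args);
  }
}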

@ -51,7 +51,6 @@ import org.hyperledger.besu.cli.config.EthNetworkConfig;
import org.hyperledger.besu.config.GenesisConfigFile;
import org.hyperledger.besu.config.MergeConfigOptions;
import org.hyperledger.besu.crypto.SignatureAlgorithmFactory;
import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.datatypes.Wei;
import org.hyperledger.besu.ethereum.GasLimitCalculator;
@ -67,9 +66,6 @@ import org.hyperledger.besu.ethereum.eth.sync.SyncMode;
import org.hyperledger.besu.ethereum.eth.sync.SynchronizerConfiguration;
import org.hyperledger.besu.ethereum.eth.transactions.TransactionPoolConfiguration;
import org.hyperledger.besu.ethereum.p2p.peers.EnodeURLImpl;
import org.hyperledger.besu.ethereum.permissioning.LocalPermissioningConfiguration;
import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration;
import org.hyperledger.besu.ethereum.permissioning.SmartContractPermissioningConfiguration;
import org.hyperledger.besu.ethereum.trie.forest.pruner.PrunerConfiguration;
import org.hyperledger.besu.ethereum.worldstate.DataStorageConfiguration;
import org.hyperledger.besu.evm.precompile.AbstractAltBnPrecompiledContract;
@ -96,7 +92,6 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@ -104,7 +99,6 @@ import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.google.common.collect.Lists;
import com.google.common.io.Resources;
import io.vertx.core.json.JsonObject;
import org.apache.commons.io.FileUtils;
@ -129,7 +123,6 @@ public class BesuCommandTest extends CommandTestAbstract {
private static final String ENCLAVE_PUBLIC_KEY = "A1aVtMxLCUHmBVHXoZzzBgPbW/wj5axDpW9X8l91SGo=";
private static final String VALID_NODE_ID =
"6f8a80d14311c39f35f516fa664deaaaa13e85b2f7493f37f6144d86991ec012937307647bd3b9a82abe2974e1407241d54947bbb39763a4cac9f77166ad92a0";
private static final String PERMISSIONING_CONFIG_TOML = "/permissioning_config.toml";
private static final JsonRpcConfiguration DEFAULT_JSON_RPC_CONFIGURATION;
private static final GraphQLConfiguration DEFAULT_GRAPH_QL_CONFIGURATION;
private static final WebSocketConfiguration DEFAULT_WEB_SOCKET_CONFIGURATION;
@ -349,374 +342,6 @@ public class BesuCommandTest extends CommandTestAbstract {
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissionsSmartContractWithoutOptionMustError() {
parseCommand("--permissions-nodes-contract-address");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.startsWith("Missing required parameter for option '--permissions-nodes-contract-address'");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissionsEnabledWithoutContractAddressMustError() {
parseCommand("--permissions-nodes-contract-enabled");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.contains("No node permissioning contract address specified");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissionsEnabledWithInvalidContractAddressMustError() {
parseCommand(
"--permissions-nodes-contract-enabled",
"--permissions-nodes-contract-address",
"invalid-smart-contract-address");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8)).contains("Invalid value");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissionsEnabledWithTooShortContractAddressMustError() {
parseCommand(
"--permissions-nodes-contract-enabled", "--permissions-nodes-contract-address", "0x1234");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8)).contains("Invalid value");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissionsSmartContractMustUseOption() {
final String smartContractAddress = "0x0000000000000000000000000000000000001234";
parseCommand(
"--permissions-nodes-contract-enabled",
"--permissions-nodes-contract-address",
smartContractAddress);
final SmartContractPermissioningConfiguration smartContractPermissioningConfiguration =
new SmartContractPermissioningConfiguration();
smartContractPermissioningConfiguration.setNodeSmartContractAddress(
Address.fromHexString(smartContractAddress));
smartContractPermissioningConfiguration.setSmartContractNodeAllowlistEnabled(true);
verify(mockRunnerBuilder)
.permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
final PermissioningConfiguration config =
permissioningConfigurationArgumentCaptor.getValue().get();
assertThat(config.getSmartContractConfig().get())
.usingRecursiveComparison()
.isEqualTo(smartContractPermissioningConfiguration);
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissionsContractVersionDefaultValue() {
final SmartContractPermissioningConfiguration expectedConfig =
new SmartContractPermissioningConfiguration();
expectedConfig.setNodeSmartContractAddress(
Address.fromHexString("0x0000000000000000000000000000000000001234"));
expectedConfig.setSmartContractNodeAllowlistEnabled(true);
expectedConfig.setNodeSmartContractInterfaceVersion(1);
parseCommand(
"--permissions-nodes-contract-enabled",
"--permissions-nodes-contract-address",
"0x0000000000000000000000000000000000001234");
verify(mockRunnerBuilder)
.permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
final PermissioningConfiguration config =
permissioningConfigurationArgumentCaptor.getValue().get();
assertThat(config.getSmartContractConfig().get())
.usingRecursiveComparison()
.isEqualTo(expectedConfig);
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissionsContractVersionSetsValue() {
final SmartContractPermissioningConfiguration expectedConfig =
new SmartContractPermissioningConfiguration();
expectedConfig.setNodeSmartContractAddress(
Address.fromHexString("0x0000000000000000000000000000000000001234"));
expectedConfig.setSmartContractNodeAllowlistEnabled(true);
expectedConfig.setNodeSmartContractInterfaceVersion(2);
parseCommand(
"--permissions-nodes-contract-enabled",
"--permissions-nodes-contract-address",
"0x0000000000000000000000000000000000001234",
"--permissions-nodes-contract-version",
"2");
verify(mockRunnerBuilder)
.permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
final PermissioningConfiguration config =
permissioningConfigurationArgumentCaptor.getValue().get();
assertThat(config.getSmartContractConfig().get())
.usingRecursiveComparison()
.isEqualTo(expectedConfig);
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissionsSmartContractWithoutOptionMustError() {
parseCommand("--permissions-accounts-contract-address");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.startsWith(
"Missing required parameter for option '--permissions-accounts-contract-address'");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissionsEnabledWithoutContractAddressMustError() {
parseCommand("--permissions-accounts-contract-enabled");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.contains("No account permissioning contract address specified");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissionsEnabledWithInvalidContractAddressMustError() {
parseCommand(
"--permissions-accounts-contract-enabled",
"--permissions-accounts-contract-address",
"invalid-smart-contract-address");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8)).contains("Invalid value");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissionsEnabledWithTooShortContractAddressMustError() {
parseCommand(
"--permissions-accounts-contract-enabled",
"--permissions-accounts-contract-address",
"0x1234");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8)).contains("Invalid value");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissionsSmartContractMustUseOption() {
final String smartContractAddress = "0x0000000000000000000000000000000000001234";
parseCommand(
"--permissions-accounts-contract-enabled",
"--permissions-accounts-contract-address",
smartContractAddress);
final SmartContractPermissioningConfiguration smartContractPermissioningConfiguration =
new SmartContractPermissioningConfiguration();
smartContractPermissioningConfiguration.setAccountSmartContractAddress(
Address.fromHexString(smartContractAddress));
smartContractPermissioningConfiguration.setSmartContractAccountAllowlistEnabled(true);
verify(mockRunnerBuilder)
.permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
final PermissioningConfiguration permissioningConfiguration =
permissioningConfigurationArgumentCaptor.getValue().get();
assertThat(permissioningConfiguration.getSmartContractConfig()).isPresent();
final SmartContractPermissioningConfiguration effectiveSmartContractConfig =
permissioningConfiguration.getSmartContractConfig().get();
assertThat(effectiveSmartContractConfig.isSmartContractAccountAllowlistEnabled()).isTrue();
assertThat(effectiveSmartContractConfig.getAccountSmartContractAddress())
.isEqualTo(Address.fromHexString(smartContractAddress));
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissioningTomlPathWithoutOptionMustDisplayUsage() {
parseCommand("--permissions-nodes-config-file");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.startsWith("Missing required parameter for option '--permissions-nodes-config-file'");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissioningTomlPathWithoutOptionMustDisplayUsage() {
parseCommand("--permissions-accounts-config-file");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.startsWith("Missing required parameter for option '--permissions-accounts-config-file'");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissioningEnabledWithNonexistentConfigFileMustError() {
parseCommand(
"--permissions-nodes-config-file-enabled",
"--permissions-nodes-config-file",
"file-does-not-exist");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8)).contains("Configuration file does not exist");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissioningEnabledWithNonexistentConfigFileMustError() {
parseCommand(
"--permissions-accounts-config-file-enabled",
"--permissions-accounts-config-file",
"file-does-not-exist");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8)).contains("Configuration file does not exist");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissioningTomlFileWithNoPermissionsEnabledMustNotError() throws IOException {
final URL configFile = this.getClass().getResource(PERMISSIONING_CONFIG_TOML);
final Path permToml = createTempFile("toml", Resources.toByteArray(configFile));
parseCommand("--permissions-nodes-config-file", permToml.toString());
verify(mockRunnerBuilder).build();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissioningTomlFileWithNoPermissionsEnabledMustNotError()
throws IOException {
final URL configFile = this.getClass().getResource(PERMISSIONING_CONFIG_TOML);
final Path permToml = createTempFile("toml", Resources.toByteArray(configFile));
parseCommand("--permissions-accounts-config-file", permToml.toString());
verify(mockRunnerBuilder).build();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void defaultPermissionsTomlFileWithNoPermissionsEnabledMustNotError() {
parseCommand("--p2p-enabled", "false");
verify(mockRunnerBuilder).build();
assertThat(commandErrorOutput.toString(UTF_8)).doesNotContain("no permissions enabled");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissioningTomlPathMustUseOption() throws IOException {
final List<EnodeURL> allowedNodes =
Lists.newArrayList(
EnodeURLImpl.fromString(
"enode://6f8a80d14311c39f35f516fa664deaaaa13e85b2f7493f37f6144d86991ec012937307647bd3b9a82abe2974e1407241d54947bbb39763a4cac9f77166ad92a0@192.168.0.9:4567"),
EnodeURLImpl.fromString(
"enode://6f8a80d14311c39f35f516fa664deaaaa13e85b2f7493f37f6144d86991ec012937307647bd3b9a82abe2974e1407241d54947bbb39763a4cac9f77166ad92a0@192.169.0.9:4568"));
final URL configFile = this.getClass().getResource(PERMISSIONING_CONFIG_TOML);
final Path permToml = createTempFile("toml", Resources.toByteArray(configFile));
final String allowedNodesString =
allowedNodes.stream().map(Object::toString).collect(Collectors.joining(","));
parseCommand(
"--permissions-nodes-config-file-enabled",
"--permissions-nodes-config-file",
permToml.toString(),
"--bootnodes",
allowedNodesString);
final LocalPermissioningConfiguration localPermissioningConfiguration =
LocalPermissioningConfiguration.createDefault();
localPermissioningConfiguration.setNodePermissioningConfigFilePath(permToml.toString());
localPermissioningConfiguration.setNodeAllowlist(allowedNodes);
verify(mockRunnerBuilder)
.permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
final PermissioningConfiguration config =
permissioningConfigurationArgumentCaptor.getValue().get();
assertThat(config.getLocalConfig().get())
.usingRecursiveComparison()
.isEqualTo(localPermissioningConfiguration);
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissioningTomlPathMustUseOption() throws IOException {
final URL configFile = this.getClass().getResource(PERMISSIONING_CONFIG_TOML);
final Path permToml = createTempFile("toml", Resources.toByteArray(configFile));
parseCommand(
"--permissions-accounts-config-file-enabled",
"--permissions-accounts-config-file",
permToml.toString());
final LocalPermissioningConfiguration localPermissioningConfiguration =
LocalPermissioningConfiguration.createDefault();
localPermissioningConfiguration.setAccountPermissioningConfigFilePath(permToml.toString());
localPermissioningConfiguration.setAccountAllowlist(
Collections.singletonList("0x0000000000000000000000000000000000000009"));
verify(mockRunnerBuilder)
.permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
final PermissioningConfiguration permissioningConfiguration =
permissioningConfigurationArgumentCaptor.getValue().get();
assertThat(permissioningConfiguration.getLocalConfig()).isPresent();
final LocalPermissioningConfiguration effectiveLocalPermissioningConfig =
permissioningConfiguration.getLocalConfig().get();
assertThat(effectiveLocalPermissioningConfig.isAccountAllowlistEnabled()).isTrue();
assertThat(effectiveLocalPermissioningConfig.getAccountPermissioningConfigFilePath())
.isEqualTo(permToml.toString());
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void tomlThatConfiguresEverythingExceptPermissioningToml() throws IOException {
// Load a TOML that configures literally everything (except permissioning TOML config)
@ -1969,19 +1594,6 @@ public class BesuCommandTest extends CommandTestAbstract {
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
@Test
public void graphQLHttpEnabledPropertyMustBeUsed() {
parseCommand("--graphql-http-enabled");
verify(mockRunnerBuilder).graphQLConfiguration(graphQLConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(graphQLConfigArgumentCaptor.getValue().isEnabled()).isTrue();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
@Test
public void rpcApisSupportsEngine() {
parseCommand("--rpc-http-api", "ENGINE", "--rpc-http-enabled");
@ -2111,58 +1723,6 @@ public class BesuCommandTest extends CommandTestAbstract {
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
@Test
public void graphQLHttpHostAndPortOptionsMustBeUsed() {
final String host = "1.2.3.4";
final int port = 1234;
parseCommand(
"--graphql-http-enabled",
"--graphql-http-host",
host,
"--graphql-http-port",
String.valueOf(port));
verify(mockRunnerBuilder).graphQLConfiguration(graphQLConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(graphQLConfigArgumentCaptor.getValue().getHost()).isEqualTo(host);
assertThat(graphQLConfigArgumentCaptor.getValue().getPort()).isEqualTo(port);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
@Test
public void graphQLHttpHostMayBeLocalhost() {
final String host = "localhost";
parseCommand("--graphql-http-enabled", "--graphql-http-host", host);
verify(mockRunnerBuilder).graphQLConfiguration(graphQLConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(graphQLConfigArgumentCaptor.getValue().getHost()).isEqualTo(host);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
@Test
public void graphQLHttpHostMayBeIPv6() {
final String host = "2600:DB8::8545";
parseCommand("--graphql-http-enabled", "--graphql-http-host", host);
verify(mockRunnerBuilder).graphQLConfiguration(graphQLConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(graphQLConfigArgumentCaptor.getValue().getHost()).isEqualTo(host);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
/** test deprecated CLI option * */
@Deprecated
@Test
@ -3166,43 +2726,6 @@ public class BesuCommandTest extends CommandTestAbstract {
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void errorIsRaisedIfStaticNodesAreNotAllowed(final @TempDir Path testFolder)
throws IOException {
final Path staticNodesFile = testFolder.resolve("static-nodes.json");
final Path permissioningConfig = testFolder.resolve("permissioning.json");
final EnodeURL staticNodeURI =
EnodeURLImpl.builder()
.nodeId(
"50203c6bfca6874370e71aecc8958529fd723feb05013dc1abca8fc1fff845c5259faba05852e9dfe5ce172a7d6e7c2a3a5eaa8b541c8af15ea5518bbff5f2fa")
.ipAddress("127.0.0.1")
.useDefaultPorts()
.build();
final EnodeURL allowedNode =
EnodeURLImpl.builder()
.nodeId(
"50203c6bfca6874370e71aecc8958529fd723feb05013dc1abca8fc1fff845c5259faba05852e9dfe5ce172a7d6e7c2a3a5eaa8b541c8af15ea5518bbff5f2fa")
.useDefaultPorts()
.ipAddress("127.0.0.1")
.listeningPort(30304)
.build();
Files.write(staticNodesFile, ("[\"" + staticNodeURI.toString() + "\"]").getBytes(UTF_8));
Files.write(
permissioningConfig,
("nodes-allowlist=[\"" + allowedNode.toString() + "\"]").getBytes(UTF_8));
parseCommand(
"--data-path=" + testFolder,
"--bootnodes",
"--permissions-nodes-config-file-enabled=true",
"--permissions-nodes-config-file=" + permissioningConfig);
assertThat(commandErrorOutput.toString(UTF_8))
.contains(staticNodeURI.toString(), "not in nodes-allowlist");
}
@Test
public void tomlThatHasInvalidOptions() throws IOException {
final URL configFile = this.getClass().getResource("/complete_config.toml");

@ -0,0 +1,93 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.options;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.verify;
import org.hyperledger.besu.cli.CommandTestAbstract;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
public class GraphQlOptionsTest extends CommandTestAbstract {
@Test
public void graphQLHttpEnabledPropertyMustBeUsed() {
parseCommand("--graphql-http-enabled");
verify(mockRunnerBuilder).graphQLConfiguration(graphQLConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(graphQLConfigArgumentCaptor.getValue().isEnabled()).isTrue();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
@Test
public void graphQLHttpHostAndPortOptionsMustBeUsed() {
final String host = "1.2.3.4";
final int port = 1234;
parseCommand(
"--graphql-http-enabled",
"--graphql-http-host",
host,
"--graphql-http-port",
String.valueOf(port));
verify(mockRunnerBuilder).graphQLConfiguration(graphQLConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(graphQLConfigArgumentCaptor.getValue().getHost()).isEqualTo(host);
assertThat(graphQLConfigArgumentCaptor.getValue().getPort()).isEqualTo(port);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
@Test
public void graphQLHttpHostMayBeLocalhost() {
final String host = "localhost";
parseCommand("--graphql-http-enabled", "--graphql-http-host", host);
verify(mockRunnerBuilder).graphQLConfiguration(graphQLConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(graphQLConfigArgumentCaptor.getValue().getHost()).isEqualTo(host);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
@Test
public void graphQLHttpHostMayBeIPv6() {
final String host = "2600:DB8::8545";
parseCommand("--graphql-http-enabled", "--graphql-http-host", host);
verify(mockRunnerBuilder).graphQLConfiguration(graphQLConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(graphQLConfigArgumentCaptor.getValue().getHost()).isEqualTo(host);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
}

@ -0,0 +1,453 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.options;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.verify;
import org.hyperledger.besu.cli.CommandTestAbstract;
import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.ethereum.p2p.peers.EnodeURLImpl;
import org.hyperledger.besu.ethereum.permissioning.LocalPermissioningConfiguration;
import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration;
import org.hyperledger.besu.ethereum.permissioning.SmartContractPermissioningConfiguration;
import org.hyperledger.besu.plugin.data.EnodeURL;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import com.google.common.collect.Lists;
import com.google.common.io.Resources;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.io.TempDir;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
public class PermissionsOptionsTest extends CommandTestAbstract {
private static final String PERMISSIONING_CONFIG_TOML = "/permissioning_config.toml";
@Test
public void errorIsRaisedIfStaticNodesAreNotAllowed(final @TempDir Path testFolder)
throws IOException {
final Path staticNodesFile = testFolder.resolve("static-nodes.json");
final Path permissioningConfig = testFolder.resolve("permissioning.json");
final EnodeURL staticNodeURI =
EnodeURLImpl.builder()
.nodeId(
"50203c6bfca6874370e71aecc8958529fd723feb05013dc1abca8fc1fff845c5259faba05852e9dfe5ce172a7d6e7c2a3a5eaa8b541c8af15ea5518bbff5f2fa")
.ipAddress("127.0.0.1")
.useDefaultPorts()
.build();
final EnodeURL allowedNode =
EnodeURLImpl.builder()
.nodeId(
"50203c6bfca6874370e71aecc8958529fd723feb05013dc1abca8fc1fff845c5259faba05852e9dfe5ce172a7d6e7c2a3a5eaa8b541c8af15ea5518bbff5f2fa")
.useDefaultPorts()
.ipAddress("127.0.0.1")
.listeningPort(30304)
.build();
Files.write(staticNodesFile, ("[\"" + staticNodeURI.toString() + "\"]").getBytes(UTF_8));
Files.write(
permissioningConfig,
("nodes-allowlist=[\"" + allowedNode.toString() + "\"]").getBytes(UTF_8));
parseCommand(
"--data-path=" + testFolder,
"--bootnodes",
"--permissions-nodes-config-file-enabled=true",
"--permissions-nodes-config-file=" + permissioningConfig);
assertThat(commandErrorOutput.toString(UTF_8))
.contains(staticNodeURI.toString(), "not in nodes-allowlist");
}
@Test
public void nodePermissionsSmartContractWithoutOptionMustError() {
parseCommand("--permissions-nodes-contract-address");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.startsWith("Missing required parameter for option '--permissions-nodes-contract-address'");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissionsEnabledWithoutContractAddressMustError() {
parseCommand("--permissions-nodes-contract-enabled");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.contains("No node permissioning contract address specified");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissionsEnabledWithInvalidContractAddressMustError() {
parseCommand(
"--permissions-nodes-contract-enabled",
"--permissions-nodes-contract-address",
"invalid-smart-contract-address");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8)).contains("Invalid value");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissionsEnabledWithTooShortContractAddressMustError() {
parseCommand(
"--permissions-nodes-contract-enabled", "--permissions-nodes-contract-address", "0x1234");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8)).contains("Invalid value");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissionsSmartContractMustUseOption() {
final String smartContractAddress = "0x0000000000000000000000000000000000001234";
parseCommand(
"--permissions-nodes-contract-enabled",
"--permissions-nodes-contract-address",
smartContractAddress);
final SmartContractPermissioningConfiguration smartContractPermissioningConfiguration =
new SmartContractPermissioningConfiguration();
smartContractPermissioningConfiguration.setNodeSmartContractAddress(
Address.fromHexString(smartContractAddress));
smartContractPermissioningConfiguration.setSmartContractNodeAllowlistEnabled(true);
verify(mockRunnerBuilder)
.permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
final PermissioningConfiguration config =
permissioningConfigurationArgumentCaptor.getValue().get();
assertThat(config.getSmartContractConfig().get())
.usingRecursiveComparison()
.isEqualTo(smartContractPermissioningConfiguration);
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissionsContractVersionDefaultValue() {
final SmartContractPermissioningConfiguration expectedConfig =
new SmartContractPermissioningConfiguration();
expectedConfig.setNodeSmartContractAddress(
Address.fromHexString("0x0000000000000000000000000000000000001234"));
expectedConfig.setSmartContractNodeAllowlistEnabled(true);
expectedConfig.setNodeSmartContractInterfaceVersion(1);
parseCommand(
"--permissions-nodes-contract-enabled",
"--permissions-nodes-contract-address",
"0x0000000000000000000000000000000000001234");
verify(mockRunnerBuilder)
.permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
final PermissioningConfiguration config =
permissioningConfigurationArgumentCaptor.getValue().get();
assertThat(config.getSmartContractConfig().get())
.usingRecursiveComparison()
.isEqualTo(expectedConfig);
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissionsContractVersionSetsValue() {
final SmartContractPermissioningConfiguration expectedConfig =
new SmartContractPermissioningConfiguration();
expectedConfig.setNodeSmartContractAddress(
Address.fromHexString("0x0000000000000000000000000000000000001234"));
expectedConfig.setSmartContractNodeAllowlistEnabled(true);
expectedConfig.setNodeSmartContractInterfaceVersion(2);
parseCommand(
"--permissions-nodes-contract-enabled",
"--permissions-nodes-contract-address",
"0x0000000000000000000000000000000000001234",
"--permissions-nodes-contract-version",
"2");
verify(mockRunnerBuilder)
.permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
final PermissioningConfiguration config =
permissioningConfigurationArgumentCaptor.getValue().get();
assertThat(config.getSmartContractConfig().get())
.usingRecursiveComparison()
.isEqualTo(expectedConfig);
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissionsSmartContractWithoutOptionMustError() {
parseCommand("--permissions-accounts-contract-address");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.startsWith(
"Missing required parameter for option '--permissions-accounts-contract-address'");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissionsEnabledWithoutContractAddressMustError() {
parseCommand("--permissions-accounts-contract-enabled");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.contains("No account permissioning contract address specified");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissionsEnabledWithInvalidContractAddressMustError() {
parseCommand(
"--permissions-accounts-contract-enabled",
"--permissions-accounts-contract-address",
"invalid-smart-contract-address");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8)).contains("Invalid value");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissionsEnabledWithTooShortContractAddressMustError() {
parseCommand(
"--permissions-accounts-contract-enabled",
"--permissions-accounts-contract-address",
"0x1234");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8)).contains("Invalid value");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissionsSmartContractMustUseOption() {
final String smartContractAddress = "0x0000000000000000000000000000000000001234";
parseCommand(
"--permissions-accounts-contract-enabled",
"--permissions-accounts-contract-address",
smartContractAddress);
final SmartContractPermissioningConfiguration smartContractPermissioningConfiguration =
new SmartContractPermissioningConfiguration();
smartContractPermissioningConfiguration.setAccountSmartContractAddress(
Address.fromHexString(smartContractAddress));
smartContractPermissioningConfiguration.setSmartContractAccountAllowlistEnabled(true);
verify(mockRunnerBuilder)
.permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
final PermissioningConfiguration permissioningConfiguration =
permissioningConfigurationArgumentCaptor.getValue().get();
assertThat(permissioningConfiguration.getSmartContractConfig()).isPresent();
final SmartContractPermissioningConfiguration effectiveSmartContractConfig =
permissioningConfiguration.getSmartContractConfig().get();
assertThat(effectiveSmartContractConfig.isSmartContractAccountAllowlistEnabled()).isTrue();
assertThat(effectiveSmartContractConfig.getAccountSmartContractAddress())
.isEqualTo(Address.fromHexString(smartContractAddress));
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissioningTomlPathWithoutOptionMustDisplayUsage() {
parseCommand("--permissions-nodes-config-file");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.startsWith("Missing required parameter for option '--permissions-nodes-config-file'");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissioningTomlPathWithoutOptionMustDisplayUsage() {
parseCommand("--permissions-accounts-config-file");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.startsWith("Missing required parameter for option '--permissions-accounts-config-file'");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissioningEnabledWithNonexistentConfigFileMustError() {
parseCommand(
"--permissions-nodes-config-file-enabled",
"--permissions-nodes-config-file",
"file-does-not-exist");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8)).contains("Configuration file does not exist");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissioningEnabledWithNonexistentConfigFileMustError() {
parseCommand(
"--permissions-accounts-config-file-enabled",
"--permissions-accounts-config-file",
"file-does-not-exist");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8)).contains("Configuration file does not exist");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissioningTomlFileWithNoPermissionsEnabledMustNotError() throws IOException {
final URL configFile = this.getClass().getResource(PERMISSIONING_CONFIG_TOML);
final Path permToml = createTempFile("toml", Resources.toByteArray(configFile));
parseCommand("--permissions-nodes-config-file", permToml.toString());
verify(mockRunnerBuilder).build();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissioningTomlFileWithNoPermissionsEnabledMustNotError()
throws IOException {
final URL configFile = this.getClass().getResource(PERMISSIONING_CONFIG_TOML);
final Path permToml = createTempFile("toml", Resources.toByteArray(configFile));
parseCommand("--permissions-accounts-config-file", permToml.toString());
verify(mockRunnerBuilder).build();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void defaultPermissionsTomlFileWithNoPermissionsEnabledMustNotError() {
parseCommand("--p2p-enabled", "false");
verify(mockRunnerBuilder).build();
assertThat(commandErrorOutput.toString(UTF_8)).doesNotContain("no permissions enabled");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissioningTomlPathMustUseOption() throws IOException {
final List<EnodeURL> allowedNodes =
Lists.newArrayList(
EnodeURLImpl.fromString(
"enode://6f8a80d14311c39f35f516fa664deaaaa13e85b2f7493f37f6144d86991ec012937307647bd3b9a82abe2974e1407241d54947bbb39763a4cac9f77166ad92a0@192.168.0.9:4567"),
EnodeURLImpl.fromString(
"enode://6f8a80d14311c39f35f516fa664deaaaa13e85b2f7493f37f6144d86991ec012937307647bd3b9a82abe2974e1407241d54947bbb39763a4cac9f77166ad92a0@192.169.0.9:4568"));
final URL configFile = this.getClass().getResource(PERMISSIONING_CONFIG_TOML);
final Path permToml = createTempFile("toml", Resources.toByteArray(configFile));
final String allowedNodesString =
allowedNodes.stream().map(Object::toString).collect(Collectors.joining(","));
parseCommand(
"--permissions-nodes-config-file-enabled",
"--permissions-nodes-config-file",
permToml.toString(),
"--bootnodes",
allowedNodesString);
final LocalPermissioningConfiguration localPermissioningConfiguration =
LocalPermissioningConfiguration.createDefault();
localPermissioningConfiguration.setNodePermissioningConfigFilePath(permToml.toString());
localPermissioningConfiguration.setNodeAllowlist(allowedNodes);
verify(mockRunnerBuilder)
.permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
final PermissioningConfiguration config =
permissioningConfigurationArgumentCaptor.getValue().get();
assertThat(config.getLocalConfig().get())
.usingRecursiveComparison()
.isEqualTo(localPermissioningConfiguration);
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissioningTomlPathMustUseOption() throws IOException {
final URL configFile = this.getClass().getResource(PERMISSIONING_CONFIG_TOML);
final Path permToml = createTempFile("toml", Resources.toByteArray(configFile));
parseCommand(
"--permissions-accounts-config-file-enabled",
"--permissions-accounts-config-file",
permToml.toString());
final LocalPermissioningConfiguration localPermissioningConfiguration =
LocalPermissioningConfiguration.createDefault();
localPermissioningConfiguration.setAccountPermissioningConfigFilePath(permToml.toString());
localPermissioningConfiguration.setAccountAllowlist(
Collections.singletonList("0x0000000000000000000000000000000000000009"));
verify(mockRunnerBuilder)
.permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
final PermissioningConfiguration permissioningConfiguration =
permissioningConfigurationArgumentCaptor.getValue().get();
assertThat(permissioningConfiguration.getLocalConfig()).isPresent();
final LocalPermissioningConfiguration effectiveLocalPermissioningConfig =
permissioningConfiguration.getLocalConfig().get();
assertThat(effectiveLocalPermissioningConfig.isAccountAllowlistEnabled()).isTrue();
assertThat(effectiveLocalPermissioningConfig.getAccountPermissioningConfigFilePath())
.isEqualTo(permToml.toString());
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
}

@ -36,8 +36,9 @@ plugins {
sonarqube {
properties {
property "sonar.projectKey", "hyperledger_besu"
property "sonar.projectKey", "$System.env.SONAR_PROJECT_KEY"
property "sonar.organization", "hyperledger"
property "sonar.organization", "$System.env.SONAR_ORGANIZATION"
property "sonar.gradle.skipCompile", "true"
property "sonar.host.url", "https://sonarcloud.io"
property "sonar.coverage.jacoco.xmlReportPaths", "${buildDir}/reports/jacoco/jacocoRootReport/jacocoRootReport.xml"
property "sonar.coverage.exclusions", "acceptance-tests/**/*"
@ -646,6 +647,8 @@ task autocomplete(type: JavaExec) {
}
}
def archiveBuildVersion = project.hasProperty('release.releaseVersion') ? project.property('release.releaseVersion') : "${rootProject.version}"
installDist { dependsOn checkLicense, untunedStartScripts, evmToolStartScripts }
distTar {
@ -654,6 +657,7 @@ distTar {
delete fileTree(dir: 'build/distributions', include: '*.tar.gz')
}
compression = Compression.GZIP
setVersion(archiveBuildVersion)
archiveExtension = 'tar.gz'
}
@ -662,6 +666,7 @@ distZip {
doFirst {
delete fileTree(dir: 'build/distributions', include: '*.zip')
}
setVersion(archiveBuildVersion)
}
publishing {
@ -983,6 +988,12 @@ task checkSpdxHeader(type: CheckSpdxHeader) {
].join("|")
}
jacocoTestReport {
reports {
xml.enabled true
}
}
task jacocoRootReport(type: org.gradle.testing.jacoco.tasks.JacocoReport) {
additionalSourceDirs.from files(subprojects.sourceSets.main.allSource.srcDirs)
sourceDirectories.from files(subprojects.sourceSets.main.allSource.srcDirs)

@ -242,6 +242,13 @@ public interface GenesisConfigOptions {
*/
OptionalLong getCancunTime();
/**
* Gets prague time.
*
* @return the prague time
*/
OptionalLong getPragueTime();
/**
* Gets future eips time.
*

@ -28,7 +28,6 @@ import java.util.Optional;
import java.util.OptionalInt;
import java.util.OptionalLong;
import java.util.TreeMap;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.fasterxml.jackson.databind.node.ObjectNode;
@ -292,6 +291,11 @@ public class JsonGenesisConfigOptions implements GenesisConfigOptions {
return getOptionalLong("cancuntime");
}
@Override
public OptionalLong getPragueTime() {
return getOptionalLong("praguetime");
}
@Override
public OptionalLong getFutureEipsTime() {
return getOptionalLong("futureeipstime");
@ -304,10 +308,7 @@ public class JsonGenesisConfigOptions implements GenesisConfigOptions {
@Override
public Optional<Wei> getBaseFeePerGas() {
return Optional.ofNullable(configOverrides.get("baseFeePerGas"))
.map(Wei::fromHexString)
.map(Optional::of)
.orElse(Optional.empty());
return Optional.ofNullable(configOverrides.get("baseFeePerGas")).map(Wei::fromHexString);
}
@Override
@ -448,6 +449,7 @@ public class JsonGenesisConfigOptions implements GenesisConfigOptions {
getMergeNetSplitBlockNumber().ifPresent(l -> builder.put("mergeNetSplitBlock", l));
getShanghaiTime().ifPresent(l -> builder.put("shanghaiTime", l));
getCancunTime().ifPresent(l -> builder.put("cancunTime", l));
getPragueTime().ifPresent(l -> builder.put("pragueTime", l));
getTerminalBlockNumber().ifPresent(l -> builder.put("terminalBlockNumber", l));
getTerminalBlockHash().ifPresent(h -> builder.put("terminalBlockHash", h.toHexString()));
getFutureEipsTime().ifPresent(l -> builder.put("futureEipsTime", l));
@ -582,14 +584,18 @@ public class JsonGenesisConfigOptions implements GenesisConfigOptions {
.map(OptionalLong::getAsLong)
.distinct()
.sorted()
.collect(Collectors.toList());
.toList();
}
@Override
public List<Long> getForkBlockTimestamps() {
Stream<OptionalLong> forkBlockTimestamps =
Stream.of(
getShanghaiTime(), getCancunTime(), getFutureEipsTime(), getExperimentalEipsTime());
getShanghaiTime(),
getCancunTime(),
getPragueTime(),
getFutureEipsTime(),
getExperimentalEipsTime());
// when adding forks add an entry to ${REPO_ROOT}/config/src/test/resources/all_forks.json
return forkBlockTimestamps
@ -597,6 +603,6 @@ public class JsonGenesisConfigOptions implements GenesisConfigOptions {
.map(OptionalLong::getAsLong)
.distinct()
.sorted()
.collect(Collectors.toList());
.toList();
}
}
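To make the new fork wiring concrete, here is a small sketch of reading the timestamp back through the config classes touched above. The timestamp value and the exact JSON key casing are assumptions (the getter looks up the lower-cased "praguetime" key, matching the other fork-time getters, so config keys are presumed to be normalized); nothing in this sketch is part of the commit itself.

import java.util.OptionalLong;

import org.hyperledger.besu.config.GenesisConfigFile;

// Sketch only: a genesis config that sets a Prague fork timestamp, then reads it back.
class PragueTimeSketch {
  public static void main(final String[] args) {
    final GenesisConfigFile genesis =
        GenesisConfigFile.fromConfig("{\"config\":{\"pragueTime\":1740000000}}");
    final OptionalLong pragueTime = genesis.getConfigOptions().getPragueTime();
    System.out.println(pragueTime); // expected: OptionalLong[1740000000]
  }
}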

@ -48,6 +48,7 @@ public class StubGenesisConfigOptions implements GenesisConfigOptions, Cloneable
private OptionalLong mergeNetSplitBlockNumber = OptionalLong.empty();
private OptionalLong shanghaiTime = OptionalLong.empty();
private OptionalLong cancunTime = OptionalLong.empty();
private OptionalLong pragueTime = OptionalLong.empty();
private OptionalLong futureEipsTime = OptionalLong.empty();
private OptionalLong experimentalEipsTime = OptionalLong.empty();
private OptionalLong terminalBlockNumber = OptionalLong.empty();
@ -232,6 +233,11 @@ public class StubGenesisConfigOptions implements GenesisConfigOptions, Cloneable
return cancunTime;
}
@Override
public OptionalLong getPragueTime() {
return pragueTime;
}
@Override
public OptionalLong getFutureEipsTime() {
return futureEipsTime;
@ -364,6 +370,7 @@ public class StubGenesisConfigOptions implements GenesisConfigOptions, Cloneable
getMergeNetSplitBlockNumber().ifPresent(l -> builder.put("mergeNetSplitBlock", l));
getShanghaiTime().ifPresent(l -> builder.put("shanghaiTime", l));
getCancunTime().ifPresent(l -> builder.put("cancunTime", l));
getPragueTime().ifPresent(l -> builder.put("pragueTime", l));
getFutureEipsTime().ifPresent(l -> builder.put("futureEipsTime", l));
getExperimentalEipsTime().ifPresent(l -> builder.put("experimentalEipsTime", l));
getTerminalBlockNumber().ifPresent(l -> builder.put("terminalBlockNumber", l));
@ -608,6 +615,17 @@ public class StubGenesisConfigOptions implements GenesisConfigOptions, Cloneable
return this;
}
/**
* Prague time.
*
* @param timestamp the timestamp
* @return the stub genesis config options
*/
public StubGenesisConfigOptions pragueTime(final long timestamp) {
pragueTime = OptionalLong.of(timestamp);
return this;
}
/**
* Future EIPs Time block.
*

@@ -38,14 +38,14 @@ import org.apache.tuweni.units.bigints.UInt256;
 import org.assertj.core.api.ThrowableAssert.ThrowingCallable;
 import org.junit.jupiter.api.Test;
-public class GenesisConfigFileTest {
+class GenesisConfigFileTest {
   private static final BigInteger MAINNET_CHAIN_ID = BigInteger.ONE;
   private static final BigInteger DEVELOPMENT_CHAIN_ID = BigInteger.valueOf(1337);
   private static final GenesisConfigFile EMPTY_CONFIG = fromConfig("{}");
   @Test
-  public void shouldLoadMainnetConfigFile() {
+  void shouldLoadMainnetConfigFile() {
     final GenesisConfigFile config = GenesisConfigFile.mainnet();
     // Sanity check some basic properties to confirm this is the mainnet file.
     assertThat(config.getConfigOptions().isEthHash()).isTrue();
@@ -58,7 +58,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void shouldLoadDevelopmentConfigFile() {
+  void shouldLoadDevelopmentConfigFile() {
     final GenesisConfigFile config = GenesisConfigFile.development();
     // Sanity check some basic properties to confirm this is the dev file.
     assertThat(config.getConfigOptions().isEthHash()).isTrue();
@@ -71,82 +71,82 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void shouldGetParentHash() {
+  void shouldGetParentHash() {
     assertThat(configWithProperty("parentHash", "844633").getParentHash()).isEqualTo("844633");
   }
   @Test
-  public void shouldDefaultParentHashToEmptyString() {
+  void shouldDefaultParentHashToEmptyString() {
     assertThat(EMPTY_CONFIG.getParentHash()).isEmpty();
   }
   @Test
-  public void shouldGetDifficulty() {
+  void shouldGetDifficulty() {
     assertThat(configWithProperty("difficulty", "1234").getDifficulty()).isEqualTo("1234");
   }
   @Test
-  public void shouldRequireDifficulty() {
+  void shouldRequireDifficulty() {
     assertInvalidConfiguration(EMPTY_CONFIG::getDifficulty);
   }
   @Test
-  public void shouldGetExtraData() {
+  void shouldGetExtraData() {
     assertThat(configWithProperty("extraData", "yay").getExtraData()).isEqualTo("yay");
   }
   @Test
-  public void shouldDefaultExtraDataToEmptyString() {
+  void shouldDefaultExtraDataToEmptyString() {
     assertThat(EMPTY_CONFIG.getExtraData()).isEmpty();
   }
   @Test
-  public void shouldGetGasLimit() {
+  void shouldGetGasLimit() {
     assertThat(configWithProperty("gasLimit", "1000").getGasLimit()).isEqualTo(1000);
   }
   @Test
-  public void shouldRequireGasLimit() {
+  void shouldRequireGasLimit() {
     assertInvalidConfiguration(EMPTY_CONFIG::getGasLimit);
   }
   @Test
-  public void shouldGetMixHash() {
+  void shouldGetMixHash() {
     assertThat(configWithProperty("mixHash", "asdf").getMixHash()).isEqualTo("asdf");
   }
   @Test
-  public void shouldDefaultMixHashToEmptyString() {
+  void shouldDefaultMixHashToEmptyString() {
     assertThat(EMPTY_CONFIG.getMixHash()).isEmpty();
   }
   @Test
-  public void shouldGetNonce() {
+  void shouldGetNonce() {
     assertThat(configWithProperty("nonce", "0x10").getNonce()).isEqualTo("0x10");
   }
   @Test
-  public void shouldDefaultNonceToZero() {
+  void shouldDefaultNonceToZero() {
     assertThat(EMPTY_CONFIG.getNonce()).isEqualTo("0x0");
   }
   @Test
-  public void shouldGetCoinbase() {
+  void shouldGetCoinbase() {
     assertThat(configWithProperty("coinbase", "abcd").getCoinbase()).contains("abcd");
   }
   @Test
-  public void shouldReturnEmptyWhenCoinbaseNotSpecified() {
+  void shouldReturnEmptyWhenCoinbaseNotSpecified() {
     assertThat(EMPTY_CONFIG.getCoinbase()).isEmpty();
   }
   @Test
-  public void shouldGetTimestamp() {
+  void shouldGetTimestamp() {
     assertThat(configWithProperty("timestamp", "0x10").getTimestamp()).isEqualTo(16L);
   }
   @Test
-  public void shouldGetBaseFeeAtGenesis() {
+  void shouldGetBaseFeeAtGenesis() {
     GenesisConfigFile withBaseFeeAtGenesis =
         GenesisConfigFile.fromConfig("{\"config\":{\"londonBlock\":0},\"baseFeePerGas\":\"0xa\"}");
     assertThat(withBaseFeeAtGenesis.getBaseFeePerGas()).isPresent();
@@ -154,7 +154,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void shouldGetDefaultBaseFeeAtGenesis() {
+  void shouldGetDefaultBaseFeeAtGenesis() {
     GenesisConfigFile withBaseFeeAtGenesis =
         GenesisConfigFile.fromConfig("{\"config\":{\"londonBlock\":0}}");
     // no specified baseFeePerGas:
@@ -165,7 +165,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void shouldGetBaseFeeExplicitlyAtGenesis() {
+  void shouldGetBaseFeeExplicitlyAtGenesis() {
     GenesisConfigFile withBaseFeeNotAtGenesis =
         GenesisConfigFile.fromConfig("{\"config\":{\"londonBlock\":10},\"baseFeePerGas\":\"0xa\"}");
     // specified baseFeePerGas:
@@ -176,14 +176,14 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void shouldOverrideConfigOptionsBaseFeeWhenSpecified() {
+  void shouldOverrideConfigOptionsBaseFeeWhenSpecified() {
     GenesisConfigOptions withOverrides =
         EMPTY_CONFIG.getConfigOptions(Map.of("baseFeePerGas", Wei.of(8).toString()));
     assertThat(withOverrides.getBaseFeePerGas()).contains(Wei.of(8L));
   }
   @Test
-  public void shouldGetTerminalTotalDifficultyAtGenesis() {
+  void shouldGetTerminalTotalDifficultyAtGenesis() {
     GenesisConfigFile withTerminalTotalDifficultyAtGenesis =
         fromConfig("{\"config\":{\"terminalTotalDifficulty\":1000}}");
     assertThat(withTerminalTotalDifficultyAtGenesis.getConfigOptions().getTerminalTotalDifficulty())
@@ -191,12 +191,12 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void shouldGetEmptyTerminalTotalDifficultyAtGenesis() {
+  void shouldGetEmptyTerminalTotalDifficultyAtGenesis() {
     assertThat(EMPTY_CONFIG.getConfigOptions().getTerminalTotalDifficulty()).isNotPresent();
   }
   @Test
-  public void assertSepoliaTerminalTotalDifficulty() {
+  void assertSepoliaTerminalTotalDifficulty() {
     GenesisConfigOptions sepoliaOptions =
         GenesisConfigFile.genesisFileFromResources("/sepolia.json").getConfigOptions();
@@ -206,7 +206,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void assertGoerliTerminalTotalDifficulty() {
+  void assertGoerliTerminalTotalDifficulty() {
     GenesisConfigOptions goerliOptions =
         GenesisConfigFile.genesisFileFromResources("/goerli.json").getConfigOptions();
@@ -216,7 +216,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void assertMainnetTerminalTotalDifficulty() {
+  void assertMainnetTerminalTotalDifficulty() {
     GenesisConfigOptions mainnetOptions =
         GenesisConfigFile.genesisFileFromResources("/mainnet.json").getConfigOptions();
@@ -227,7 +227,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void assertTerminalTotalDifficultyOverride() {
+  void assertTerminalTotalDifficultyOverride() {
     GenesisConfigOptions sepoliaOverrideOptions =
         GenesisConfigFile.genesisFileFromResources("/sepolia.json")
             .getConfigOptions(Map.of("terminalTotalDifficulty", String.valueOf(Long.MAX_VALUE)));
@@ -238,7 +238,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void shouldFindMergeNetSplitForkAndAlias() {
+  void shouldFindMergeNetSplitForkAndAlias() {
     GenesisConfigFile mergeNetSplitGenesis =
         GenesisConfigFile.fromConfig(
             "{\"config\":{\"mergeNetsplitBlock\":11},\"baseFeePerGas\":\"0xa\"}");
@@ -255,12 +255,12 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void shouldDefaultTimestampToZero() {
+  void shouldDefaultTimestampToZero() {
     assertThat(EMPTY_CONFIG.getTimestamp()).isZero();
   }
   @Test
-  public void shouldGetAllocations() {
+  void shouldGetAllocations() {
     final GenesisConfigFile config =
         fromConfig(
             "{"
@@ -309,13 +309,13 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void shouldGetEmptyAllocationsWhenAllocNotPresent() {
+  void shouldGetEmptyAllocationsWhenAllocNotPresent() {
     final GenesisConfigFile config = fromConfig("{}");
     assertThat(config.streamAllocations()).isEmpty();
   }
   @Test
-  public void shouldGetLargeChainId() {
+  void shouldGetLargeChainId() {
     final GenesisConfigFile config =
         fromConfig(
             "{\"config\": { \"chainId\": 31415926535897932384626433832795028841971693993751058209749445923078164062862089986280348253421170679821480865132823066470938446095 }}");
@@ -326,7 +326,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void mustNotAcceptComments() {
+  void mustNotAcceptComments() {
     assertThatThrownBy(
             () ->
                 fromConfig(
@@ -336,7 +336,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void testOverridePresent() {
+  void testOverridePresent() {
     final GenesisConfigFile config = GenesisConfigFile.development();
     final int bigBlock = 999_999_999;
     final String bigBlockString = Integer.toString(bigBlock);
@@ -353,7 +353,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void testOverrideNull() {
+  void testOverrideNull() {
     final GenesisConfigFile config = GenesisConfigFile.development();
     final Map<String, String> override = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
     override.put("istanbulBlock", null);
@@ -367,7 +367,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void testOverrideCaseInsensitivity() {
+  void testOverrideCaseInsensitivity() {
     final GenesisConfigFile config = GenesisConfigFile.development();
     final int bigBlock = 999_999_999;
     final String bigBlockString = Integer.toString(bigBlock);
@@ -386,7 +386,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void testOverrideEmptyString() {
+  void testOverrideEmptyString() {
     final GenesisConfigFile config = GenesisConfigFile.development();
     final Map<String, String> override = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
     override.put("istanbulBlock", "");
@@ -399,7 +399,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void testNoOverride() {
+  void testNoOverride() {
     final GenesisConfigFile config = GenesisConfigFile.development();
     assertThat(config.getConfigOptions().getLondonBlockNumber()).hasValue(0);
@@ -411,7 +411,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void testConstantinopleFixShouldNotBeSupportedAlongPetersburg() {
+  void testConstantinopleFixShouldNotBeSupportedAlongPetersburg() {
     // petersburg node
     final GenesisConfigFile config = GenesisConfigFile.genesisFileFromResources("/all_forks.json");
@@ -428,7 +428,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void shouldLoadForksInSortedOrder() throws IOException {
+  void shouldLoadForksInSortedOrder() throws IOException {
     final ObjectNode configNode =
         new ObjectMapper()
             .createObjectNode()
@@ -449,7 +449,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void shouldLoadForksIgnoreClassicForkBlock() throws IOException {
+  void shouldLoadForksIgnoreClassicForkBlock() throws IOException {
     final ObjectNode configNode =
         new ObjectMapper()
             .createObjectNode()
@@ -469,7 +469,7 @@ public class GenesisConfigFileTest {
   }
   @Test
-  public void shouldLoadForksIgnoreUnexpectedValues() throws IOException {
+  void shouldLoadForksIgnoreUnexpectedValues() throws IOException {
     final ObjectNode configNoUnexpectedForks =
         new ObjectMapper()
             .createObjectNode()
@@ -533,7 +533,7 @@ public class GenesisConfigFileTest {
    * been case agnostic.
    */
   @Test
-  public void roundTripForkIdBlocks() throws IOException {
+  void roundTripForkIdBlocks() throws IOException {
     final String configText =
         Resources.toString(Resources.getResource("all_forks.json"), StandardCharsets.UTF_8);
     final ObjectNode genesisNode = JsonUtil.objectNodeFromString(configText);

@@ -29,30 +29,30 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.tuweni.units.bigints.UInt256;
 import org.junit.jupiter.api.Test;
-public class GenesisConfigOptionsTest {
+class GenesisConfigOptionsTest {
   @Test
-  public void shouldUseEthHashWhenEthHashInConfig() {
+  void shouldUseEthHashWhenEthHashInConfig() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("ethash", emptyMap()));
     assertThat(config.isEthHash()).isTrue();
     assertThat(config.getConsensusEngine()).isEqualTo("ethash");
   }
   @Test
-  public void shouldNotUseEthHashIfEthHashNotPresent() {
+  void shouldNotUseEthHashIfEthHashNotPresent() {
     final GenesisConfigOptions config = fromConfigOptions(emptyMap());
     assertThat(config.isEthHash()).isFalse();
   }
   @Test
-  public void shouldUseIbft2WhenIbft2InConfig() {
+  void shouldUseIbft2WhenIbft2InConfig() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("ibft2", emptyMap()));
     assertThat(config.isIbft2()).isTrue();
     assertThat(config.isPoa()).isTrue();
     assertThat(config.getConsensusEngine()).isEqualTo("ibft2");
   }
-  public void shouldUseQbftWhenQbftInConfig() {
+  void shouldUseQbftWhenQbftInConfig() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("qbft", emptyMap()));
     assertThat(config.isQbft()).isTrue();
     assertThat(config.isPoa()).isTrue();
@@ -60,7 +60,7 @@ public class GenesisConfigOptionsTest {
   }
   @Test
-  public void shouldUseCliqueWhenCliqueInConfig() {
+  void shouldUseCliqueWhenCliqueInConfig() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("clique", emptyMap()));
     assertThat(config.isClique()).isTrue();
     assertThat(config.isPoa()).isTrue();
@@ -69,7 +69,7 @@ public class GenesisConfigOptionsTest {
   }
   @Test
-  public void shouldNotUseCliqueIfCliqueNotPresent() {
+  void shouldNotUseCliqueIfCliqueNotPresent() {
     final GenesisConfigOptions config = fromConfigOptions(emptyMap());
     assertThat(config.isClique()).isFalse();
     assertThat(config.isPoa()).isFalse();
@@ -77,63 +77,63 @@ public class GenesisConfigOptionsTest {
   }
   @Test
-  public void shouldGetHomesteadBlockNumber() {
+  void shouldGetHomesteadBlockNumber() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("homesteadBlock", 1000));
     assertThat(config.getHomesteadBlockNumber()).hasValue(1000);
   }
   @Test
-  public void shouldGetDaoForkBlockNumber() {
+  void shouldGetDaoForkBlockNumber() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("daoForkBlock", 1000));
     assertThat(config.getDaoForkBlock()).hasValue(1000);
   }
   @Test
-  public void shouldNotHaveDaoForkBlockWhenSetToZero() {
+  void shouldNotHaveDaoForkBlockWhenSetToZero() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("daoForkBlock", 0));
     assertThat(config.getDaoForkBlock()).isEmpty();
   }
   @Test
-  public void shouldGetTangerineWhistleBlockNumber() {
+  void shouldGetTangerineWhistleBlockNumber() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("eip150Block", 1000));
     assertThat(config.getTangerineWhistleBlockNumber()).hasValue(1000);
   }
   @Test
-  public void shouldGetSpuriousDragonBlockNumber() {
+  void shouldGetSpuriousDragonBlockNumber() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("eip158Block", 1000));
     assertThat(config.getSpuriousDragonBlockNumber()).hasValue(1000);
   }
   @Test
-  public void shouldGetByzantiumBlockNumber() {
+  void shouldGetByzantiumBlockNumber() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("byzantiumBlock", 1000));
     assertThat(config.getByzantiumBlockNumber()).hasValue(1000);
   }
   @Test
-  public void shouldGetConstantinopleBlockNumber() {
+  void shouldGetConstantinopleBlockNumber() {
     final GenesisConfigOptions config =
         fromConfigOptions(singletonMap("constantinopleBlock", 1000));
     assertThat(config.getConstantinopleBlockNumber()).hasValue(1000);
   }
   @Test
-  public void shouldGetConstantinopleFixBlockNumber() {
+  void shouldGetConstantinopleFixBlockNumber() {
     final GenesisConfigOptions config =
         fromConfigOptions(singletonMap("constantinopleFixBlock", 1000));
     assertThat(config.getPetersburgBlockNumber()).hasValue(1000);
   }
   @Test
-  public void shouldGetPetersburgBlockNumber() {
+  void shouldGetPetersburgBlockNumber() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("petersburgBlock", 1000));
     assertThat(config.getPetersburgBlockNumber()).hasValue(1000);
   }
   @Test
-  public void shouldFailWithBothPetersburgAndConstantinopleFixBlockNumber() {
+  void shouldFailWithBothPetersburgAndConstantinopleFixBlockNumber() {
     Map<String, Object> configMap = new HashMap<>();
     configMap.put("constantinopleFixBlock", 1000);
     configMap.put("petersburgBlock", 1000);
@@ -145,68 +145,74 @@ public class GenesisConfigOptionsTest {
   }
   @Test
-  public void shouldGetIstanbulBlockNumber() {
+  void shouldGetIstanbulBlockNumber() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("istanbulBlock", 1000));
     assertThat(config.getIstanbulBlockNumber()).hasValue(1000);
   }
   @Test
-  public void shouldGetMuirGlacierBlockNumber() {
+  void shouldGetMuirGlacierBlockNumber() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("muirGlacierBlock", 1000));
     assertThat(config.getMuirGlacierBlockNumber()).hasValue(1000);
   }
   @Test
-  public void shouldGetBerlinBlockNumber() {
+  void shouldGetBerlinBlockNumber() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("berlinBlock", 1000));
     assertThat(config.getBerlinBlockNumber()).hasValue(1000);
   }
   @Test
-  public void shouldGetLondonBlockNumber() {
+  void shouldGetLondonBlockNumber() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("londonblock", 1000));
     assertThat(config.getLondonBlockNumber()).hasValue(1000);
   }
   @Test
-  public void shouldGetArrowGlacierBlockNumber() {
+  void shouldGetArrowGlacierBlockNumber() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("arrowGlacierBlock", 1000));
     assertThat(config.getArrowGlacierBlockNumber()).hasValue(1000);
   }
   @Test
-  public void shouldGetGrayGlacierBlockNumber() {
+  void shouldGetGrayGlacierBlockNumber() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("grayGlacierBlock", 4242));
     assertThat(config.getGrayGlacierBlockNumber()).hasValue(4242);
   }
   @Test
-  public void shouldGetShanghaiTime() {
+  void shouldGetShanghaiTime() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("shanghaiTime", 1670470141));
     assertThat(config.getShanghaiTime()).hasValue(1670470141);
   }
   @Test
-  public void shouldGetCancunTime() {
+  void shouldGetCancunTime() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("cancunTime", 1670470142));
     assertThat(config.getCancunTime()).hasValue(1670470142);
   }
   @Test
-  public void shouldGetFutureEipsTime() {
+  void shouldGetPragueTime() {
+    final GenesisConfigOptions config = fromConfigOptions(singletonMap("pragueTime", 1670470143));
+    assertThat(config.getPragueTime()).hasValue(1670470143);
+  }
+  @Test
+  void shouldGetFutureEipsTime() {
     final GenesisConfigOptions config = fromConfigOptions(singletonMap("futureEipsTime", 1337));
     assertThat(config.getFutureEipsTime()).hasValue(1337);
   }
   @Test
-  public void shouldGetExperimentalEipsTime() {
+  void shouldGetExperimentalEipsTime() {
     final GenesisConfigOptions config =
         fromConfigOptions(singletonMap("experimentalEipsTime", 1337));
     assertThat(config.getExperimentalEipsTime()).hasValue(1337);
   }
   @Test
-  public void shouldNotReturnEmptyOptionalWhenBlockNumberNotSpecified() {
+  void shouldNotReturnEmptyOptionalWhenBlockNumberNotSpecified() {
     final GenesisConfigOptions config = fromConfigOptions(emptyMap());
     assertThat(config.getHomesteadBlockNumber()).isEmpty();
     assertThat(config.getDaoForkBlock()).isEmpty();
@@ -224,19 +230,20 @@ public class GenesisConfigOptionsTest {
     assertThat(config.getMergeNetSplitBlockNumber()).isEmpty();
     assertThat(config.getShanghaiTime()).isEmpty();
     assertThat(config.getCancunTime()).isEmpty();
+    assertThat(config.getPragueTime()).isEmpty();
     assertThat(config.getFutureEipsTime()).isEmpty();
     assertThat(config.getExperimentalEipsTime()).isEmpty();
   }
   @Test
-  public void shouldGetChainIdWhenSpecified() {
+  void shouldGetChainIdWhenSpecified() {
     final GenesisConfigOptions config =
         fromConfigOptions(singletonMap("chainId", BigInteger.valueOf(32)));
     assertThat(config.getChainId()).hasValue(BigInteger.valueOf(32));
   }
   @Test
-  public void shouldSupportEmptyGenesisConfig() {
+  void shouldSupportEmptyGenesisConfig() {
     final GenesisConfigOptions config = GenesisConfigFile.fromConfig("{}").getConfigOptions();
     assertThat(config.isEthHash()).isFalse();
     assertThat(config.isClique()).isFalse();
@@ -245,7 +252,7 @@ public class GenesisConfigOptionsTest {
   }
   @Test
-  public void shouldGetTerminalTotalDifficultyWhenSpecified() {
+  void shouldGetTerminalTotalDifficultyWhenSpecified() {
     final GenesisConfigOptions config =
         fromConfigOptions(singletonMap("terminalTotalDifficulty", BigInteger.valueOf(1000)));
     assertThat(config.getTerminalTotalDifficulty()).isPresent();
@@ -259,7 +266,7 @@ public class GenesisConfigOptionsTest {
   }
   @Test
-  public void shouldNotReturnTerminalTotalDifficultyWhenNotSpecified() {
+  void shouldNotReturnTerminalTotalDifficultyWhenNotSpecified() {
     final GenesisConfigOptions config = fromConfigOptions(emptyMap());
     assertThat(config.getTerminalTotalDifficulty()).isNotPresent();
     // stubJsonGenesis
@@ -267,28 +274,28 @@ public class GenesisConfigOptionsTest {
   }
   @Test
-  public void isZeroBaseFeeShouldDefaultToFalse() {
+  void isZeroBaseFeeShouldDefaultToFalse() {
     final GenesisConfigOptions config = GenesisConfigFile.fromConfig("{}").getConfigOptions();
     assertThat(config.isZeroBaseFee()).isFalse();
   }
   @Test
-  public void isZeroBaseFeeParsedCorrectly() {
+  void isZeroBaseFeeParsedCorrectly() {
     final GenesisConfigOptions config = fromConfigOptions(Map.of("zerobasefee", true));
     assertThat(config.isZeroBaseFee()).isTrue();
   }
   @Test
-  public void asMapIncludesZeroBaseFee() {
+  void asMapIncludesZeroBaseFee() {
     final GenesisConfigOptions config = fromConfigOptions(Map.of("zerobasefee", true));
     assertThat(config.asMap()).containsOnlyKeys("zeroBaseFee").containsValue(true);
   }
   @Test
-  public void shouldGetDepositContractAddress() {
+  void shouldGetDepositContractAddress() {
     final GenesisConfigOptions config =
         fromConfigOptions(
             singletonMap("depositContractAddress", "0x00000000219ab540356cbb839cbe05303d7705fa"));
@@ -297,13 +304,13 @@ public class GenesisConfigOptionsTest {
   }
   @Test
-  public void shouldNotHaveDepositContractAddressWhenEmpty() {
+  void shouldNotHaveDepositContractAddressWhenEmpty() {
     final GenesisConfigOptions config = fromConfigOptions(emptyMap());
     assertThat(config.getDepositContractAddress()).isEmpty();
   }
   @Test
-  public void asMapIncludesDepositContractAddress() {
+  void asMapIncludesDepositContractAddress() {
     final GenesisConfigOptions config = fromConfigOptions(Map.of("depositContractAddress", "0x0"));
     assertThat(config.asMap())

@@ -16,6 +16,7 @@
   "mergeNetSplitBlock": 14,
   "shanghaiTime": 15,
   "cancunTime": 16,
+  "pragueTime": 17,
   "futureEipsTime": 98,
   "experimentalEipsTime": 99,
   "ecip1015Block": 102,

@@ -53,6 +53,15 @@ public class QbftGetValidatorsByBlockNumber extends AbstractBlockParameterMethod
     return request.getRequiredParameter(0, BlockParameter.class);
   }
+  @Override
+  protected Object pendingResult(final JsonRpcRequestContext request) {
+    final BlockHeader blockHeader = getBlockchainQueries().headBlockHeader();
+    LOG.trace("Received RPC rpcName={} block={}", getName(), blockHeader.getNumber());
+    return validatorProvider.getValidatorsAfterBlock(blockHeader).stream()
+        .map(Address::toString)
+        .collect(Collectors.toList());
+  }
   @Override
   protected Object resultByBlockNumber(
       final JsonRpcRequestContext request, final long blockNumber) {

@@ -23,6 +23,7 @@ import org.hyperledger.besu.datatypes.Address;
 import org.hyperledger.besu.ethereum.api.jsonrpc.internal.JsonRpcRequest;
 import org.hyperledger.besu.ethereum.api.jsonrpc.internal.JsonRpcRequestContext;
 import org.hyperledger.besu.ethereum.api.jsonrpc.internal.parameters.BlockParameter;
+import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcSuccessResponse;
 import org.hyperledger.besu.ethereum.api.query.BlockchainQueries;
 import org.hyperledger.besu.ethereum.core.BlockHeader;
@@ -73,4 +74,33 @@ public class QbftGetValidatorsByBlockNumberTest {
     Object result = method.resultByBlockNumber(request, 12);
     assertThat(result).isEqualTo(expectedOutput);
   }
+  @Test
+  public void shouldReturnListOfValidatorsFromLatestBlock() {
+    request =
+        new JsonRpcRequestContext(
+            new JsonRpcRequest("2.0", "qbft_getValidatorsByBlockNumber", new String[] {"latest"}));
+    when(blockchainQueries.headBlockNumber()).thenReturn(12L);
+    when(blockchainQueries.getBlockHeaderByNumber(12)).thenReturn(Optional.of(blockHeader));
+    final List<Address> addresses = Collections.singletonList(Address.ID);
+    final List<String> expectedOutput = Collections.singletonList(Address.ID.toString());
+    when(validatorProvider.getValidatorsForBlock(any())).thenReturn(addresses);
+    Object result = method.response(request);
+    assertThat(result).isInstanceOf(JsonRpcSuccessResponse.class);
+    assertThat(((JsonRpcSuccessResponse) result).getResult()).isEqualTo(expectedOutput);
+  }
+  @Test
+  public void shouldReturnListOfValidatorsFromPendingBlock() {
+    request =
+        new JsonRpcRequestContext(
+            new JsonRpcRequest("2.0", "qbft_getValidatorsByBlockNumber", new String[] {"pending"}));
+    when(blockchainQueries.headBlockHeader()).thenReturn(blockHeader);
+    final List<Address> addresses = Collections.singletonList(Address.ID);
+    final List<String> expectedOutput = Collections.singletonList(Address.ID.toString());
+    when(validatorProvider.getValidatorsAfterBlock(any())).thenReturn(addresses);
+    Object result = method.response(request);
+    assertThat(result).isInstanceOf(JsonRpcSuccessResponse.class);
+    assertThat(((JsonRpcSuccessResponse) result).getResult()).isEqualTo(expectedOutput);
+  }
 }

@@ -168,3 +168,29 @@ tasks.register('generateTestBlockchain') {
   }
 }
 test.dependsOn(generateTestBlockchain)
+/*
+ Utility tasks used to separate out long running suites of tests so they can be parallelized in CI
+*/
+tasks.register("testBonsai", Test) {
+  useJUnitPlatform()
+  filter {
+    includeTestsMatching("org.hyperledger.besu.ethereum.api.jsonrpc.bonsai.*")
+  }
+  dependsOn(generateTestBlockchain)
+}
+tasks.register("testForest", Test) {
+  useJUnitPlatform()
+  filter {
+    includeTestsMatching("org.hyperledger.besu.ethereum.api.jsonrpc.forest.*")
+  }
+  dependsOn(generateTestBlockchain)
+}
+tasks.register("testRemainder", Test) {
+  useJUnitPlatform()
+  filter {
+    excludeTestsMatching("org.hyperledger.besu.ethereum.api.jsonrpc.bonsai.*")
+    excludeTestsMatching("org.hyperledger.besu.ethereum.api.jsonrpc.forest.*")
+  }
+}

@@ -281,7 +281,7 @@ public final class GenesisState {
     if (shanghaiTimestamp.isPresent()) {
       return genesis.getTimestamp() >= shanghaiTimestamp.getAsLong();
     }
-    return false;
+    return isCancunAtGenesis(genesis);
   }
   private static boolean isCancunAtGenesis(final GenesisConfigFile genesis) {
@@ -289,7 +289,23 @@ public final class GenesisState {
     if (cancunTimestamp.isPresent()) {
       return genesis.getTimestamp() >= cancunTimestamp.getAsLong();
     }
-    return false;
+    return isPragueAtGenesis(genesis);
+  }
+  private static boolean isPragueAtGenesis(final GenesisConfigFile genesis) {
+    final OptionalLong pragueTimestamp = genesis.getConfigOptions().getPragueTime();
+    if (pragueTimestamp.isPresent()) {
+      return genesis.getTimestamp() >= pragueTimestamp.getAsLong();
+    }
+    return isFutureEipsTimeAtGenesis(genesis);
+  }
+  private static boolean isFutureEipsTimeAtGenesis(final GenesisConfigFile genesis) {
+    final OptionalLong futureEipsTime = genesis.getConfigOptions().getFutureEipsTime();
+    if (futureEipsTime.isPresent()) {
+      return genesis.getTimestamp() >= futureEipsTime.getAsLong();
+    }
+    return isExperimentalEipsTimeAtGenesis(genesis);
   }
   private static boolean isExperimentalEipsTimeAtGenesis(final GenesisConfigFile genesis) {

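With the fallback chain above, a genesis config that only defines a later fork still resolves to the correct genesis state: isShanghaiAtGenesis defers to Cancun, Cancun to Prague, Prague to FutureEips, and FutureEips to ExperimentalEips. A small sketch of the effect (GenesisConfigFile.fromConfig and getPragueTime appear elsewhere in this diff; the private is*AtGenesis helpers cannot be called directly, so only the inputs are shown):
    // Genesis timestamp defaults to 0 and pragueTime <= timestamp, so the chain above
    // classifies this genesis as Prague-active even though shanghaiTime/cancunTime are unset.
    final GenesisConfigFile genesis =
        GenesisConfigFile.fromConfig("{\"config\":{\"pragueTime\":0}}");
    assertThat(genesis.getConfigOptions().getPragueTime()).hasValue(0);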
@@ -22,6 +22,7 @@ import static org.hyperledger.besu.evm.precompile.MainnetPrecompiledContracts.po
 import static org.hyperledger.besu.evm.precompile.MainnetPrecompiledContracts.populateForFrontier;
 import static org.hyperledger.besu.evm.precompile.MainnetPrecompiledContracts.populateForFutureEIPs;
 import static org.hyperledger.besu.evm.precompile.MainnetPrecompiledContracts.populateForIstanbul;
+import static org.hyperledger.besu.evm.precompile.MainnetPrecompiledContracts.populateForPrague;
 import org.hyperledger.besu.ethereum.mainnet.precompiles.privacy.FlexiblePrivacyPrecompiledContract;
 import org.hyperledger.besu.ethereum.mainnet.precompiles.privacy.PrivacyPluginPrecompiledContract;
@@ -59,6 +60,13 @@ public interface MainnetPrecompiledContractRegistries {
     return registry;
   }
+  static PrecompileContractRegistry prague(
+      final PrecompiledContractConfiguration precompiledContractConfiguration) {
+    final PrecompileContractRegistry registry = new PrecompileContractRegistry();
+    populateForPrague(registry, precompiledContractConfiguration.getGasCalculator());
+    return registry;
+  }
   static PrecompileContractRegistry futureEips(
       final PrecompiledContractConfiguration precompiledContractConfiguration) {
     final PrecompileContractRegistry registry = new PrecompileContractRegistry();

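The new prague registry mirrors the futureEips builder next to it, delegating to populateForPrague with the configured gas calculator. A sketch of building it directly; the PrecompiledContractConfiguration constructor shape shown here (a GasCalculator plus PrivacyParameters) is an assumption, so adapt it to the actual signature:
    // Assumed constructor shape; only MainnetPrecompiledContractRegistries.prague(...) is from this diff.
    final PrecompiledContractConfiguration precompileConfig =
        new PrecompiledContractConfiguration(new PragueGasCalculator(), PrivacyParameters.DEFAULT);
    final PrecompileContractRegistry pragueRegistry =
        MainnetPrecompiledContractRegistries.prague(precompileConfig);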
@@ -168,6 +168,16 @@ public class MainnetProtocolSpecFactory {
         evmConfiguration);
   }
+  public ProtocolSpecBuilder pragueDefinition(final GenesisConfigOptions genesisConfigOptions) {
+    return MainnetProtocolSpecs.pragueDefinition(
+        chainId,
+        contractSizeLimit,
+        evmStackSize,
+        isRevertReasonEnabled,
+        genesisConfigOptions,
+        evmConfiguration);
+  }
   /**
    * The "future" fork consists of EIPs that have been approved for Ethereum Mainnet but not
    * scheduled for a fork. This is also known as "Eligible For Inclusion" (EFI) or "Considered for

@@ -51,6 +51,7 @@ import org.hyperledger.besu.evm.gascalculator.HomesteadGasCalculator;
 import org.hyperledger.besu.evm.gascalculator.IstanbulGasCalculator;
 import org.hyperledger.besu.evm.gascalculator.LondonGasCalculator;
 import org.hyperledger.besu.evm.gascalculator.PetersburgGasCalculator;
+import org.hyperledger.besu.evm.gascalculator.PragueGasCalculator;
 import org.hyperledger.besu.evm.gascalculator.ShanghaiGasCalculator;
 import org.hyperledger.besu.evm.gascalculator.SpuriousDragonGasCalculator;
 import org.hyperledger.besu.evm.gascalculator.TangerineWhistleGasCalculator;
@@ -661,7 +662,7 @@ public abstract class MainnetProtocolSpecs {
             feeMarket ->
                 new CancunTargetingGasLimitCalculator(
                     londonForkBlockNumber, (BaseFeeMarket) feeMarket))
-        // EVM changes to support EOF EIPs (3670, 4200, 4750, 5450)
+        // EVM changes to support EIP-1153: TSTORE and EIP-5656: MCOPY
         .evmBuilder(
             (gasCalculator, jdCacheConfig) ->
                 MainnetEVMs.cancun(
@@ -703,7 +704,7 @@ public abstract class MainnetProtocolSpecs {
         .name("Cancun");
   }
-  static ProtocolSpecBuilder futureEipsDefinition(
+  static ProtocolSpecBuilder pragueDefinition(
       final Optional<BigInteger> chainId,
       final OptionalInt configContractSizeLimit,
       final OptionalInt configStackSizeLimit,
@@ -712,6 +713,7 @@ public abstract class MainnetProtocolSpecs {
       final EvmConfiguration evmConfiguration) {
     final int contractSizeLimit =
         configContractSizeLimit.orElse(SPURIOUS_DRAGON_CONTRACT_SIZE_LIMIT);
     return cancunDefinition(
             chainId,
             configContractSizeLimit,
@@ -719,6 +721,44 @@ public abstract class MainnetProtocolSpecs {
             enableRevertReason,
             genesisConfigOptions,
             evmConfiguration)
+        // EVM changes to support EOF EIPs (3670, 4200, 4750, 5450)
+        .gasCalculator(PragueGasCalculator::new)
+        .evmBuilder(
+            (gasCalculator, jdCacheConfig) ->
+                MainnetEVMs.prague(
+                    gasCalculator, chainId.orElse(BigInteger.ZERO), evmConfiguration))
+        // change contract call creator to accept EOF code
+        .contractCreationProcessorBuilder(
+            (gasCalculator, evm) ->
+                new ContractCreationProcessor(
+                    gasCalculator,
+                    evm,
+                    true,
+                    List.of(
+                        MaxCodeSizeRule.of(contractSizeLimit), EOFValidationCodeRule.of(1, false)),
+                    1,
+                    SPURIOUS_DRAGON_FORCE_DELETE_WHEN_EMPTY_ADDRESSES))
+        // use prague precompiled contracts
+        .precompileContractRegistryBuilder(MainnetPrecompiledContractRegistries::prague)
+        .name("Prague");
+  }
+  static ProtocolSpecBuilder futureEipsDefinition(
+      final Optional<BigInteger> chainId,
+      final OptionalInt configContractSizeLimit,
+      final OptionalInt configStackSizeLimit,
+      final boolean enableRevertReason,
+      final GenesisConfigOptions genesisConfigOptions,
+      final EvmConfiguration evmConfiguration) {
+    final int contractSizeLimit =
+        configContractSizeLimit.orElse(SPURIOUS_DRAGON_CONTRACT_SIZE_LIMIT);
+    return pragueDefinition(
+            chainId,
+            configContractSizeLimit,
+            configStackSizeLimit,
+            enableRevertReason,
+            genesisConfigOptions,
+            evmConfiguration)
         // Use Future EIP configured EVM
         .evmBuilder(
             (gasCalculator, jdCacheConfig) ->

@@ -235,6 +235,7 @@ public class ProtocolScheduleBuilder {
     // Begin timestamp forks
     lastForkBlock = validateForkOrder("Shanghai", config.getShanghaiTime(), lastForkBlock);
     lastForkBlock = validateForkOrder("Cancun", config.getCancunTime(), lastForkBlock);
+    lastForkBlock = validateForkOrder("Prague", config.getPragueTime(), lastForkBlock);
     lastForkBlock = validateForkOrder("FutureEips", config.getFutureEipsTime(), lastForkBlock);
     lastForkBlock =
         validateForkOrder("ExperimentalEips", config.getExperimentalEipsTime(), lastForkBlock);
@@ -313,6 +314,7 @@ public class ProtocolScheduleBuilder {
         // Timestamp Forks
         timestampMilestone(config.getShanghaiTime(), specFactory.shanghaiDefinition(config)),
         timestampMilestone(config.getCancunTime(), specFactory.cancunDefinition(config)),
+        timestampMilestone(config.getPragueTime(), specFactory.pragueDefinition(config)),
         timestampMilestone(config.getFutureEipsTime(), specFactory.futureEipsDefinition(config)),
         timestampMilestone(
             config.getExperimentalEipsTime(), specFactory.experimentalEipsDefinition(config)),

@@ -23,15 +23,15 @@ import org.bouncycastle.util.Arrays;
 public enum KeyValueSegmentIdentifier implements SegmentIdentifier {
   DEFAULT("default".getBytes(StandardCharsets.UTF_8)),
   BLOCKCHAIN(new byte[] {1}, true, true),
-  WORLD_STATE(new byte[] {2}, new int[] {0, 1}, false, true),
+  WORLD_STATE(new byte[] {2}, new int[] {0, 1}, false, true, false),
   PRIVATE_TRANSACTIONS(new byte[] {3}),
   PRIVATE_STATE(new byte[] {4}),
   PRUNING_STATE(new byte[] {5}, new int[] {0, 1}),
-  ACCOUNT_INFO_STATE(new byte[] {6}, new int[] {2}, false, true),
+  ACCOUNT_INFO_STATE(new byte[] {6}, new int[] {2}, false, true, false),
   CODE_STORAGE(new byte[] {7}, new int[] {2}),
-  ACCOUNT_STORAGE_STORAGE(new byte[] {8}, new int[] {2}, false, true),
-  TRIE_BRANCH_STORAGE(new byte[] {9}, new int[] {2}, false, true),
-  TRIE_LOG_STORAGE(new byte[] {10}, new int[] {2}),
+  ACCOUNT_STORAGE_STORAGE(new byte[] {8}, new int[] {2}, false, true, false),
+  TRIE_BRANCH_STORAGE(new byte[] {9}, new int[] {2}, false, true, false),
+  TRIE_LOG_STORAGE(new byte[] {10}, new int[] {2}, true, false, true),
   VARIABLES(new byte[] {11}), // formerly GOQUORUM_PRIVATE_WORLD_STATE
   // previously supported GoQuorum private states
@@ -49,6 +49,7 @@ public enum KeyValueSegmentIdentifier implements SegmentIdentifier {
   private final int[] versionList;
   private final boolean containsStaticData;
   private final boolean eligibleToHighSpecFlag;
+  private final boolean staticDataGarbageCollectionEnabled;
   KeyValueSegmentIdentifier(final byte[] id) {
     this(id, new int[] {0, 1, 2});
@@ -56,22 +57,24 @@ public enum KeyValueSegmentIdentifier implements SegmentIdentifier {
   KeyValueSegmentIdentifier(
       final byte[] id, final boolean containsStaticData, final boolean eligibleToHighSpecFlag) {
-    this(id, new int[] {0, 1, 2}, containsStaticData, eligibleToHighSpecFlag);
+    this(id, new int[] {0, 1, 2}, containsStaticData, eligibleToHighSpecFlag, false);
   }
   KeyValueSegmentIdentifier(final byte[] id, final int[] versionList) {
-    this(id, versionList, false, false);
+    this(id, versionList, false, false, false);
   }
   KeyValueSegmentIdentifier(
       final byte[] id,
       final int[] versionList,
       final boolean containsStaticData,
-      final boolean eligibleToHighSpecFlag) {
+      final boolean eligibleToHighSpecFlag,
+      final boolean staticDataGarbageCollectionEnabled) {
     this.id = id;
     this.versionList = versionList;
     this.containsStaticData = containsStaticData;
     this.eligibleToHighSpecFlag = eligibleToHighSpecFlag;
+    this.staticDataGarbageCollectionEnabled = staticDataGarbageCollectionEnabled;
   }
   @Override
@@ -94,6 +97,11 @@ public enum KeyValueSegmentIdentifier implements SegmentIdentifier {
     return eligibleToHighSpecFlag;
   }
+  @Override
+  public boolean isStaticDataGarbageCollectionEnabled() {
+    return staticDataGarbageCollectionEnabled;
+  }
   @Override
   public boolean includeInDatabaseVersion(final int version) {
     return Arrays.contains(versionList, version);

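The enum now carries a fifth flag, exposed through isStaticDataGarbageCollectionEnabled(); per the constant definitions above, only TRIE_LOG_STORAGE opts in, while the world-state segments keep it disabled. A minimal check of the new accessor (assertThat assumes AssertJ, as in the tests elsewhere in this change):
    assertThat(KeyValueSegmentIdentifier.TRIE_LOG_STORAGE.isStaticDataGarbageCollectionEnabled()).isTrue();
    assertThat(KeyValueSegmentIdentifier.WORLD_STATE.isStaticDataGarbageCollectionEnabled()).isFalse();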
@@ -74,6 +74,7 @@ public class BonsaiWorldStateUpdateAccumulator
   private final Map<Address, StorageConsumingMap<StorageSlotKey, BonsaiValue<UInt256>>>
       storageToUpdate = new ConcurrentHashMap<>();
+  private final Map<UInt256, Hash> storageKeyHashLookup = new ConcurrentHashMap<>();
   protected boolean isAccumulatorStateChanged;
   public BonsaiWorldStateUpdateAccumulator(
@@ -142,7 +143,7 @@ public class BonsaiWorldStateUpdateAccumulator
             new BonsaiAccount(
                 this,
                 address,
-                hashAndSavePreImage(address),
+                hashAndSaveAccountPreImage(address),
                 nonce,
                 balance,
                 Hash.EMPTY_TRIE_HASH,
@@ -364,11 +365,11 @@ public class BonsaiWorldStateUpdateAccumulator
               entries.forEach(
                   storageUpdate -> {
                     final UInt256 keyUInt = storageUpdate.getKey();
-                    final Hash slotHash = hashAndSavePreImage(keyUInt);
                     final StorageSlotKey slotKey =
-                        new StorageSlotKey(slotHash, Optional.of(keyUInt));
+                        new StorageSlotKey(hashAndSaveSlotPreImage(keyUInt), Optional.of(keyUInt));
                     final UInt256 value = storageUpdate.getValue();
                     final BonsaiValue<UInt256> pendingValue = pendingStorageUpdates.get(slotKey);
                     if (pendingValue == null) {
                       pendingStorageUpdates.put(
                           slotKey,
@@ -409,7 +410,7 @@ public class BonsaiWorldStateUpdateAccumulator
   @Override
   public UInt256 getStorageValue(final Address address, final UInt256 slotKey) {
     StorageSlotKey storageSlotKey =
-        new StorageSlotKey(hashAndSavePreImage(slotKey), Optional.of(slotKey));
+        new StorageSlotKey(hashAndSaveSlotPreImage(slotKey), Optional.of(slotKey));
     return getStorageValueByStorageSlotKey(address, storageSlotKey).orElse(UInt256.ZERO);
   }
@@ -453,7 +454,7 @@ public class BonsaiWorldStateUpdateAccumulator
   public UInt256 getPriorStorageValue(final Address address, final UInt256 storageKey) {
     // TODO maybe log the read into the trie layer?
     StorageSlotKey storageSlotKey =
-        new StorageSlotKey(hashAndSavePreImage(storageKey), Optional.of(storageKey));
+        new StorageSlotKey(hashAndSaveSlotPreImage(storageKey), Optional.of(storageKey));
     final Map<StorageSlotKey, BonsaiValue<UInt256>> localAccountStorage =
         storageToUpdate.get(address);
     if (localAccountStorage != null) {
@@ -765,6 +766,7 @@ public class BonsaiWorldStateUpdateAccumulator
     resetAccumulatorStateChanged();
     updatedAccounts.clear();
     deletedAccounts.clear();
+    storageKeyHashLookup.clear();
   }
   public static class AccountConsumingMap<T> extends ForwardingMap<Address, T> {
@@ -828,8 +830,17 @@ public class BonsaiWorldStateUpdateAccumulator
     void process(final Address address, T value);
   }
-  protected Hash hashAndSavePreImage(final Bytes bytes) {
-    // by default do not save hash preImages
-    return Hash.hash(bytes);
+  protected Hash hashAndSaveAccountPreImage(final Address address) {
+    // no need to save account preimage by default
+    return Hash.hash(address);
+  }
+  protected Hash hashAndSaveSlotPreImage(final UInt256 slotKey) {
+    Hash hash = storageKeyHashLookup.get(slotKey);
+    if (hash == null) {
+      hash = Hash.hash(slotKey);
+      storageKeyHashLookup.put(slotKey, hash);
+    }
+    return hash;
   }
 }

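The new storageKeyHashLookup map memoizes the Keccak hash of each storage slot key for the lifetime of the accumulator (it is cleared alongside updatedAccounts and deletedAccounts), so repeated reads and writes of the same slot no longer re-hash the key. The same memoization could be written with computeIfAbsent, as in the sketch below; the diff's explicit get/put is functionally equivalent, and the choice to avoid computeIfAbsent on a ConcurrentHashMap hot path is presumably deliberate, though that rationale is an assumption rather than something stated in the change:
    // Equivalent sketch only; the actual change uses an explicit get/put on storageKeyHashLookup.
    protected Hash hashAndSaveSlotPreImage(final UInt256 slotKey) {
      return storageKeyHashLookup.computeIfAbsent(slotKey, Hash::hash);
    }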
@@ -1,39 +0,0 @@
-/*
- * Copyright Hyperledger Besu Contributors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
- * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- */
-package org.hyperledger.besu.ethereum.util;
-
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.function.Consumer;
-
-public class LogUtil {
-  static ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
-
-  public static void throttledLog(
-      final Consumer<String> logger,
-      final String logMessage,
-      final AtomicBoolean shouldLog,
-      final int logRepeatDelay) {
-
-    if (shouldLog.compareAndSet(true, false)) {
-      logger.accept(logMessage);
-      final Runnable runnable = () -> shouldLog.set(true);
-      executor.schedule(runnable, logRepeatDelay, TimeUnit.SECONDS);
-    }
-  }
-}
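Note: this file is not dropped outright; it moves to org.hyperledger.besu.util.log.LogUtil, as the import change in SyncTargetManager further down shows. A minimal usage sketch of throttledLog, assuming the relocated class is on the classpath (the wrapper class name and the message text below are illustrative):

import static org.hyperledger.besu.util.log.LogUtil.throttledLog;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;

public class ThrottledLogExample {
  // Caller-owned gate: throttledLog flips it to false on the first log and
  // re-arms it after logRepeatDelay seconds via its internal scheduler.
  private static final AtomicBoolean SHOULD_LOG = new AtomicBoolean(true);

  public static void main(final String[] args) throws InterruptedException {
    final Consumer<String> logger = System.out::println; // stand-in for LOG::info
    for (int i = 0; i < 5; i++) {
      // Only the first call in each 10-second window actually logs.
      throttledLog(logger, "still waiting for peers...", SHOULD_LOG, 10);
      TimeUnit.MILLISECONDS.sleep(100);
    }
  }
}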

@@ -40,7 +40,7 @@ import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.ArgumentsProvider;
 import org.junit.jupiter.params.provider.ArgumentsSource;

-public final class GenesisStateTest {
+final class GenesisStateTest {

   /** Known RLP encoded bytes of the Olympic Genesis Block. */
   private static final String OLYMPIC_RLP =
@@ -63,7 +63,7 @@ public final class GenesisStateTest {
   @ParameterizedTest
   @ArgumentsSource(GenesisStateTestArguments.class)
-  public void createFromJsonWithAllocs(final DataStorageFormat dataStorageFormat) throws Exception {
+  void createFromJsonWithAllocs(final DataStorageFormat dataStorageFormat) throws Exception {
     final GenesisState genesisState =
         GenesisState.fromJson(
             dataStorageFormat,
@@ -93,7 +93,7 @@ public final class GenesisStateTest {
   @ParameterizedTest
   @ArgumentsSource(GenesisStateTestArguments.class)
-  public void createFromJsonNoAllocs(final DataStorageFormat dataStorageFormat) throws Exception {
+  void createFromJsonNoAllocs(final DataStorageFormat dataStorageFormat) throws Exception {
     final GenesisState genesisState =
         GenesisState.fromJson(
             dataStorageFormat,
@@ -136,8 +136,7 @@ public final class GenesisStateTest {
   @ParameterizedTest
   @ArgumentsSource(GenesisStateTestArguments.class)
-  public void createFromJsonWithContract(final DataStorageFormat dataStorageFormat)
-      throws Exception {
+  void createFromJsonWithContract(final DataStorageFormat dataStorageFormat) throws Exception {
     assertContractInvariants(
         dataStorageFormat,
         "genesis3.json",
@@ -146,7 +145,7 @@ public final class GenesisStateTest {
   @ParameterizedTest
   @ArgumentsSource(GenesisStateTestArguments.class)
-  public void createFromJsonWithNonce(final DataStorageFormat dataStorageFormat) throws Exception {
+  void createFromJsonWithNonce(final DataStorageFormat dataStorageFormat) throws Exception {
     final GenesisState genesisState =
         GenesisState.fromJson(
             dataStorageFormat,
@@ -162,7 +161,7 @@ public final class GenesisStateTest {
   @ParameterizedTest
   @ArgumentsSource(GenesisStateTestArguments.class)
-  public void encodeOlympicBlock(final DataStorageFormat dataStorageFormat) throws Exception {
+  void encodeOlympicBlock(final DataStorageFormat dataStorageFormat) throws Exception {
     final GenesisState genesisState =
         GenesisState.fromJson(
             dataStorageFormat,
@@ -183,7 +182,7 @@ public final class GenesisStateTest {
   @ParameterizedTest
   @ArgumentsSource(GenesisStateTestArguments.class)
-  public void genesisFromShanghai(final DataStorageFormat dataStorageFormat) throws Exception {
+  void genesisFromShanghai(final DataStorageFormat dataStorageFormat) throws Exception {
     final GenesisState genesisState =
         GenesisState.fromJson(
             dataStorageFormat,
@@ -196,8 +195,8 @@ public final class GenesisStateTest {
             Hash.fromHexString(
                 "0xfdc41f92053811b877be43e61cab6b0d9ee55501ae2443df0970c753747f12d8"));
     assertThat(header.getGasLimit()).isEqualTo(0x2fefd8);
-    assertThat(header.getGasUsed()).isEqualTo(0);
-    assertThat(header.getNumber()).isEqualTo(0);
+    assertThat(header.getGasUsed()).isZero();
+    assertThat(header.getNumber()).isZero();
     assertThat(header.getReceiptsRoot())
         .isEqualTo(
             Hash.fromHexString(
@@ -223,7 +222,7 @@ public final class GenesisStateTest {
     final Account last =
         worldState.get(Address.fromHexString("fb289e2b2b65fb63299a682d000744671c50417b"));
     assertThat(first).isNotNull();
-    assertThat(first.getBalance().toLong()).isEqualTo(0);
+    assertThat(first.getBalance().toLong()).isZero();
     assertThat(first.getCode())
         .isEqualTo(Bytes.fromHexString("0x5f804955600180495560028049556003804955"));
     assertThat(last).isNotNull();
@@ -233,7 +232,7 @@ public final class GenesisStateTest {
   @ParameterizedTest
   @ArgumentsSource(GenesisStateTestArguments.class)
-  public void genesisFromCancun(final DataStorageFormat dataStorageFormat) throws Exception {
+  void genesisFromCancun(final DataStorageFormat dataStorageFormat) throws Exception {
     final GenesisState genesisState =
         GenesisState.fromJson(
             dataStorageFormat,
@@ -246,8 +245,58 @@ public final class GenesisStateTest {
             Hash.fromHexString(
                 "0x87846b86c1026fa7d7be2da045716274231de1871065a320659c9b111287c688"));
     assertThat(header.getGasLimit()).isEqualTo(0x2fefd8);
-    assertThat(header.getGasUsed()).isEqualTo(0);
-    assertThat(header.getNumber()).isEqualTo(0);
+    assertThat(header.getGasUsed()).isZero();
+    assertThat(header.getNumber()).isZero();
+    assertThat(header.getReceiptsRoot())
+        .isEqualTo(
+            Hash.fromHexString(
+                "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"));
+    assertThat(header.getTransactionsRoot()).isEqualTo(Hash.EMPTY_TRIE_HASH);
+    assertThat(header.getOmmersHash())
+        .isEqualTo(
+            Hash.fromHexString(
+                "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"));
+    assertThat(header.getExtraData()).isEqualTo(Bytes.EMPTY);
+    assertThat(header.getParentHash()).isEqualTo(Hash.ZERO);
+    final MutableWorldState worldState = InMemoryKeyValueStorageProvider.createInMemoryWorldState();
+    genesisState.writeStateTo(worldState);
+    Hash computedStateRoot = worldState.rootHash();
+    assertThat(computedStateRoot).isEqualTo(header.getStateRoot());
+    assertThat(header.getStateRoot())
+        .isEqualTo(
+            Hash.fromHexString(
+                "0x7f5cfe1375a61009a22d24512d18035bc8f855129452fa9c6a6be2ef4e9da7db"));
+    final Account first =
+        worldState.get(Address.fromHexString("0000000000000000000000000000000000000100"));
+    final Account last =
+        worldState.get(Address.fromHexString("fb289e2b2b65fb63299a682d000744671c50417b"));
+    assertThat(first).isNotNull();
+    assertThat(first.getBalance().toLong()).isZero();
+    assertThat(first.getCode())
+        .isEqualTo(Bytes.fromHexString("0x5f804955600180495560028049556003804955"));
+    assertThat(last).isNotNull();
+    Wei lastBalance = last.getBalance();
+    assertThat(lastBalance).isEqualTo(Wei.fromHexString("0x123450000000000000000"));
+  }
+
+  @ParameterizedTest
+  @ArgumentsSource(GenesisStateTestArguments.class)
+  void genesisFromPrague(final DataStorageFormat dataStorageFormat) throws Exception {
+    final GenesisState genesisState =
+        GenesisState.fromJson(
+            dataStorageFormat,
+            Resources.toString(
+                GenesisStateTest.class.getResource("genesis_prague.json"), Charsets.UTF_8),
+            ProtocolScheduleFixture.MAINNET);
+    final BlockHeader header = genesisState.getBlock().getHeader();
+    assertThat(header.getHash())
+        .isEqualTo(
+            Hash.fromHexString(
+                "0x87846b86c1026fa7d7be2da045716274231de1871065a320659c9b111287c688"));
+    assertThat(header.getGasLimit()).isEqualTo(0x2fefd8);
+    assertThat(header.getGasUsed()).isZero();
+    assertThat(header.getNumber()).isZero();
     assertThat(header.getReceiptsRoot())
         .isEqualTo(
             Hash.fromHexString(
@@ -273,7 +322,7 @@ public final class GenesisStateTest {
     final Account last =
         worldState.get(Address.fromHexString("fb289e2b2b65fb63299a682d000744671c50417b"));
     assertThat(first).isNotNull();
-    assertThat(first.getBalance().toLong()).isEqualTo(0);
+    assertThat(first.getBalance().toLong()).isZero();
     assertThat(first.getCode())
         .isEqualTo(Bytes.fromHexString("0x5f804955600180495560028049556003804955"));
     assertThat(last).isNotNull();

@@ -41,7 +41,7 @@ import org.mockito.junit.jupiter.MockitoExtension;
 import org.mockito.stubbing.Answer;

 @ExtendWith(MockitoExtension.class)
-public class ProtocolScheduleBuilderTest {
+class ProtocolScheduleBuilderTest {
   private final long PRE_SHANGHAI_TIMESTAMP = 1680488620L; // Mon, 03 Apr 2023 02:23:40 UTC
   @Mock GenesisConfigOptions configOptions;
   @Mock private Function<ProtocolSpecBuilder, ProtocolSpecBuilder> modifier;
@@ -61,13 +61,14 @@ public class ProtocolScheduleBuilderTest {
   }

   @Test
-  public void createProtocolScheduleInOrder() {
+  void createProtocolScheduleInOrder() {
     when(configOptions.getHomesteadBlockNumber()).thenReturn(OptionalLong.of(1L));
     when(configOptions.getDaoForkBlock()).thenReturn(OptionalLong.of(2L));
     when(configOptions.getByzantiumBlockNumber()).thenReturn(OptionalLong.of(13L));
     when(configOptions.getMergeNetSplitBlockNumber()).thenReturn(OptionalLong.of(15L));
     when(configOptions.getShanghaiTime()).thenReturn(OptionalLong.of(PRE_SHANGHAI_TIMESTAMP + 1));
     when(configOptions.getCancunTime()).thenReturn(OptionalLong.of(PRE_SHANGHAI_TIMESTAMP + 3));
+    when(configOptions.getPragueTime()).thenReturn(OptionalLong.of(PRE_SHANGHAI_TIMESTAMP + 5));
     final ProtocolSchedule protocolSchedule = builder.createProtocolSchedule();

     assertThat(protocolSchedule.getChainId()).contains(CHAIN_ID);
@@ -102,10 +103,20 @@ public class ProtocolScheduleBuilderTest {
                 .getByBlockHeader(blockHeader(54, PRE_SHANGHAI_TIMESTAMP + 4))
                 .getName())
         .isEqualTo("Cancun");
+    assertThat(
+            protocolSchedule
+                .getByBlockHeader(blockHeader(55, PRE_SHANGHAI_TIMESTAMP + 5))
+                .getName())
+        .isEqualTo("Prague");
+    assertThat(
+            protocolSchedule
+                .getByBlockHeader(blockHeader(56, PRE_SHANGHAI_TIMESTAMP + 6))
+                .getName())
+        .isEqualTo("Prague");
   }

   @Test
-  public void createProtocolScheduleOverlappingUsesLatestFork() {
+  void createProtocolScheduleOverlappingUsesLatestFork() {
     when(configOptions.getHomesteadBlockNumber()).thenReturn(OptionalLong.of(0L));
     when(configOptions.getByzantiumBlockNumber()).thenReturn(OptionalLong.of(0L));
     final ProtocolSchedule protocolSchedule = builder.createProtocolSchedule();
@@ -116,7 +127,7 @@ public class ProtocolScheduleBuilderTest {
   }

   @Test
-  public void createProtocolScheduleOutOfOrderThrows() {
+  void createProtocolScheduleOutOfOrderThrows() {
     when(configOptions.getDaoForkBlock()).thenReturn(OptionalLong.of(0L));
     when(configOptions.getArrowGlacierBlockNumber()).thenReturn(OptionalLong.of(12L));
     when(configOptions.getGrayGlacierBlockNumber()).thenReturn(OptionalLong.of(11L));
@@ -127,7 +138,7 @@ public class ProtocolScheduleBuilderTest {
   }

   @Test
-  public void createProtocolScheduleWithTimestampsOutOfOrderThrows() {
+  void createProtocolScheduleWithTimestampsOutOfOrderThrows() {
     when(configOptions.getDaoForkBlock()).thenReturn(OptionalLong.of(0L));
     when(configOptions.getShanghaiTime()).thenReturn(OptionalLong.of(3L));
     when(configOptions.getCancunTime()).thenReturn(OptionalLong.of(2L));
@@ -138,7 +149,7 @@ public class ProtocolScheduleBuilderTest {
   }

   @Test
-  public void modifierInsertedBetweenBlocksIsAppliedToLaterAndCreatesInterimMilestone() {
+  void modifierInsertedBetweenBlocksIsAppliedToLaterAndCreatesInterimMilestone() {
     when(configOptions.getHomesteadBlockNumber()).thenReturn(OptionalLong.of(5L));

     when(modifier.apply(any()))
@@ -158,7 +169,7 @@ public class ProtocolScheduleBuilderTest {
   }

   @Test
-  public void modifierPastEndOfDefinedMilestonesGetsItsOwnMilestoneCreated() {
+  void modifierPastEndOfDefinedMilestonesGetsItsOwnMilestoneCreated() {
     when(modifier.apply(any()))
         .thenAnswer((Answer<ProtocolSpecBuilder>) invocation -> invocation.getArgument(0));
@@ -175,7 +186,7 @@ public class ProtocolScheduleBuilderTest {
   }

   @Test
-  public void modifierOnDefinedMilestoneIsAppliedButDoesNotGetAnExtraMilestoneCreated() {
+  void modifierOnDefinedMilestoneIsAppliedButDoesNotGetAnExtraMilestoneCreated() {
     when(configOptions.getHomesteadBlockNumber()).thenReturn(OptionalLong.of(5L));
     when(modifier.apply(any()))
         .thenAnswer((Answer<ProtocolSpecBuilder>) invocation -> invocation.getArgument(0));

@@ -212,7 +212,7 @@ public class EthPeer implements Comparable<EthPeer> {
   public void recordRequestTimeout(final int requestCode) {
     LOG.atDebug()
         .setMessage("Timed out while waiting for response from peer {}...")
-        .addArgument(this::getShortNodeId)
+        .addArgument(this::getLoggableId)
         .log();
     LOG.trace("Timed out while waiting for response from peer {}", this);
     reputation.recordRequestTimeout(requestCode).ifPresent(this::disconnect);
@@ -222,7 +222,7 @@ public class EthPeer implements Comparable<EthPeer> {
     LOG.atTrace()
         .setMessage("Received useless response for request type {} from peer {}...")
         .addArgument(requestType)
-        .addArgument(this::getShortNodeId)
+        .addArgument(this::getLoggableId)
         .log();
     reputation.recordUselessResponse(System.currentTimeMillis()).ifPresent(this::disconnect);
   }
@@ -264,7 +264,7 @@ public class EthPeer implements Comparable<EthPeer> {
       LOG.atDebug()
          .setMessage("Protocol {} unavailable for this peer {}...")
          .addArgument(protocolName)
-          .addArgument(this.getShortNodeId())
+          .addArgument(this.getLoggableId())
          .log();
       return null;
     }
@@ -274,7 +274,7 @@ public class EthPeer implements Comparable<EthPeer> {
       LOG.info(
           "Permissioning blocked sending of message code {} to {}...",
           messageData.getCode(),
-          this.getShortNodeId());
+          this.getLoggableId());
       if (LOG.isDebugEnabled()) {
         LOG.debug(
             "Permissioning blocked by providers {}",
@@ -608,7 +608,7 @@ public class EthPeer implements Comparable<EthPeer> {
   public String toString() {
     return String.format(
         "PeerId: %s... %s, validated? %s, disconnected? %s, client: %s, %s, %s",
-        getShortNodeId(),
+        getLoggableId(),
         reputation,
         isFullyValidated(),
         isDisconnected(),
@@ -618,8 +618,9 @@ public class EthPeer implements Comparable<EthPeer> {
   }

   @Nonnull
-  public String getShortNodeId() {
-    return nodeId().toString().substring(0, 20);
+  public String getLoggableId() {
+    // 8 bytes plus the 0x prefix is 18 characters
+    return nodeId().toString().substring(0, 18) + "...";
   }

   @Override
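Note: getLoggableId keeps the "0x" prefix plus the first 8 bytes (16 hex characters) of the node id and appends an ellipsis, so log messages no longer need to add "..." themselves. A small sketch of that rule on a made-up node id (the helper class and the id value below are illustrative):

public class LoggableIdExample {
  static String loggableId(final String nodeIdHex) {
    // 8 bytes plus the 0x prefix is 18 characters
    return nodeIdHex.substring(0, 18) + "...";
  }

  public static void main(final String[] args) {
    final String nodeId = "0x3f2a19c4b87d60e15a0c9d4e7b216f88aa31c5d9e0f47b6a2c8d1e3f5a7b9c0d";
    System.out.println(loggableId(nodeId)); // prints 0x3f2a19c4b87d60e1...
  }
}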

@@ -54,10 +54,14 @@ import org.slf4j.LoggerFactory;
 public class EthPeers {
   private static final Logger LOG = LoggerFactory.getLogger(EthPeers.class);
   public static final Comparator<EthPeer> TOTAL_DIFFICULTY =
-      Comparator.comparing(((final EthPeer p) -> p.chainState().getEstimatedTotalDifficulty()));
+      Comparator.comparing((final EthPeer p) -> p.chainState().getEstimatedTotalDifficulty());
   public static final Comparator<EthPeer> CHAIN_HEIGHT =
-      Comparator.comparing(((final EthPeer p) -> p.chainState().getEstimatedHeight()));
+      Comparator.comparing((final EthPeer p) -> p.chainState().getEstimatedHeight());
+  public static final Comparator<EthPeer> MOST_USEFUL_PEER =
+      Comparator.comparing((final EthPeer p) -> p.getReputation().getScore())
+          .thenComparing(CHAIN_HEIGHT);

   public static final Comparator<EthPeer> HEAVIEST_CHAIN =
       TOTAL_DIFFICULTY.thenComparing(CHAIN_HEIGHT);
@@ -200,7 +204,7 @@ public class EthPeers {
       if (peer.getReputation().getScore() > USEFULL_PEER_SCORE_THRESHOLD) {
         LOG.debug("Disconnected USEFULL peer {}", peer);
       } else {
-        LOG.debug("Disconnected EthPeer {}", peer.getShortNodeId());
+        LOG.debug("Disconnected EthPeer {}", peer.getLoggableId());
       }
     }
   }
@@ -389,7 +393,7 @@ public class EthPeers {
             LOG.atDebug()
                 .setMessage(
                     "disconnecting peer {}. Waiting for better peers. Current {} of max {}")
-                .addArgument(peer::getShortNodeId)
+                .addArgument(peer::getLoggableId)
                 .addArgument(this::peerCount)
                 .addArgument(this::getMaxPeers)
                 .log();
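Note: MOST_USEFUL_PEER orders peers by reputation score first and estimated chain height second, so Stream.min(MOST_USEFUL_PEER) yields the least useful peer, which is the candidate the switching-peer task below disconnects to make room. A self-contained sketch of the same ordering with a stand-in Peer record (not the Besu EthPeer type):

import java.util.Comparator;
import java.util.List;
import java.util.Optional;

public class MostUsefulPeerExample {
  record Peer(String id, int reputationScore, long chainHeight) {}

  // Primary key: reputation score; tie-break: chain height.
  static final Comparator<Peer> MOST_USEFUL_PEER =
      Comparator.comparing(Peer::reputationScore).thenComparing(Peer::chainHeight);

  public static void main(final String[] args) {
    final List<Peer> peers =
        List.of(
            new Peer("a", 110, 19_000_000L),
            new Peer("b", 90, 19_500_000L),
            new Peer("c", 90, 18_000_000L));
    // min(...) picks the least useful peer: lowest score, then lowest height.
    final Optional<Peer> leastUseful = peers.stream().min(MOST_USEFUL_PEER);
    System.out.println(leastUseful.map(Peer::id).orElse("none")); // prints c
  }
}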

@@ -400,13 +400,16 @@ public class EthProtocolManager implements ProtocolManager, MinedBlockObserver {
     if (peer.getForkId().map(forkIdManager::peerCheck).orElse(true)) {
       LOG.atDebug()
           .setMessage("ForkId OK or not available for peer {}")
-          .addArgument(peer::getId)
+          .addArgument(peer::getLoggableId)
           .log();
       if (ethPeers.shouldConnect(peer, incoming)) {
         return true;
       }
     }
-    LOG.atDebug().setMessage("ForkId check failed for peer {}").addArgument(peer::getId).log();
+    LOG.atDebug()
+        .setMessage("ForkId check failed for peer {}")
+        .addArgument(peer::getLoggableId)
+        .log();
     return false;
   }
@@ -417,10 +420,10 @@ public class EthProtocolManager implements ProtocolManager, MinedBlockObserver {
       final boolean initiatedByPeer) {
     if (ethPeers.registerDisconnect(connection)) {
       LOG.atDebug()
-          .setMessage("Disconnect - {} - {} - {}... - {} peers left")
+          .setMessage("Disconnect - {} - {} - {} - {} peers left")
           .addArgument(initiatedByPeer ? "Inbound" : "Outbound")
           .addArgument(reason::toString)
-          .addArgument(() -> connection.getPeer().getId().slice(0, 8))
+          .addArgument(() -> connection.getPeer().getLoggableId())
           .addArgument(ethPeers::peerCount)
           .log();
       LOG.atTrace().setMessage("{}").addArgument(ethPeers::toString).log();
@@ -478,7 +481,7 @@ public class EthProtocolManager implements ProtocolManager, MinedBlockObserver {
     } catch (final RLPException e) {
       LOG.atDebug()
           .setMessage("Unable to parse status message from peer {}... {}")
-          .addArgument(peer::getShortNodeId)
+          .addArgument(peer::getLoggableId)
           .addArgument(e)
           .log();
       // Parsing errors can happen when clients broadcast network ids outside the int range,
@@ -488,7 +491,7 @@ public class EthProtocolManager implements ProtocolManager, MinedBlockObserver {
   }

   private Object getPeerOrPeerId(final EthPeer peer) {
-    return LOG.isTraceEnabled() ? peer : peer.getShortNodeId();
+    return LOG.isTraceEnabled() ? peer : peer.getLoggableId();
   }

   @Override

@@ -74,19 +74,19 @@ public abstract class AbstractGetHeadersFromPeerTask
     final List<BlockHeader> headers = headersMessage.getHeaders(protocolSchedule);
     if (headers.isEmpty()) {
       // Message contains no data - nothing to do
-      LOG.debug("headers.isEmpty. Peer: {}", peer.getShortNodeId());
+      LOG.debug("headers.isEmpty. Peer: {}", peer.getLoggableId());
       return Optional.empty();
     }
     if (headers.size() > count) {
       // Too many headers - this isn't our response
-      LOG.debug("headers.size()>count. Peer: {}", peer.getShortNodeId());
+      LOG.debug("headers.size()>count. Peer: {}", peer.getLoggableId());
       return Optional.empty();
     }
     final BlockHeader firstHeader = headers.get(0);
     if (!matchesFirstHeader(firstHeader)) {
       // This isn't our message - nothing to do
-      LOG.debug("!matchesFirstHeader. Peer: {}", peer.getShortNodeId());
+      LOG.debug("!matchesFirstHeader. Peer: {}", peer.getLoggableId());
       return Optional.empty();
     }
@@ -100,7 +100,7 @@ public abstract class AbstractGetHeadersFromPeerTask
       header = headers.get(i);
       if (header.getNumber() != prevBlockHeader.getNumber() + expectedDelta) {
         // Skip doesn't match, this isn't our data
-        LOG.debug("header not matching the expected number. Peer: {}", peer.getShortNodeId());
+        LOG.debug("header not matching the expected number. Peer: {}", peer.getLoggableId());
         return Optional.empty();
       }
       // if headers are supposed to be sequential check if a chain is formed
@@ -110,7 +110,7 @@ public abstract class AbstractGetHeadersFromPeerTask
         if (!parent.getHash().equals(child.getParentHash())) {
           LOG.debug(
               "Sequential headers must form a chain through hashes (BREACH_OF_PROTOCOL), disconnecting peer: {}",
-              peer.getShortNodeId());
+              peer.getLoggableId());
           peer.disconnect(DisconnectMessage.DisconnectReason.BREACH_OF_PROTOCOL);
           return Optional.empty();
         }
@@ -129,7 +129,7 @@ public abstract class AbstractGetHeadersFromPeerTask
         .setMessage("Received {} of {} headers requested from peer {}...")
         .addArgument(headersList::size)
         .addArgument(count)
-        .addArgument(peer::getShortNodeId)
+        .addArgument(peer::getLoggableId)
         .log();
     return Optional.of(headersList);
   }
@@ -138,7 +138,7 @@ public abstract class AbstractGetHeadersFromPeerTask
     if (blockHeader.getNumber() > peer.chainState().getEstimatedHeight()) {
       LOG.atTrace()
           .setMessage("Updating chain state for peer {}... to block header {}")
-          .addArgument(peer::getShortNodeId)
+          .addArgument(peer::getLoggableId)
           .addArgument(blockHeader::toLogString)
           .log();
       peer.chainState().update(blockHeader);

@@ -110,7 +110,7 @@ public abstract class AbstractPeerRequestTask<R> extends AbstractPeerTask<R> {
       // Peer sent us malformed data - disconnect
       LOG.debug(
           "Disconnecting with BREACH_OF_PROTOCOL due to malformed message: {}",
-          peer.getShortNodeId(),
+          peer.getLoggableId(),
           e);
       LOG.trace("Peer {} Malformed message data: {}", peer, message.getData());
       peer.disconnect(DisconnectReason.BREACH_OF_PROTOCOL);

@@ -137,16 +137,15 @@ public abstract class AbstractRetryingSwitchingPeerTask<T> extends AbstractRetry
       // or the least useful
       if (peers.peerCount() >= peers.getMaxPeers()) {
-        failedPeers.stream()
-            .filter(peer -> !peer.isDisconnected())
-            .findAny()
-            .or(() -> peers.streamAvailablePeers().min(peers.getBestChainComparator()))
+        failedPeers.stream().filter(peer -> !peer.isDisconnected()).findAny().stream()
+            .min(EthPeers.MOST_USEFUL_PEER)
+            .or(() -> peers.streamAvailablePeers().min(EthPeers.MOST_USEFUL_PEER))
             .ifPresent(
                 peer -> {
                   LOG.atDebug()
                       .setMessage(
-                          "Refresh peers disconnecting peer {}... Waiting for better peers. Current {} of max {}")
-                      .addArgument(peer::getShortNodeId)
+                          "Refresh peers disconnecting peer {} Waiting for better peers. Current {} of max {}")
+                      .addArgument(peer::getLoggableId)
                       .addArgument(peers::peerCount)
                       .addArgument(peers::getMaxPeers)
                       .log();
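Note: the reworked pipeline leans on Optional.stream() (Java 9+): findAny() produces an Optional, and .stream() turns it into a zero- or one-element stream so the same .min(MOST_USEFUL_PEER) / .or(fallback) chain applies whether or not a non-disconnected failed peer exists. A small sketch of that bridge, with plain integers standing in for peers:

import java.util.Comparator;
import java.util.List;
import java.util.Optional;

public class OptionalStreamBridgeExample {
  public static void main(final String[] args) {
    final List<Integer> failed = List.of(7, 3, 9);
    final List<Integer> available = List.of(5, 2, 8);

    final Optional<Integer> pick =
        failed.stream().filter(n -> n % 2 == 1).findAny().stream()
            .min(Comparator.naturalOrder()) // trivial on a 0-or-1 element stream
            .or(() -> available.stream().min(Comparator.naturalOrder())); // fallback pool

    System.out.println(pick.orElse(-1)); // one of the odd "failed" entries
  }
}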

@@ -92,7 +92,7 @@ public class BufferedGetPooledTransactionsFromPeerFetcher {
               .setMessage("Got {} transactions of {} hashes requested from peer {}...")
               .addArgument(retrievedTransactions::size)
               .addArgument(task.getTransactionHashes()::size)
-              .addArgument(peer::getShortNodeId)
+              .addArgument(peer::getLoggableId)
               .log();
           transactionPool.addRemoteTransactions(retrievedTransactions);
@@ -121,7 +121,7 @@ public class BufferedGetPooledTransactionsFromPeerFetcher {
     LOG.atTrace()
         .setMessage(
             "Transaction hashes to request from peer {}... fresh count {}, already seen count {}")
-        .addArgument(peer::getShortNodeId)
+        .addArgument(peer::getLoggableId)
         .addArgument(toRetrieve::size)
         .addArgument(alreadySeenCount)
         .log();

@@ -123,7 +123,7 @@ public class GetHeadersFromPeerByHashTask extends AbstractGetHeadersFromPeerTask
               .setMessage("Requesting {} headers (hash {}...) from peer {}...")
               .addArgument(count)
               .addArgument(referenceHash.slice(0, 6))
-              .addArgument(peer::getShortNodeId)
+              .addArgument(peer::getLoggableId)
               .log();
           return peer.getHeadersByHash(referenceHash, count, skip, reverse);
         },

@@ -81,7 +81,7 @@ public class GetHeadersFromPeerByNumberTask extends AbstractGetHeadersFromPeerTa
               .setMessage("Requesting {} headers (blockNumber {}) from peer {}.")
               .addArgument(count)
               .addArgument(blockNumber)
-              .addArgument(peer::getShortNodeId)
+              .addArgument(peer::getLoggableId)
               .log();
           return peer.getHeadersByNumber(blockNumber, count, skip, reverse);
         },

@@ -69,7 +69,7 @@ public class GetNodeDataFromPeerTask extends AbstractPeerRequestTask<Map<Hash, B
           LOG.atTrace()
               .setMessage("Requesting {} node data entries from peer {}...")
               .addArgument(hashes::size)
-              .addArgument(peer::getShortNodeId)
+              .addArgument(peer::getLoggableId)
               .log();
           return peer.getNodeData(hashes);
         },

@@ -64,7 +64,7 @@ public class GetPooledTransactionsFromPeerTask extends AbstractPeerRequestTask<L
           LOG.atTrace()
               .setMessage("Requesting {} transaction pool entries from peer {}...")
               .addArgument(hashes::size)
-              .addArgument(peer::getShortNodeId)
+              .addArgument(peer::getLoggableId)
               .log();
           return peer.getPooledTransactions(new ArrayList<>(hashes));
         },

@@ -86,7 +86,7 @@ public class GetReceiptsFromPeerTask
           LOG.atTrace()
               .setMessage("Requesting {} receipts from peer {}...")
               .addArgument(blockHeaders::size)
-              .addArgument(peer::getShortNodeId)
+              .addArgument(peer::getLoggableId)
               .log();
           return peer.getReceipts(blockHashes);
         },

@@ -80,7 +80,7 @@ public class RetryingGetBlocksFromPeersTask
           if (peerResult.getResult().isEmpty()) {
             currentPeer.recordUselessResponse("GetBodiesFromPeerTask");
             throw new IncompleteResultsException(
-                "No blocks returned by peer " + currentPeer.getShortNodeId());
+                "No blocks returned by peer " + currentPeer.getLoggableId());
           }
           result.complete(peerResult);

@@ -104,7 +104,7 @@ public class RetryingGetHeadersEndingAtFromPeerByHashTask
                 "No block headers for hash "
                     + referenceHash
                     + " returned by peer "
-                    + currentPeer.getShortNodeId());
+                    + currentPeer.getLoggableId());
           }
           result.complete(peerResult.getResult());
           return peerResult.getResult();

@@ -68,7 +68,7 @@ public class ChainHeadTracker implements ConnectCallback {
   public void onPeerConnected(final EthPeer peer) {
     LOG.atDebug()
         .setMessage("Requesting chain head info from {}...")
-        .addArgument(peer::getShortNodeId)
+        .addArgument(peer::getLoggableId)
         .log();
     GetHeadersFromPeerByHashTask.forSingleHash(
             protocolSchedule,
@@ -92,12 +92,12 @@ public class ChainHeadTracker implements ConnectCallback {
                         + " ("
                         + chainHeadHeader.getBlockHash()
                         + ")")
-                .addArgument(peer::getShortNodeId)
+                .addArgument(peer::getLoggableId)
                 .log();
           } else {
             LOG.atDebug()
                 .setMessage("Failed to retrieve chain head info. Disconnecting {}... {}")
-                .addArgument(peer::getShortNodeId)
+                .addArgument(peer::getLoggableId)
                 .addArgument(error)
                 .log();
             peer.disconnect(DisconnectReason.USELESS_PEER);

@@ -69,7 +69,7 @@ public class TrailingPeerLimiter implements BlockAddedObserver {
               "Enforcing trailing peers limit (min height {}, max trailing peers {}) by disconnecting {}... with height {}")
           .addArgument(minimumHeightToBeUpToDate)
           .addArgument(maxTrailingPeers)
-          .addArgument(peerToDisconnect::getShortNodeId)
+          .addArgument(peerToDisconnect::getLoggableId)
           .addArgument(
               peerToDisconnect.chainState() == null
                   ? "(no chain state)"

@@ -44,7 +44,7 @@ import org.slf4j.LoggerFactory;
 public class BackwardSyncContext {
   private static final Logger LOG = LoggerFactory.getLogger(BackwardSyncContext.class);
   public static final int BATCH_SIZE = 200;
-  private static final int DEFAULT_MAX_RETRIES = 20;
+  private static final int DEFAULT_MAX_RETRIES = 2;
   private static final long MILLIS_DELAY_BETWEEN_PROGRESS_LOG = 10_000L;
   private static final long DEFAULT_MILLIS_BETWEEN_RETRIES = 5000;
   private static final int DEFAULT_MAX_CHAIN_EVENT_ENTRIES = BadBlockManager.MAX_BAD_BLOCKS_SIZE;

@@ -118,7 +118,7 @@ public class PivotSelectorFromPeers implements PivotBlockSelector {
   private boolean canPeerDeterminePivotBlock(final EthPeer peer) {
     LOG.debug(
         "peer {} hasEstimatedHeight {} isFullyValidated? {}",
-        peer.getShortNodeId(),
+        peer.getLoggableId(),
         peer.chainState().hasEstimatedHeight(),
         peer.isFullyValidated());
     return peer.chainState().hasEstimatedHeight() && peer.isFullyValidated();

@@ -16,7 +16,7 @@ package org.hyperledger.besu.ethereum.eth.sync.fastsync;
 import static java.util.concurrent.CompletableFuture.completedFuture;
 import static org.hyperledger.besu.ethereum.eth.sync.fastsync.PivotBlockRetriever.MAX_QUERY_RETRIES_PER_PEER;
-import static org.hyperledger.besu.ethereum.util.LogUtil.throttledLog;
+import static org.hyperledger.besu.util.log.LogUtil.throttledLog;

 import org.hyperledger.besu.ethereum.ProtocolContext;
 import org.hyperledger.besu.ethereum.core.BlockHeader;
@@ -96,7 +96,7 @@ public class SyncTargetManager extends AbstractSyncTargetManager {
     if (bestPeer.chainState().getEstimatedHeight() < pivotBlockHeader.getNumber()) {
       LOG.info(
           "Best peer {} has chain height {} below pivotBlock height {}. Waiting for better peers. Current {} of max {}",
-          maybeBestPeer.map(EthPeer::getShortNodeId).orElse("none"),
+          maybeBestPeer.map(EthPeer::getLoggableId).orElse("none"),
           maybeBestPeer.map(p -> p.chainState().getEstimatedHeight()).orElse(-1L),
           pivotBlockHeader.getNumber(),
           ethPeers.peerCount(),
@@ -138,7 +138,7 @@ public class SyncTargetManager extends AbstractSyncTargetManager {
       }
       LOG.debug(
           "Retrying best peer {} with new pivot block {}",
-          bestPeer.getShortNodeId(),
+          bestPeer.getLoggableId(),
           pivotBlockHeader.toLogString());
       return confirmPivotBlockHeader(bestPeer);
     } else {

@@ -95,7 +95,7 @@ public class RetryingGetHeaderFromPeerByHashTask
                 "No block header for hash "
                     + referenceHash
                     + " returned by peer "
-                    + peer.getShortNodeId());
+                    + peer.getLoggableId());
           }
           result.complete(peerResult.getResult());
           return peerResult.getResult();

@@ -87,7 +87,7 @@ public class NewPooledTransactionHashesMessageProcessor {
       LOG.atTrace()
           .setMessage(
               "Received pooled transaction hashes message from {}... incoming hashes {}, incoming list {}")
-          .addArgument(() -> peer == null ? null : peer.getShortNodeId())
+          .addArgument(() -> peer == null ? null : peer.getLoggableId())
          .addArgument(incomingTransactionHashes::size)
          .addArgument(incomingTransactionHashes)
          .log();

Some files were not shown because too many files have changed in this diff.
