/*
 * Copyright Hyperledger Besu contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 */
import com.github.jk1.license.filter.LicenseBundleNormalizer
import groovy.transform.CompileStatic
import groovy.transform.Memoized
import net.ltgt.gradle.errorprone.CheckSeverity
import java.text.SimpleDateFormat
import java.util.regex.Pattern
// Build plugins. Order is preserved; `jmh` is applied lazily per-subproject below.
plugins {
  id 'com.diffplug.spotless' version '6.25.0'
  id 'com.github.ben-manes.versions' version '0.51.0'
  id 'com.github.jk1.dependency-license-report' version '2.7'
  id 'com.jfrog.artifactory' version '5.2.0'
  id 'io.spring.dependency-management' version '1.1.5'
  id 'me.champeau.jmh' version '0.7.2' apply false
  id 'net.ltgt.errorprone' version '3.1.0'
  id 'maven-publish'
  id 'org.sonarqube' version '4.4.1.3373'
}
// SonarCloud analysis configuration; credentials come from the environment.
sonarqube {
  properties {
    property "sonar.projectKey", "$System.env.SONAR_PROJECT_KEY"
    property "sonar.organization", "$System.env.SONAR_ORGANIZATION"
    property "sonar.gradle.skipCompile", "true"
    property "sonar.host.url", "https://sonarcloud.io"
    property "sonar.coverage.jacoco.xmlReportPaths", "${buildDir}/reports/jacoco/jacocoRootReport/jacocoRootReport.xml"
    property "sonar.coverage.exclusions", "acceptance-tests/**/*"
  }
}

// Sonar consumes the aggregated jacoco XML report, so generate it first.
project.tasks["sonarqube"].dependsOn "jacocoRootReport"
// Fail fast when the build is launched with an unsupported JDK.
if (!JavaVersion.current().isCompatibleWith(JavaVersion.VERSION_21)) {
  throw new GradleException("Java 21 or later is required to build Besu.\n" +
    " Detected version ${JavaVersion.current()}")
}
group = 'org.hyperledger.besu'

defaultTasks 'build', 'checkLicense', 'javadoc'

// Shorthand aliases: e.g. `gradle dev` expands into the listed task names.
def buildAliases = [
  'dev'  : [
    'spotlessApply',
    'build',
    'checkLicense',
    'javadoc'
  ],
  'build': ['spotlessCheck', 'build']
]

// Rewrite the requested task list, expanding any alias into its target tasks.
def expandedTaskList = []
gradle.startParameter.taskNames.each { requested ->
  expandedTaskList << (buildAliases[requested] ?: requested)
}
gradle.startParameter.taskNames = expandedTaskList.flatten()
// Gets an integer command argument, passed with -Pname=x, or the default if not provided.
def _intCmdArg(name, defaultValue) {
  return project.hasProperty(name) ? project.property(name) as int : defaultValue
}

// Gets an integer command argument, or null when the property is absent.
def _intCmdArg(name) {
  return _intCmdArg(name, null)
}

// Gets a comma-separated command argument (-Pname=a,b,c) as a list of strings.
def _strListCmdArg(name, defaultValue) {
  if (!project.hasProperty(name)) {
    return defaultValue
  }
  return ((String) project.property(name)).tokenize(',')
}

// Gets a comma-separated command argument, or null when the property is absent.
def _strListCmdArg(name) {
  return _strListCmdArg(name, null)
}
// Shell used by exec tasks: a WSL wrapper batch file on Windows, bash elsewhere.
def shell = org.gradle.internal.os.OperatingSystem.current().isWindows() ? "${projectDir}\\wslsh.bat" : '/bin/bash'
licenseReport {
  // This is for the allowed-licenses-file in the checkLicense task.
  // Accepts File, URL or String path to a local or remote file.
  allowedLicensesFile = new File("$rootDir/gradle/allowed-licenses.json")
  excludes = [
    // only used for static analysis, not actually shipped
    'com.google.errorprone:javac',
    'org.checkerframework:dataflow-shaded',
    'org.checkerframework:dataflow-errorprone',
    // exclude Kotlin multiplatform dependency containers, they have the same license as their contents
    'com.squareup.okio:okio',
    'org.jetbrains.kotlinx:kotlinx-coroutines-core'
  ]
  // If set to true, then all boms will be excluded from the report.
  excludeBoms = true
  filters = [
    new LicenseBundleNormalizer(bundlePath: "$rootDir/gradle/license-normalizer-bundle.json")
  ]
}
allprojects {
// Shared configuration for the root project and every subproject.
apply plugin: 'java-library'
apply plugin: 'io.spring.dependency-management'
apply plugin: 'jacoco'
apply plugin: 'net.ltgt.errorprone'
apply from: "${rootDir}/gradle/versions.gradle"

version = calculateVersion()
jacoco {
  toolVersion = '0.8.11'
  // Projects with a referenceTests task get coverage instrumentation for it too.
  if (project.tasks.findByName('referenceTests')) {
    applyTo referenceTests
  }
}
// Package the project sources for publication.
task sourcesJar(type: Jar, dependsOn: classes) {
  archiveClassifier = 'sources'
  from sourceSets.main.allSource
}

// Package the generated javadoc for publication.
task javadocJar(type: Jar, dependsOn: javadoc) {
  archiveClassifier = 'javadoc'
  from javadoc.outputDirectory
}

tasks.build {
  dependsOn 'javadoc'
}
sourceCompatibility = 21
targetCompatibility = 21

repositories {
  // Scoped repositories: each only serves the group patterns listed in its content filter.
  maven {
    url 'https://hyperledger.jfrog.io/hyperledger/besu-maven'
    content { includeGroupByRegex('org\\.hyperledger\\..*') }
  }
  maven {
    url 'https://artifacts.consensys.net/public/maven/maven/'
    content { includeGroupByRegex('tech\\.pegasys(\\..*)?') }
  }
  maven {
    url 'https://splunk.jfrog.io/splunk/ext-releases-local'
    content { includeGroupByRegex('com\\.splunk\\..*') }
  }
  maven {
    url 'https://gitlab.com/api/v4/projects/19871573/packages/maven'
    content { includeGroupByRegex('com\\.gitlab\\.javafuzz(\\..*)?') }
  }
  mavenCentral()
  // Ethereum execution spec test fixtures, fetched straight from GitHub releases.
  // Used exclusively by the ethereum submodule to run reference tests.
  def ethExecSpecTestsRepo = ivy {
    url 'https://github.com'
    patternLayout {
      artifact '/[organisation]/[module]/releases/download/v[revision]/[classifier].[ext]'
    }
    metadataSources {
      artifact()
    }
  }
  exclusiveContent {
    forRepositories(ethExecSpecTestsRepo)
    filter { includeModule('ethereum', 'execution-spec-tests') }
  }
}
dependencies {
  components.all(BouncyCastleCapability)
  errorprone 'com.google.errorprone:error_prone_core'
  // https://github.com/hyperledger/besu-errorprone-checks/
  errorprone "org.hyperledger.besu:besu-errorprone-checks"
}

configurations.all {
  // When multiple artifacts provide the same BouncyCastle capability, keep the newest.
  resolutionStrategy.capabilitiesResolution.withCapability('org.bouncycastle:bcprov-jdk18on') {
    selectHighestVersion()
  }
  resolutionStrategy.capabilitiesResolution.withCapability('org.bouncycastle:bcpkix-jdk18on') {
    selectHighestVersion()
  }
}
apply plugin: 'com.diffplug.spotless'
spotless {
  java {
    // This path needs to be relative to each project
    target 'src/**/*.java'
    targetExclude '**/src/reference-test/**', '**/src/main/generated/**', '**/src/test/generated/**', '**/src/jmh/generated/**'
    removeUnusedImports()
    googleJavaFormat('1.22.0')
    importOrder 'org.hyperledger', 'java', ''
    trimTrailingWhitespace()
    endWithNewline()
    // apply appropriate license header files.
    licenseHeaderFile("${rootDir}/gradle/spotless/java.former.license").named("older").onlyIfContentMatches("^/\\*\\r?\\n.*Copyright ConsenSys AG\\.")
    licenseHeaderFile("${rootDir}/gradle/spotless/java.former.date.license").named("older.year").onlyIfContentMatches("^/\\*\\r?\\n.* Copyright \\d{4} ConsenSys AG\\.")
    licenseHeaderFile("${rootDir}/gradle/spotless/java.current.license").named("current").onlyIfContentMatches("^(?!/\\*\\r?\\n \\*.*ConsenSys AG\\.)")
  }
  // spotless check applied to build.gradle (groovy) files
  groovyGradle {
    target '*.gradle'
    greclipse('4.31').configFile(rootProject.file('gradle/spotless/greclipse.properties'))
    endWithNewline()
  }
  // Below this line are currently only license header tasks
  format 'ShellScripts', {
    target '**/*.sh'
    targetExclude '**/src/reference-test/**', '**/src/main/generated/**', '**/src/test/generated/**', '**/src/jmh/generated/**'
    trimTrailingWhitespace()
    endWithNewline()
    licenseHeaderFile("${rootDir}/gradle/spotless/sh.license", "^(?!##).+").skipLinesMatching("^#!.+?\$")
  }
  format 'Solidity', {
    target '**/*.sol'
    targetExclude '**/src/reference-test/**', '**/src/main/generated/**', '**/src/test/generated/**', '**/src/jmh/generated/**'
    trimTrailingWhitespace()
    endWithNewline()
    licenseHeaderFile("${rootDir}/gradle/spotless/java.former.license", "^pragma solidity.+?").named("former").onlyIfContentMatches("^/\\*\\r?\\n.*Copyright ConsenSys AG\\.")
    licenseHeaderFile("${rootDir}/gradle/spotless/java.former.date.license", "^pragma solidity.+?").named("former.date").onlyIfContentMatches("^/\\*\\r?\\n.* Copyright \\d{4} ConsenSys AG\\.")
    licenseHeaderFile("${rootDir}/gradle/spotless/java.current.license", "^pragma solidity.+?").named("current").onlyIfContentMatches("^(?!/\\*\\r?\\n \\*.*ConsenSys AG\\.)")
  }
}
tasks.withType(JavaCompile).configureEach {
  // Turn selected lint categories into hard errors.
  options.compilerArgs += [
    '-Xlint:unchecked',
    '-Xlint:cast',
    '-Xlint:rawtypes',
    '-Xlint:overloads',
    '-Xlint:divzero',
    '-Xlint:finally',
    '-Xlint:static',
    '-Werror',
  ]
  options.errorprone {
    excludedPaths = '.*/generated/*.*'
    disableWarningsInGeneratedCode = true
    // Our equals need to be symmetric, this checker doesn't respect that.
    check('EqualsGetClass', CheckSeverity.OFF)
    // We like to use futures with no return values.
    check('FutureReturnValueIgnored', CheckSeverity.OFF)
    // We use the JSR-305 annotations instead of the Google annotations.
    check('ImmutableEnumChecker', CheckSeverity.OFF)
    // This is a style check instead of an error-prone pattern.
    check('UnnecessaryParentheses', CheckSeverity.OFF)
    // This check is broken in Java 12. See https://github.com/google/error-prone/issues/1257
    if (JavaVersion.current() == JavaVersion.VERSION_12) {
      check('Finally', CheckSeverity.OFF)
    }
    // This check is broken after Java 12. See https://github.com/google/error-prone/issues/1352
    if (JavaVersion.current() > JavaVersion.VERSION_12) {
      check('TypeParameterUnusedInFormals', CheckSeverity.OFF)
    }
    check('FieldCanBeFinal', CheckSeverity.WARN)
    check('InsecureCryptoUsage', CheckSeverity.WARN)
    check('WildcardImport', CheckSeverity.WARN)
  }
  options.encoding = 'UTF-8'
}
// IntelliJ workaround to allow repeated debugging of unchanged code.
// IDE-generated JavaExec task names contain a dot; never treat them as up to date.
tasks.withType(JavaExec) {
  if (it.name.contains(".")) {
    outputs.upToDateWhen { false }
  }
}
/*
 * Pass some system properties provided on the gradle command line to test executions for
 * convenience.
 *
 * The properties passed are:
 * - 'test.ethereum.include': allows running a single Ethereum reference test. For instance,
 *   running a single general state test can be done with:
 *     ./gradlew :ethereum:org.hyperledger.besu.ethereum.vm:test -Dtest.single=GeneralStateTest -Dtest.ethereum.include=callcodecallcallcode_101-Frontier
 *   The meaning being that only the tests for which the value passed as "include"
 *   (which can be a java pattern) matches parts of the test name will be run. Knowing that test
 *   names for reference tests are of the form:
 *     <name>(-<milestone>([<variant>])?)?
 *   where <name> is the test name as defined in the json file (usually the name of the json file
 *   as well), <milestone> is the Ethereum milestone tested (not all tests use it) and <variant>
 *   is only used in some general state tests where, for the same json file and same milestone,
 *   multiple variants of that test are run. The variant is a simple number.
 * - 'test.ethereum.state.eip': for general state tests, allows running only tests for the
 *   milestone specified by this value. So for instance,
 *     ./gradlew :ethereum:org.hyperledger.besu.ethereum.vm:test -Dtest.single=GeneralStateTest -Dtest.ethereum.state.eip=Frontier
 *   only runs general state tests for Frontier. Note that this behavior could be achieved as well
 *   with the 'include' option above since it is a pattern, but this is a slightly more convenient
 *   option.
 * - 'root.log.level' and 'evm.log.level': allow controlling the log level used during the tests.
 * - 'acctests.keepLogsOfPassingTests': log files of failed acceptance tests are always saved.
 *   This property additionally keeps the log files of successful tests.
 */
test {
  jvmArgs += [
    '-Xmx4g',
    '-XX:-UseGCOverheadLimit',
    // Mockito and jackson-databind do some strange reflection during tests.
    // This suppresses an illegal access warning.
    '--add-opens',
    'java.base/java.util=ALL-UNNAMED',
    '--add-opens',
    'java.base/java.util.concurrent=ALL-UNNAMED',
    '--add-opens',
    'java.base/java.util.concurrent.atomic=ALL-UNNAMED',
    // errorprone tests need access to the javac compiler
    '--add-exports',
    'jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED',
    '--add-exports',
    'jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED',
    '--add-exports',
    'jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED',
    '--add-exports',
    'jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED',
    '--add-exports',
    'jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED',
    '--add-exports',
    'jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED',
    '--add-exports',
    'jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED',
    '--add-exports',
    'jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED'
  ]
  // Forward selected -D properties from the gradle invocation into the test JVMs.
  Set toImport = [
    'test.ethereum.include',
    'test.ethereum.state.eip',
    'root.log.level',
    'evm.log.level',
    'acctests.keepLogsOfPassingTests'
  ]
  for (String name : toImport) {
    if (System.getProperty(name) != null) {
      systemProperty name, System.getProperty(name)
    }
  }
  useJUnitPlatform {}
}
javadoc {
  options.addBooleanOption('Xdoclint:all', true)
  // disable doc lint checking for generated code and acceptance tests dsl.
  options.addBooleanOption('Xdoclint/package:-org.hyperledger.besu.privacy.contracts.generated,' +
    '-org.hyperledger.besu.tests.acceptance.*,' +
    '-org.hyperledger.besu.tests.web3j.generated,' +
    // TODO: these are temporary excluded from lint (ethereum sub modules), it should be removed in a future PR.
    // ethereum api module
    '-org.hyperledger.besu.ethereum.api.handlers,' +
    '-org.hyperledger.besu.ethereum.api.jsonrpc,' +
    '-org.hyperledger.besu.ethereum.api.jsonrpc.*,' +
    '-org.hyperledger.besu.ethereum.api.query,' +
    '-org.hyperledger.besu.ethereum.api.query.*,' +
    '-org.hyperledger.besu.ethereum.api.tls,' +
    '-org.hyperledger.besu.ethereum.api.util,' +
    // ethereum blockcreation module
    '-org.hyperledger.besu.ethereum.blockcreation,' +
    '-org.hyperledger.besu.ethereum.blockcreation.*,' +
    // ethereum core module
    '-org.hyperledger.besu.ethereum.chain,' +
    '-org.hyperledger.besu.ethereum.core,' +
    '-org.hyperledger.besu.ethereum.core.*,' +
    '-org.hyperledger.besu.ethereum.debug,' +
    '-org.hyperledger.besu.ethereum.difficulty.fixed,' +
    '-org.hyperledger.besu.ethereum.forkid,' +
    '-org.hyperledger.besu.ethereum.mainnet,' +
    '-org.hyperledger.besu.ethereum.mainnet.*,' +
    '-org.hyperledger.besu.ethereum.privacy,' +
    '-org.hyperledger.besu.ethereum.privacy.*,' +
    '-org.hyperledger.besu.ethereum.processing,' +
    '-org.hyperledger.besu.ethereum.proof,' +
    '-org.hyperledger.besu.ethereum.storage,' +
    '-org.hyperledger.besu.ethereum.storage.*,' +
    '-org.hyperledger.besu.ethereum.transaction,' +
    // NOTE: '-org.hyperledger.besu.ethereum.trie.*' was listed here AND under the trie
    // module below; the redundant duplicate was removed (kept in the trie module section).
    '-org.hyperledger.besu.ethereum.util,' +
    '-org.hyperledger.besu.ethereum.vm,' +
    '-org.hyperledger.besu.ethereum.worldstate,' +
    // ethereum eth module
    '-org.hyperledger.besu.ethereum.eth.*,' +
    '-org.hyperledger.besu.ethereum.eth,' +
    '-org.hyperledger.besu.consensus.merge,' +
    // p2p module
    '-org.hyperledger.besu.ethereum.p2p,' +
    '-org.hyperledger.besu.ethereum.p2p.*,' +
    // permissioning module
    '-org.hyperledger.besu.ethereum.permissioning,' +
    '-org.hyperledger.besu.ethereum.permissioning.*,' +
    // referencetests module
    '-org.hyperledger.besu.ethereum.referencetests,' +
    // retesteth module
    '-org.hyperledger.besu.ethereum.retesteth.methods,' +
    '-org.hyperledger.besu.ethereum.retesteth,' +
    // rlp module
    '-org.hyperledger.besu.ethereum.rlp,' +
    // stratum module
    '-org.hyperledger.besu.ethereum.stratum,' +
    // trie module
    '-org.hyperledger.besu.ethereum.trie.*,' +
    '-org.hyperledger.besu.ethereum.trie,' +
    // verkle trie module
    '-org.hyperledger.besu.ethereum.verkletrie,' +
    '-org.hyperledger.besu.ethereum.verkletrie.*',
    true)
  // Do not truncate the error/warning lists so CI output shows everything.
  options.addStringOption('Xmaxerrs', '65535')
  options.addStringOption('Xmaxwarns', '65535')
  options.addStringOption('Xwerror', '-html5')
  options.encoding = 'UTF-8'
}
}
task deploy() {}

// Fails the build when two subprojects would publish to identical maven coordinates.
task checkMavenCoordinateCollisions {
  doLast {
    def seen = [:]
    getAllprojects().forEach {
      if (it.properties.containsKey('publishing') && it.jar?.enabled) {
        def coordinate = it.publishing?.publications[0].coordinates
        if (coordinate.toString().startsWith("org") && seen.containsKey(coordinate)) {
          throw new GradleException("Duplicate maven coordinates detected, ${coordinate} is used by " +
            "both ${seen[coordinate]} and ${it.path}.\n" +
            "Please add a `publishing` script block to one or both subprojects.")
        }
        seen[coordinate] = it.path
      }
    }
  }
}

tasks.register('checkPluginAPIChanges', DefaultTask) {}
checkPluginAPIChanges.dependsOn(':plugin-api:checkAPIChanges')
check.dependsOn('checkPluginAPIChanges', 'checkMavenCoordinateCollisions')
subprojects {
if (file('src/test-support').directory) {
  sourceSets {
    // test-support can be consumed as a library by other projects in their tests
    testSupport {
      java {
        compileClasspath += main.output
        runtimeClasspath += main.output
        srcDir file('src/test-support/java')
      }
      resources.srcDir file('src/test-support/resources')
    }
  }
  dependencies { testImplementation sourceSets.testSupport.output }
  // Jar so other subprojects can depend on the compiled test-support classes.
  task testSupportJar(type: Jar) {
    archiveBaseName = "${project.name}-support-test"
    archiveClassifier = 'test-support'
    from sourceSets.testSupport.output
  }
}
if (file('src/integration-test').directory) {
  sourceSets {
    integrationTest {
      java {
        compileClasspath += main.output
        runtimeClasspath += main.output
        srcDir file('src/integration-test/java')
      }
      resources.srcDir file('src/integration-test/resources')
    }
  }
  if (file('src/test-support').directory) {
    dependencies { integrationTestImplementation sourceSets.testSupport.output }
  }
  task integrationTest(type: Test, dependsOn: ["compileTestJava"]) {
    group = "verification"
    description = "Runs the Besu integration tests"
    jvmArgs = [
      '--add-opens',
      'java.base/java.util=ALL-UNNAMED',
      '--add-opens',
      'java.base/java.util.concurrent=ALL-UNNAMED'
    ]
    testClassesDirs = sourceSets.integrationTest.output.classesDirs
    classpath = sourceSets.integrationTest.runtimeClasspath
    // Integration tests are never considered up to date; always re-run.
    outputs.upToDateWhen { false }
    useJUnitPlatform {}
  }
}
// True when the named source set exists and actually contains sources.
def sourceSetIsPopulated = { sourceSetName ->
  def result = project.sourceSets.names.contains(sourceSetName) && !project.sourceSets.getAt(sourceSetName).allSource.empty
  logger.info("Project = " + project.name + " Has Source Set (" + sourceSetName + ") = " + result + "(" + project.sourceSets.names + ")")
  return result
}

// Only projects with publishable sources get the publishing/artifactory plumbing.
if (sourceSetIsPopulated("main") || sourceSetIsPopulated("testSupport")) {

  apply plugin: 'com.jfrog.artifactory'
  apply plugin: 'maven-publish'

  publishing {
    publications {
      mavenJava(MavenPublication) {
        groupId "org.hyperledger.besu.internal"
        version "${project.version}"
        if (sourceSetIsPopulated("main")) {
          from components.java
          artifact sourcesJar
          artifact javadocJar
        }
        if (sourceSetIsPopulated("testSupport")) {
          artifact testSupportJar
        }
        versionMapping {
          usage('java-api') { fromResolutionOf('runtimeClasspath') }
          usage('java-runtime') { fromResolutionResult() }
        }
        pom {
          name = "Besu - ${project.name}"
          url = 'http://github.com/hyperledger/besu'
          licenses {
            license {
              name = 'The Apache License, Version 2.0'
              url = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
            }
          }
          scm {
            connection = 'scm:git:git://github.com/hyperledger/besu.git'
            developerConnection = 'scm:git:ssh://github.com/hyperledger/besu.git'
            url = 'https://github.com/hyperledger/besu'
          }
        }
      }
    }
  }

  // Credentials come from -P properties, falling back to environment variables.
  def artifactoryUser = project.hasProperty('artifactoryUser') ? project.property('artifactoryUser') : System.getenv('ARTIFACTORY_USER')
  def artifactoryKey = project.hasProperty('artifactoryApiKey') ? project.property('artifactoryApiKey') : System.getenv('ARTIFACTORY_KEY')
  def artifactoryRepo = System.getenv('ARTIFACTORY_REPO') ?: 'besu-maven'
  def artifactoryOrg = System.getenv('ARTIFACTORY_ORG') ?: 'hyperledger'

  artifactory {
    contextUrl = "https://hyperledger.jfrog.io/${artifactoryOrg}"
    publish {
      repository {
        repoKey = "${artifactoryRepo}"
        username = artifactoryUser
        password = artifactoryKey
      }
      defaults {
        publications('mavenJava')
        publishArtifacts = true
        publishPom = true
      }
    }
  }
}
tasks.withType(Test) {
  // If GRADLE_MAX_TEST_FORKS is not set, use half the available processors
  maxParallelForks = (System.getenv('GRADLE_MAX_TEST_FORKS') ?: (Runtime.runtime.availableProcessors().intdiv(2) ?: 1)).toInteger()
}

tasks.withType(JavaCompile) {
  options.fork = true
  options.incremental = true
}

configurations {
  testSupportImplementation.extendsFrom implementation
  integrationTestImplementation.extendsFrom implementation
  testSupportArtifacts
}
if (file('src/jmh').directory) {
  apply plugin: 'me.champeau.jmh'
  jmh {
    // Allows controlling JMH execution directly from the command line. A typical execution
    // may look like:
    //   gradle jmh -Pf=2 -Pwi=3 -Pi=5 -Pinclude=MyBench
    // which will run 2 forks with 3 warmup iterations and 5 normal ones for each, and will
    // only run the benchmark matching 'MyBench' (a regexp).
    warmupForks = _intCmdArg('wf')
    warmupIterations = _intCmdArg('wi')
    fork = _intCmdArg('f')
    iterations = _intCmdArg('i')
    benchmarkMode = _strListCmdArg('bm')
    includes = _strListCmdArg('include', [''])
    humanOutputFile = project.file("${project.buildDir}/reports/jmh/results.txt")
    resultFormat = 'JSON'
    duplicateClassesStrategy = DuplicatesStrategy.WARN
  }
  dependencies { jmh 'org.slf4j:slf4j-api' }
}
// Ensure the assemble task also compiles the integration-test sources when present.
afterEvaluate { project ->
  if (project.tasks.findByName('compileIntegrationTestJava')) {
    project.tasks.assemble.dependsOn compileIntegrationTestJava
  }
}
}
// The root project produces a distribution, not a jar.
jar { enabled = false }

apply plugin: 'application'
mainClassName = 'org.hyperledger.besu.Besu'

applicationDefaultJvmArgs = [
  '-Dvertx.disableFileCPResolving=true',
  // BESU_HOME is replaced by a doFirst block in the run task.
  '-Dbesu.home=BESU_HOME',
  // We shutdown log4j ourselves, as otherwise this shutdown hook runs before our own and whatever
  // happens during shutdown is not logged.
  '-Dlog4j.shutdownHookEnabled=false',
  // Disable JNI lookups in log4j messages to improve security
  '-Dlog4j2.formatMsgNoLookups=true',
  // Redirect java.util.logging loggers to use log4j2.
  '-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager',
  // Suppress Java JPMS warnings. Document the reason for each suppression.
  // Bouncy Castle needs access to sun.security.provider, which is not open by default.
  '--add-opens',
  'java.base/sun.security.provider=ALL-UNNAMED',
  // Jackson likes to access java.util.OptionalLong's constructor
  '--add-opens',
  'java.base/java.util=ALL-UNNAMED',
  // suppress netty specific module warnings in debug
  "-Dio.netty.tryReflectionSetAccessible=true",
  "--add-exports",
  "java.base/jdk.internal.misc=ALL-UNNAMED",
  "--add-opens",
  "java.base/java.nio=ALL-UNNAMED"
]

run {
  // Extra CLI args can be supplied with -P'besu.run.args'="--flag1 --flag2".
  args project.hasProperty("besu.run.args") ? project.property("besu.run.args").toString().split("\\s+") : []
  doFirst {
    applicationDefaultJvmArgs = applicationDefaultJvmArgs.collect {
      it.replace('BESU_HOME', "$buildDir/besu")
    }
  }
}
// Post-processes generated start scripts: substitutes BESU_HOME with the
// platform-appropriate app-home expression, and collapses the Windows
// classpath to a wildcard to stay under the 8191-character command limit.
def tweakStartScript(createScriptTask) {
  def shortenWindowsClasspath = { line ->
    line.replaceAll(/^set CLASSPATH=.*$/, "set CLASSPATH=%APP_HOME%/lib/*")
  }

  createScriptTask.unixScript.text = createScriptTask.unixScript.text.replace('BESU_HOME', '\$APP_HOME')
  createScriptTask.windowsScript.text = createScriptTask.windowsScript.text.replace('BESU_HOME', '%~dp0..')

  // Prevent the error originating from the 8191 chars limit on Windows
  createScriptTask.windowsScript.text =
    createScriptTask.windowsScript
      .readLines()
      .collect(shortenWindowsClasspath)
      .join('\r\n')
}
startScripts {
  // Tuned GC/JFR defaults on top of the shared JVM args.
  defaultJvmOpts = applicationDefaultJvmArgs + [
    "-XX:G1ConcRefinementThreads=2",
    "-XX:G1HeapWastePercent=15",
    "-XX:MaxGCPauseMillis=100",
    "-XX:StartFlightRecording,dumponexit=true,settings=default.jfc",
    "-Xlog:jfr*=off"
  ]
  unixStartScriptGenerator.template = resources.text.fromFile("${projectDir}/besu/src/main/scripts/unixStartScript.txt")
  windowsStartScriptGenerator.template = resources.text.fromFile("${projectDir}/besu/src/main/scripts/windowsStartScript.txt")
  doLast { tweakStartScript(startScripts) }
}

// Variant launcher without the GC/JFR tuning flags.
task untunedStartScripts(type: CreateStartScripts) {
  mainClass = 'org.hyperledger.besu.Besu'
  classpath = startScripts.classpath
  outputDir = startScripts.outputDir
  applicationName = 'besu-untuned'
  defaultJvmOpts = applicationDefaultJvmArgs
  unixStartScriptGenerator.template = resources.text.fromFile("${projectDir}/besu/src/main/scripts/unixStartScript.txt")
  windowsStartScriptGenerator.template = resources.text.fromFile("${projectDir}/besu/src/main/scripts/windowsStartScript.txt")
  doLast { tweakStartScript(untunedStartScripts) }
}

// Launcher for the standalone EVM tool.
task evmToolStartScripts(type: CreateStartScripts) {
  mainClass = 'org.hyperledger.besu.evmtool.EvmTool'
  classpath = startScripts.classpath
  outputDir = startScripts.outputDir
  applicationName = 'evmtool'
  defaultJvmOpts = [
    "-Dsecp256k1.randomize=false"
  ]
  unixStartScriptGenerator.template = resources.text.fromFile("${projectDir}/besu/src/main/scripts/unixStartScript.txt")
  windowsStartScriptGenerator.template = resources.text.fromFile("${projectDir}/besu/src/main/scripts/windowsStartScript.txt")
  doLast { tweakStartScript(evmToolStartScripts) }
}
// Generates the bash completion script by running Besu's generate-completion command.
task autocomplete(type: JavaExec) {
  dependsOn compileJava
  def tempAutocompleteFile = File.createTempFile("besu", ".autocomplete")
  standardOutput tempAutocompleteFile.newOutputStream()
  outputs.file "$buildDir/besu.autocomplete.sh"

  mainClass = application.mainClass
  args "generate-completion"
  classpath sourceSets.main.runtimeClasspath
  doLast {
    copy {
      from tempAutocompleteFile
      into "$buildDir"
      rename tempAutocompleteFile.getName(), 'besu.autocomplete.sh'
    }
  }
}
// Release builds may override the archive version via -P'release.releaseVersion'.
def archiveBuildVersion = project.hasProperty('release.releaseVersion') ? project.property('release.releaseVersion') : "${rootProject.version}"

installDist { dependsOn checkLicense, untunedStartScripts, evmToolStartScripts }

distTar {
  dependsOn checkLicense, autocomplete, untunedStartScripts, evmToolStartScripts
  doFirst {
    delete fileTree(dir: 'build/distributions', include: '*.tar.gz')
  }
  compression = Compression.GZIP
  setVersion(archiveBuildVersion)
  archiveExtension = 'tar.gz'
}

distZip {
  dependsOn checkLicense, autocomplete, untunedStartScripts, evmToolStartScripts
  doFirst {
    delete fileTree(dir: 'build/distributions', include: '*.zip')
  }
  setVersion(archiveBuildVersion)
}

publishing {
  publications {
    distArtifactory(MavenPublication) {
      groupId = '.'
      version = project.version
      artifactId = 'besu'
    }
  }
}

artifactoryPublish {
  dependsOn distTar
  dependsOn distZip
}
def dockerBuildVersion = project.hasProperty('release.releaseVersion') ? project.property('release.releaseVersion') : "${rootProject.version}"
def dockerOrgName = project.hasProperty('dockerOrgName') ? project.getProperty("dockerOrgName") : "hyperledger"
def dockerArtifactName = project.hasProperty("dockerArtifactName") ? project.getProperty("dockerArtifactName") : "besu"
def dockerImageName = "${dockerOrgName}/${dockerArtifactName}"

// Rename the top-level dir from besu-<version> to besu; this keeps the docker
// build context simple and version-independent.
tasks.register("dockerDistUntar") {
  dependsOn distTar
  dependsOn distZip
  def dockerBuildDir = "build/docker-besu/"
  def distTarFile = distTar.outputs.files.singleFile
  def distTarFileName = distTar.outputs.files.singleFile.name.replace(".tar.gz", "")

  doFirst {
    new File(dockerBuildDir).mkdir()
    copy {
      from tarTree(distTarFile)
      into(dockerBuildDir)
    }
    project.delete(files("${dockerBuildDir}/besu"))
    file("${dockerBuildDir}/${distTarFileName}").renameTo("${dockerBuildDir}/besu")
  }
}
// Builds the Besu docker image from the unpacked distribution.
task distDocker {
  dependsOn dockerDistUntar
  inputs.dir("build/docker-besu/")
  def dockerBuildDir = "build/docker-besu/"

  doLast {
    copy {
      from file("${projectDir}/docker/Dockerfile")
      into(dockerBuildDir)
    }
    exec {
      def image = "${dockerImageName}:${dockerBuildVersion}"
      def dockerPlatform = ""
      // Optional cross-platform build via -P'docker-platform'=linux/arm64 etc.
      if (project.hasProperty('docker-platform')) {
        dockerPlatform = "--platform ${project.getProperty('docker-platform')}"
        println "Building for platform ${project.getProperty('docker-platform')}"
      }
      def gitDetails = getGitCommitDetails()
      executable shell
      workingDir dockerBuildDir
      args "-c", "docker build ${dockerPlatform} --build-arg BUILD_DATE=${buildTime()} --build-arg VERSION=${dockerBuildVersion} --build-arg VCS_REF=${gitDetails.hash} -t ${image} ."
    }
  }
}
// Runs the docker smoke-test script against the freshly built image.
task testDocker {
  dependsOn distDocker
  def dockerReportsDir = "docker/reports/"

  doFirst {
    new File(dockerReportsDir).mkdir()
  }

  doLast {
    exec {
      def image = project.hasProperty('release.releaseVersion') ? "${dockerImageName}:" + project.property('release.releaseVersion') : "${dockerImageName}:${project.version}"
      workingDir "${projectDir}/docker"
      executable shell
      args "-c", "./test.sh ${image}"
    }
  }
}
// Tags the built image with its architecture suffix and pushes it.
task dockerUpload {
  dependsOn distDocker
  def architecture = System.getenv('architecture')
  def image = "${dockerImageName}:${dockerBuildVersion}"
  // NOTE(review): additionalTags is computed but not referenced in this visible
  // chunk — confirm whether the push loop below should also push these tags.
  def additionalTags = []

  if (project.hasProperty('branch') && project.property('branch') == 'main') {
    additionalTags.add('develop')
  }

  if (!isInterimBuild(dockerBuildVersion)) {
    additionalTags.add(dockerBuildVersion.split(/\./)[0..1].join('.'))
  }

  doLast {
    exec {
      def archVariantImage = "${image}-${architecture}"
      def cmd = "docker tag '${image}' '${archVariantImage}' && docker push '${archVariantImage}'"
      println "Executing '${cmd}'"
      executable shell
      args "-c", cmd
    }
  }
}
// Re-tags each per-architecture release image as latest-<arch> and pushes it.
// NOTE: the original task performed the identical pull+tag and push exec pair
// twice per architecture; the redundant duplicate execs have been removed —
// the docker commands are idempotent so the end state is unchanged.
task dockerUploadRelease {
  def archs = ["arm64", "amd64"]
  def image = "${dockerImageName}:${dockerBuildVersion}"

  doLast {
    for (def architecture in archs) {
      exec {
        def archImage = "${image}-${architecture}"
        def cmd = "docker pull '${archImage}' && docker tag '${archImage}' '${dockerImageName}:latest-${architecture}'"
        println "Executing '${cmd}'"
        executable shell
        args "-c", cmd
      }
      exec {
        def cmd = "docker push '${dockerImageName}:latest-${architecture}'"
        println "Executing '${cmd}'"
        executable shell
        args "-c", cmd
      }
    }
  }
}
// Creates and pushes a multi-arch manifest for the versioned image tag.
task manifestDocker {
  def image = "${dockerImageName}:${dockerBuildVersion}"
  def archs = [
    "arm64",
    "amd64"] //TODO: this assumes dockerUpload task has already been run on 2 different archs!

  doLast {
    exec {
      def targets = ""
      archs.forEach { arch -> targets += "'${image}-${arch}' " }
      def cmd = "docker manifest create '${image}' ${targets}"
      println "Executing '${cmd}'"
      executable shell
      args "-c", cmd
    }
    exec {
      def cmd = "docker manifest push '${image}'"
      println "Executing '${cmd}'"
      executable shell
      args "-c", cmd
    }
  }
}
// Creates and pushes the multi-arch manifest for the "latest" release tag.
task manifestDockerRelease {
  def archs = ["arm64", "amd64"]
  def baseTag = "${dockerImageName}:latest"

  doLast {
    exec {
      def targets = ""
      archs.forEach { arch -> targets += "'${baseTag}-${arch}' " }
      def cmd = "docker manifest create '${baseTag}' ${targets} --amend"
      println "Executing '${cmd}'"
      executable shell
      args "-c", cmd
    }
    exec {
      def cmd = "docker manifest push '${baseTag}'"
      println "Executing '${cmd}'"
      executable shell
      args "-c", cmd
    }
  }
}
// Regex-safe file separator. NOTE(review): not referenced in this chunk —
// presumably used further down the file for path matching; confirm before removing.
def sep = Pattern.quote(File.separator)

jacocoTestReport {
  reports {
    // XML is the format SonarCloud consumes (see sonar.coverage.jacoco.xmlReportPaths).
    xml.required = true
  }
}
// Aggregated JaCoCo coverage report across all subprojects; "sonarqube" depends
// on this task (wired near the top of the file).
task jacocoRootReport(type: org.gradle.testing.jacoco.tasks.JacocoReport) {
  additionalSourceDirs.from files(subprojects.sourceSets.main.allSource.srcDirs)
  sourceDirectories.from files(subprojects.sourceSets.main.allSource.srcDirs)
  // Exclude acceptance-test classes, mirroring the sonar.coverage.exclusions setting.
  classDirectories.from files(subprojects.sourceSets.main.output).asFileTree.matching { exclude 'org/hyperledger/besu/tests/acceptance/**' }
  // Pick up every JaCoCo execution file produced anywhere in the build tree.
  executionData.from fileTree(dir: '.', includes: ['**/jacoco/*.exec'])
  reports {
    xml.required = true
    csv.required = true
    html.destination file("build/reports/jacocoHtml")
  }
  // Always run, even when no upstream test task did any work.
  onlyIf = { true }
}
// http://label-schema.org/rc1/
// using the RFC3339 format "2016-04-12T23:20:50.52Z"
// Returns the current UTC time formatted for docker image labels.
def buildTime() {
  def formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'")
  formatter.timeZone = TimeZone.getTimeZone("UTC")
  formatter.format(new Date())
}
@Memoized
// Derives the project version: either a supplied calendar version (with the git
// commit hash appended when absent) or a date-based fallback computed from git.
def calculateVersion() {
  // Calendar-versioning patterns; patch revision and pre-release suffix are optional.
  def bareCalVer = ~/\d+\.\d+(\.\d+)?(-\w+)?$/
  def calVerWithCommit = ~/\d+\.\d+(\.\d+)?(-\w+)?-[0-9a-fA-F]{7,8}$/
  def git = getGitCommitDetails() // Adjust length as needed
  def hasSuppliedVersion = project.hasProperty('version')
  if (hasSuppliedVersion && project.version =~ calVerWithCommit) {
    println("Utilising supplied version as it appears to already contain commit hash: ${project.version}")
    return project.version
  }
  if (hasSuppliedVersion && project.version =~ bareCalVer) {
    println("Generating project version using supplied version: ${project.version}-${git.hash}")
    return "${project.version}-${git.hash}"
  }
  // No usable supplied version: fall back to a date+commit "develop" version.
  println("Generating project version using date (${git.date}-develop-${git.hash}), as supplied version is not semver: ${project.version}")
  return "${git.date}-develop-${git.hash}"
}
// Returns [hash: <short commit hash>, date: <"yy.M" of last HEAD change>],
// falling back to placeholder values when git metadata is unavailable.
// @param length number of hash characters to keep (default 8)
def getGitCommitDetails(length = 8) {
  try {
    def gitFolder = "$projectDir/.git/"
    if (!file(gitFolder).isDirectory()) {
      // Worktree/submodule layout: .git is a file containing "gitdir: <path>".
      // FIX: this previously did substring(length), i.e. it stripped the *hash
      // length* number of characters — only correct because the default (8)
      // coincidentally equals "gitdir: ".length(). Strip the actual prefix so
      // any hash length works.
      gitFolder = file(gitFolder).text.replaceFirst(/^gitdir:\s*/, '').trim() + "/"
    }
    def head = new File(gitFolder + "HEAD").text.split(":")
    // A detached HEAD stores the commit hash directly (no "ref: ..." line).
    def isCommit = head.length == 1
    def commitHash, refHeadFile
    if (isCommit) {
      commitHash = head[0].trim().take(length)
      refHeadFile = new File(gitFolder + "HEAD")
    } else {
      refHeadFile = new File(gitFolder + head[1].trim())
      commitHash = refHeadFile.text.trim().take(length)
    }
    // Use head file modification time as a proxy for the build date
    def lastModified = new Date(refHeadFile.lastModified())
    // Format the date as "yy.M" (e.g. 24.3 for March 2024)
    def formattedDate = new SimpleDateFormat("yy.M").format(lastModified)
    return [hash: commitHash, date: formattedDate]
  } catch (Exception e) {
    logger.warn('Could not calculate git commit details, using defaults (run with --info for stacktrace)')
    logger.info('Error retrieving git commit details', e)
    return [hash: "xxxxxxxx", date: "00.0"]
  }
}
// Takes the version and if it contains SNAPSHOT, alpha, beta or RC in version then return true indicating an interim build
def isInterimBuild(dockerBuildVersion) {
  // FIX: written as one expression with *trailing* `||` operators. Groovy ends a
  // statement at a newline whenever the line already parses, so continuation
  // lines that *start* with `||` can silently truncate the return to only the
  // first two checks.
  return (dockerBuildVersion ==~ /.*-SNAPSHOT/) || (dockerBuildVersion ==~ /.*-alpha/) ||
    (dockerBuildVersion ==~ /.*-beta/) || (dockerBuildVersion ==~ /.*-RC.*/) ||
    (dockerBuildVersion ==~ /.*develop.*/)
}
// Guards against publishing truncated/corrupt archives by requiring each
// distribution to be at least a plausible size.
tasks.register("verifyDistributions") {
  dependsOn distTar
  dependsOn distZip
  def distTarFile = distTar.outputs.files.singleFile
  def distZipFile = distZip.outputs.files.singleFile
  def minDistributionSize = 20000000
  // Sanity check the distributions by checking they are at least a reasonable size
  doFirst {
    [tar: distTarFile, zip: distZipFile].each { kind, distFile ->
      if (distFile.length() < minDistributionSize) {
        throw new GradleException("Distribution ${kind} is suspiciously small: " + distFile.length() + " bytes")
      }
    }
  }
}
dependencies {
  // Error Prone compile-time static analysis, plus Besu's project-specific checks.
  errorprone 'com.google.errorprone:error_prone_core'
  // https://github.com/hyperledger/besu-errorprone-checks/
  errorprone 'org.hyperledger.besu:besu-errorprone-checks'
  implementation project(':besu')
  implementation project(':ethereum:evmtool')
}
@CompileStatic
// Declares that the legacy jdk15on BouncyCastle artifacts provide the same
// capability as their jdk18on successors, so Gradle detects/resolves the clash.
class BouncyCastleCapability implements ComponentMetadataRule {
  private static final Map<String, String> CAPABILITY_BY_LEGACY_NAME = [
    'bcprov-jdk15on': 'bcprov-jdk18on',
    'bcpkix-jdk15on': 'bcpkix-jdk18on',
  ]

  void execute(ComponentMetadataContext context) {
    context.details.with {
      if (id.group == "org.bouncycastle") {
        def capabilityName = CAPABILITY_BY_LEGACY_NAME[id.name]
        if (capabilityName != null) {
          allVariants {
            it.withCapabilities {
              it.addCapability("org.bouncycastle", capabilityName, "0")
            }
          }
        }
      }
    }
  }
}
distributions {
  main {
    contents {
      // Bundle licensing and documentation files at the archive root.
      [
        "./LICENSE",
        "build/reports/license/license-dependency.html",
        "./docs/GettingStartedBinaries.md",
        "./docs/DocsArchive0.8.0.html",
      ].each { doc -> from(doc) { into "." } }
      // Shell completion script produced by the autocomplete task.
      from(autocomplete) { into "." }
    }
  }
}
// Distribution sanity checks gate both the local build and artifactory publishing;
// publishing must also wait for the archives/javadoc it uploads.
[build, artifactoryPublish].each { it.dependsOn verifyDistributions }
[distTar, distZip, javadocJar].each { artifactoryPublish.mustRunAfter(it) }