/*
 * Copyright ConsenSys AG.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 */

import groovy.transform.Memoized
import net.ltgt.gradle.errorprone.CheckSeverity

import java.text.SimpleDateFormat

plugins {
  id 'com.diffplug.spotless' version '6.12.0'
  id 'com.github.ben-manes.versions' version '0.42.0'
  id 'com.github.hierynomus.license' version '0.16.1-fix'
  id 'com.jfrog.artifactory' version '4.28.3'
  id 'io.spring.dependency-management' version '1.0.11.RELEASE'
  id 'me.champeau.jmh' version '0.6.6' apply false
  id 'net.ltgt.errorprone' version '2.0.2'
  id 'maven-publish'
}

if (!JavaVersion.current().java11Compatible) {
  throw new GradleException("Java 11 or later is required to build Besu.\n" +
  " Detected version ${JavaVersion.current()}")
}

group = 'org.hyperledger.besu'

defaultTasks 'build', 'checkLicenses', 'javadoc'

def buildAliases = ['dev': [
    'spotlessApply',
    'build',
    'checkLicenses',
    'javadoc'
  ]]

def expandedTaskList = []
gradle.startParameter.taskNames.each {
  expandedTaskList << (buildAliases[it] ? buildAliases[it] : it)
}
gradle.startParameter.taskNames = expandedTaskList.flatten()
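
// For example, with the 'dev' alias defined above, `./gradlew dev` is expanded before task
// resolution into `./gradlew spotlessApply build checkLicenses javadoc`; task names without an
// alias (e.g. `./gradlew test`) pass through unchanged.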

// Gets an integer command argument, passed with -Pname=x, or the default if not provided.
def _intCmdArg(name, defaultValue) {
  return project.hasProperty(name) ? project.property(name) as int : defaultValue
}

def _intCmdArg(name) {
  return _intCmdArg(name, null)
}

def _strListCmdArg(name, defaultValue) {
  if (!project.hasProperty(name))
    return defaultValue

  return ((String) project.property(name)).tokenize(',')
}

def _strListCmdArg(name) {
  return _strListCmdArg(name, null)
}
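
// Usage sketch for the helpers above (the property names are illustrative; the jmh block further
// down reads 'wf', 'wi', 'f', 'i', 'bm' and 'include'):
//   ./gradlew jmh -Pwi=3 -Pi=5      ->  _intCmdArg('wi') == 3, _intCmdArg('i') == 5
//   ./gradlew jmh -Pbm=avgt,thrpt   ->  _strListCmdArg('bm') == ['avgt', 'thrpt']
// When a property is not provided, the helpers fall back to the supplied default (or null).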

allprojects {
  apply plugin: 'java-library'
  apply plugin: 'io.spring.dependency-management'
  apply plugin: 'jacoco'
  apply plugin: 'net.ltgt.errorprone'
  apply from: "${rootDir}/gradle/versions.gradle"
  apply from: "${rootDir}/gradle/check-licenses.gradle"

  version = rootProject.version

  jacoco {
    toolVersion = '0.8.8'
    if (project.tasks.findByName('referenceTests')) {
      applyTo referenceTests
    }
  }

  task sourcesJar(type: Jar, dependsOn: classes) {
    classifier = 'sources'
    from sourceSets.main.allSource
  }

  task javadocJar(type: Jar, dependsOn: javadoc) {
    classifier = 'javadoc'
    from javadoc.outputDirectory
  }

  sourceCompatibility = 11
  targetCompatibility = 11

  repositories {
    maven {
      url 'https://hyperledger.jfrog.io/hyperledger/besu-maven'
      content { includeGroupByRegex('org\\.hyperledger\\..*') }
    }
    maven {
      url 'https://artifacts.consensys.net/public/maven/maven/'
      content { includeGroupByRegex('tech\\.pegasys(\\..*)?') }
    }
    maven {
      url 'https://dl.cloudsmith.io/public/consensys/quorum-mainnet-launcher/maven/'
      content { includeGroupByRegex('net\\.consensys\\..*') }
    }
    maven {
      url 'https://splunk.jfrog.io/splunk/ext-releases-local'
      content { includeGroupByRegex('com\\.splunk\\..*') }
    }
    mavenCentral()
  }

  dependencies { errorprone 'com.google.errorprone:error_prone_core' }

  apply plugin: 'com.diffplug.spotless'
  spotless {
    java {
      // This path needs to be relative to each project
      target 'src/**/*.java'
      targetExclude '**/src/reference-test/**', '**/src/main/generated/**', '**/src/test/generated/**', '**/src/jmh/generated/**'
      removeUnusedImports()
      googleJavaFormat('1.15.0')
      importOrder 'org.hyperledger', 'java', ''
      trimTrailingWhitespace()
      endWithNewline()
    }
    groovyGradle {
      target '*.gradle'
      greclipse('4.21.0').configFile(rootProject.file('gradle/formatter.properties'))
      endWithNewline()
    }
    // Below this line are currently only license header tasks
    format 'groovy', { target '**/src/*/groovy/**/*.groovy' }
    format 'bash', { target '**/*.sh' }
    format 'sol', { target '**/*.sol' }
  }

  tasks.withType(JavaCompile) {
    options.compilerArgs += [
      '-Xlint:unchecked',
      '-Xlint:cast',
      '-Xlint:rawtypes',
      '-Xlint:overloads',
      '-Xlint:divzero',
      '-Xlint:finally',
      '-Xlint:static',
      '-Werror',
    ]

    options.errorprone {
      excludedPaths = '.*/(generated/*.*|.*ReferenceTest_.*|build/.*/annotation-output/.*)'

      // Our equals methods need to be symmetric; this checker doesn't respect that.
      check('EqualsGetClass', CheckSeverity.OFF)
      // We like to use futures with no return values.
      check('FutureReturnValueIgnored', CheckSeverity.OFF)
      // We use the JSR-305 annotations instead of the Google annotations.
      check('ImmutableEnumChecker', CheckSeverity.OFF)
      // This is a style check instead of an error-prone pattern.
      check('UnnecessaryParentheses', CheckSeverity.OFF)

      // This check is broken in Java 12. See https://github.com/google/error-prone/issues/1257
      if (JavaVersion.current() == JavaVersion.VERSION_12) {
        check('Finally', CheckSeverity.OFF)
      }
      // This check is broken after Java 12. See https://github.com/google/error-prone/issues/1352
      if (JavaVersion.current() > JavaVersion.VERSION_12) {
        check('TypeParameterUnusedInFormals', CheckSeverity.OFF)
      }

      check('FieldCanBeFinal', CheckSeverity.WARN)
      check('InsecureCryptoUsage', CheckSeverity.WARN)
      check('WildcardImport', CheckSeverity.WARN)
    }

    options.encoding = 'UTF-8'
  }

  /*
   * Pass some system properties provided on the gradle command line to test executions for
   * convenience.
   *
   * The properties passed are:
   * - 'test.ethereum.include': allows running a single Ethereum reference test. For instance,
   *   running a single general state test can be done with:
   *    ./gradlew :ethereum:org.hyperledger.besu.ethereum.vm:test -Dtest.single=GeneralStateTest -Dtest.ethereum.include=callcodecallcallcode_101-Frontier
   *   Only the tests whose name matches the value passed as "include" (which can be a java
   *   pattern) are run. Reference test names are of the form:
   *    <name>(-<milestone>([<variant>])?)?
   *   where <name> is the test name as defined in the json file (usually the name of the json
   *   file as well), <milestone> is the Ethereum milestone tested (not all tests use it) and
   *   <variant> is only used in some general state tests where, for the same json file and the
   *   same milestone, multiple variants of that test are run. The variant is a simple number.
   * - 'test.ethereum.state.eip': for general state tests, only runs the tests for the
   *   milestone specified by this value. So for instance,
   *    ./gradlew :ethereum:org.hyperledger.besu.ethereum.vm:test -Dtest.single=GeneralStateTest -Dtest.ethereum.state.eip=Frontier
   *   only runs general state tests for Frontier. Note that this behavior could also be
   *   achieved with the 'include' option above, since it is a pattern, but this is a slightly
   *   more convenient option.
   * - 'root.log.level' and 'evm.log.level': control the log level used during the tests.
   * - 'acctests.keepLogsOfPassingTests': log files of failed acceptance tests are always saved.
   *   This property additionally keeps the log files of successful tests.
   */
  test {
    jvmArgs = [
      '-Xmx4g',
      '-XX:-UseGCOverheadLimit',
      // Mockito and jackson-databind do some strange reflection during tests.
      // This suppresses an illegal access warning.
      '--add-opens',
      'java.base/java.util=ALL-UNNAMED',
      '--add-opens',
      'java.base/java.util.concurrent=ALL-UNNAMED',
      '--add-opens',
      'java.base/java.util.concurrent.atomic=ALL-UNNAMED',
      // errorprone tests need access to the javac compiler
      '--add-exports',
      'jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED',
      '--add-exports',
      'jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED',
      '--add-exports',
      'jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED',
      '--add-exports',
      'jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED',
      '--add-exports',
      'jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED',
      '--add-exports',
      'jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED',
      '--add-exports',
      'jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED',
      '--add-exports',
      'jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED'
    ]
    Set toImport = [
      'test.ethereum.include',
      'test.ethereum.state.eip',
      'root.log.level',
      'evm.log.level',
      'acctests.keepLogsOfPassingTests'
    ]
    for (String name : toImport) {
      if (System.getProperty(name) != null) {
        systemProperty name, System.getProperty(name)
      }
    }
    useJUnitPlatform {}
  }
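
  // A couple of concrete invocations for the properties documented above (the acceptance-test
  // task name is an assumption; it is defined elsewhere in the build, not in this file):
  //   ./gradlew test -Droot.log.level=DEBUG -Devm.log.level=TRACE
  //   ./gradlew acceptanceTest -Dacctests.keepLogsOfPassingTests=true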

  javadoc {
    options.addBooleanOption('Xdoclint:all', true)
    // Disable doclint checking for generated code and the acceptance tests DSL.
    options.addBooleanOption('Xdoclint/package:-org.hyperledger.besu.privacy.contracts.generated,' +
      '-org.hyperledger.besu.tests.acceptance.*,' +
      '-org.hyperledger.besu.tests.web3j.generated,' +
      // TODO: these are temporarily disabled (ethereum and submodules); this should be removed in a future PR.
      '-org.hyperledger.besu.ethereum,' +
      '-org.hyperledger.besu.ethereum.*,' +
      '-org.hyperledger.besu.ethstats.*,' +
      '-org.hyperledger.besu.evmtool',
      true)
    options.addStringOption('Xmaxerrs','65535')
    options.addStringOption('Xmaxwarns','65535')
    options.addStringOption('Xwerror', '-html5')
    options.encoding = 'UTF-8'
  }
}

task deploy() {}

task checkMavenCoordinateCollisions {
  doLast {
    def coordinates = [:]
    getAllprojects().forEach {
      if (it.properties.containsKey('publishing') && it.jar?.enabled) {
        def coordinate = it.publishing?.publications[0].coordinates
        if (coordinates.containsKey(coordinate)) {
          throw new GradleException("Duplicate maven coordinates detected, ${coordinate} is used by " +
            "both ${coordinates[coordinate]} and ${it.path}.\n" +
            "Please add a `publishing` script block to one or both subprojects.")
        }
        coordinates[coordinate] = it.path
      }
    }
  }
}
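
// When checkMavenCoordinateCollisions fails, a colliding subproject can declare its own
// coordinates; a hypothetical override in that subproject's build.gradle might look like:
//   publishing {
//     publications {
//       mavenJava(MavenPublication) { artifactId = 'besu-some-unique-name' }
//     }
//   }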

tasks.register('checkPluginAPIChanges', DefaultTask) {}
checkPluginAPIChanges.dependsOn(':plugin-api:checkAPIChanges')
check.dependsOn('checkPluginAPIChanges', 'checkMavenCoordinateCollisions')

subprojects {

  if (file('src/test-support').directory) {
    sourceSets {
      // test-support can be consumed as a library by other projects in their tests
      testSupport {
        java {
          compileClasspath += main.output
          runtimeClasspath += main.output
          srcDir file('src/test-support/java')
        }
        resources.srcDir file('src/test-support/resources')
      }
    }

    dependencies { testImplementation sourceSets.testSupport.output }

    task testSupportJar(type: Jar) {
      archiveBaseName = "${project.name}-support-test"
      classifier = 'test-support'
      from sourceSets.testSupport.output
    }
  }

  if (file('src/integration-test').directory) {
    sourceSets {
      integrationTest {
        java {
          compileClasspath += main.output
          runtimeClasspath += main.output
          srcDir file('src/integration-test/java')
        }
        resources.srcDir file('src/integration-test/resources')
      }
    }

    if (file('src/test-support').directory) {
      dependencies { integrationTestImplementation sourceSets.testSupport.output }
    }

    task integrationTest(type: Test, dependsOn: ["compileTestJava"]) {
      group = "verification"
      description = "Runs the Besu integration tests"

      jvmArgs = [
        '--add-opens',
        'java.base/java.util=ALL-UNNAMED',
        '--add-opens',
        'java.base/java.util.concurrent=ALL-UNNAMED'
      ]
      testClassesDirs = sourceSets.integrationTest.output.classesDirs
      classpath = sourceSets.integrationTest.runtimeClasspath
      outputs.upToDateWhen { false }
      useJUnitPlatform {}
    }
  }

  def sourceSetIsPopulated = { sourceSetName ->
    def result = project.sourceSets.names.contains(sourceSetName) && !project.sourceSets.getAt(sourceSetName).allSource.empty
    logger.info("Project = " + project.name + " Has Source Set (" + sourceSetName + ") = " + result + "(" + project.sourceSets.names + ")")
    return result
  }

  if (sourceSetIsPopulated("main") || sourceSetIsPopulated("testSupport")) {
    apply plugin: 'com.jfrog.artifactory'
    apply plugin: 'maven-publish'

    publishing {
      publications {
        mavenJava(MavenPublication) {
          groupId "org.hyperledger.besu.internal"
          version "${project.version}"
          if (sourceSetIsPopulated("main")) {
            from components.java
            artifact sourcesJar
            artifact javadocJar
          }

          if (sourceSetIsPopulated("testSupport")) {
            artifact testSupportJar
          }
          versionMapping {
            usage('java-api') { fromResolutionOf('runtimeClasspath') }
            usage('java-runtime') { fromResolutionResult() }
          }
          pom {
            name = "Besu - ${project.name}"
            url = 'http://github.com/hyperledger/besu'
            licenses {
              license {
                name = 'The Apache License, Version 2.0'
                url = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
              }
            }
            scm {
              connection = 'scm:git:git://github.com/hyperledger/besu.git'
              developerConnection = 'scm:git:ssh://github.com/hyperledger/besu.git'
              url = 'https://github.com/hyperledger/besu'
            }
          }
        }
      }
    }

    def artifactoryUser = project.hasProperty('artifactoryUser') ? project.property('artifactoryUser') : System.getenv('ARTIFACTORY_USER')
    def artifactoryKey = project.hasProperty('artifactoryApiKey') ? project.property('artifactoryApiKey') : System.getenv('ARTIFACTORY_KEY')
    def artifactoryRepo = System.getenv('ARTIFACTORY_REPO') ?: 'besu-maven'
    def artifactoryOrg = System.getenv('ARTIFACTORY_ORG') ?: 'hyperledger'

    artifactory {
      contextUrl = "https://hyperledger.jfrog.io/${artifactoryOrg}"
      publish {
        repository {
          repoKey = "${artifactoryRepo}"
          username = artifactoryUser
          password = artifactoryKey
        }
        defaults {
          publications('mavenJava')
          publishArtifacts = true
          publishPom = true
        }
      }
    }
  }
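
  // Credentials for the publish repository above can be supplied either as project properties
  // or as environment variables, e.g. (values are placeholders):
  //   ./gradlew artifactoryPublish -PartifactoryUser=ci-user -PartifactoryApiKey=<api-key>
  //   ARTIFACTORY_USER=ci-user ARTIFACTORY_KEY=<api-key> ./gradlew artifactoryPublish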

  tasks.withType(Test) {
    // If GRADLE_MAX_TEST_FORKS is not set, use half the available processors
    maxParallelForks = (System.getenv('GRADLE_MAX_TEST_FORKS') ?: (Runtime.runtime.availableProcessors().intdiv(2) ?: 1)).toInteger()
  }
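
  // For example, `GRADLE_MAX_TEST_FORKS=4 ./gradlew test` caps test workers at 4, while on an
  // 8-core machine without the variable set the expression above also resolves to 4
  // (half the available processors, with a minimum of 1).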

  tasks.withType(JavaCompile) {
    options.fork = true
    options.incremental = true
  }

  configurations {
    testSupportImplementation.extendsFrom implementation
    integrationTestImplementation.extendsFrom implementation
    testSupportArtifacts
  }

  if (file('src/jmh').directory) {
    apply plugin: 'me.champeau.jmh'

    jmh {
      // Allows JMH execution to be controlled directly from the command line. A typical
      // execution may look like:
      //    gradle jmh -Pf=2 -Pwi=3 -Pi=5 -Pinclude=MyBench
      // which will run 2 forks with 3 warmup iterations and 5 normal iterations each, and will
      // only run the benchmarks matching 'MyBench' (a regexp).
      warmupForks = _intCmdArg('wf')
      warmupIterations = _intCmdArg('wi')
      fork = _intCmdArg('f')
      iterations = _intCmdArg('i')
      benchmarkMode = _strListCmdArg('bm')
      includes = _strListCmdArg('include', [''])
      humanOutputFile = project.file("${project.buildDir}/reports/jmh/results.txt")
      resultFormat = 'JSON'
      duplicateClassesStrategy = DuplicatesStrategy.WARN
    }

    dependencies { jmh 'org.slf4j:slf4j-api' }
  }
}

jar { enabled = false }

apply plugin: 'application'
mainClassName = 'org.hyperledger.besu.Besu'
applicationDefaultJvmArgs = [
  '-Dvertx.disableFileCPResolving=true',
  // BESU_HOME is replaced by a doFirst block in the run task.
  '-Dbesu.home=BESU_HOME',
  // We shut down log4j ourselves; otherwise its shutdown hook runs before our own and whatever
  // happens during shutdown is not logged.
  '-Dlog4j.shutdownHookEnabled=false',
  // Disable JNDI lookups in log4j messages to improve security.
  '-Dlog4j2.formatMsgNoLookups=true',
  // Redirect java.util.logging loggers to use log4j2.
  '-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager',
  // Suppress Java JPMS warnings. Document the reason for each suppression.
  // Bouncy Castle needs access to sun.security.provider, which is not open by default.
  '--add-opens',
  'java.base/sun.security.provider=ALL-UNNAMED',
  // Jackson likes to access java.util.OptionalLong's constructor
  '--add-opens',
  'java.base/java.util=ALL-UNNAMED',
  // Suppress Netty-specific module warnings in debug.
  "-Dio.netty.tryReflectionSetAccessible=true",
  "--add-exports",
  "java.base/jdk.internal.misc=ALL-UNNAMED",
  "--add-opens",
  "java.base/java.nio=ALL-UNNAMED"
]

run {
  args project.hasProperty("besu.run.args") ? project.property("besu.run.args").toString().split("\\s+") : []
  doFirst {
    applicationDefaultJvmArgs = applicationDefaultJvmArgs.collect {
      it.replace('BESU_HOME', "$buildDir/besu")
    }
  }
}
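
// A minimal sketch of passing CLI arguments through to the run task; the Besu flags shown are
// illustrative only and are not defined by this build script:
//   ./gradlew run -Pbesu.run.args="--network=dev --data-path=/tmp/besu-dev"
// The property value is split on whitespace, so each flag becomes a separate program argument.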

def tweakStartScript(createScriptTask) {
  def shortenWindowsClasspath = { line ->
    line.replaceAll(/^set CLASSPATH=.*$/, "set CLASSPATH=%APP_HOME%/lib/*")
  }

  createScriptTask.unixScript.text = createScriptTask.unixScript.text.replace('BESU_HOME', '\$APP_HOME')
  createScriptTask.windowsScript.text = createScriptTask.windowsScript.text.replace('BESU_HOME', '%~dp0..')

  // Prevent errors caused by the 8191-character command-line limit on Windows
  createScriptTask.windowsScript.text =
    createScriptTask.windowsScript
    .readLines()
    .collect(shortenWindowsClasspath)
    .join('\r\n')
}

startScripts {
  unixStartScriptGenerator.template = resources.text.fromFile("${projectDir}/besu/src/main/scripts/unixStartScript.txt")
  windowsStartScriptGenerator.template = resources.text.fromFile("${projectDir}/besu/src/main/scripts/windowsStartScript.txt")
  doLast { tweakStartScript(startScripts) }
}

task evmToolStartScripts(type: CreateStartScripts) {
  mainClass = 'org.hyperledger.besu.evmtool.EvmTool'
  classpath = startScripts.classpath
  outputDir = startScripts.outputDir
  applicationName = 'evm'
  defaultJvmOpts = [
    "-Dsecp256k1.randomize=false"
  ]
  unixStartScriptGenerator.template = resources.text.fromFile("${projectDir}/besu/src/main/scripts/unixStartScript.txt")
  windowsStartScriptGenerator.template = resources.text.fromFile("${projectDir}/besu/src/main/scripts/windowsStartScript.txt")
  doLast { tweakStartScript(evmToolStartScripts) }
}

task autocomplete(type: JavaExec) {
  dependsOn compileJava
  def tempAutocompleteFile = File.createTempFile("besu", ".autocomplete")
  standardOutput tempAutocompleteFile.newOutputStream()
  outputs.file "$buildDir/besu.autocomplete.sh"

  mainClass = application.mainClass
  args "generate-completion"
  classpath sourceSets.main.runtimeClasspath
  doLast {
    copy {
      from tempAutocompleteFile
      into "$buildDir"
      rename tempAutocompleteFile.getName(), 'besu.autocomplete.sh'
    }
  }
}
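
// Usage sketch: the generated completion script can be sourced into a bash session, e.g.
//   source build/besu.autocomplete.sh
// (the file is produced by the autocomplete task above and bundled into the distributions below).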

installDist { dependsOn checkLicenses }

distTar {
  dependsOn checkLicenses, autocomplete
  doFirst {
    delete fileTree(dir: 'build/distributions', include: '*.tar.gz')
  }
  compression = Compression.GZIP
  archiveExtension = 'tar.gz'
}

distZip {
  dependsOn checkLicenses, autocomplete
  doFirst {
    delete fileTree(dir: 'build/distributions', include: '*.zip')
  }
}

publishing {
  publications {
    distArtifactory(MavenPublication) {
      groupId = '.'
      version = project.version
      artifactId = 'besu'
      artifact("$buildDir/distributions/besu-${project.version}.zip")
      artifact("$buildDir/distributions/besu-${project.version}.tar.gz") { extension = 'tar.gz' }
    }
  }
}

def artifactoryUser = project.hasProperty('artifactoryUser') ? project.property('artifactoryUser') : System.getenv('ARTIFACTORY_USER')
def artifactoryKey = project.hasProperty('artifactoryApiKey') ? project.property('artifactoryApiKey') : System.getenv('ARTIFACTORY_KEY')
def artifactoryOrg = System.getenv('ARTIFACTORY_ORG') ?: 'hyperledger'

artifactory {
  contextUrl = "https://hyperledger.jfrog.io/${artifactoryOrg}"
  publish {
    repository {
      repoKey = "besu-binaries"
      username = artifactoryUser
      password = artifactoryKey
    }
    defaults {
      publications('distArtifactory')
      publishArtifacts = true
      publishPom = false
    }
  }
}

artifactoryPublish {
  dependsOn distTar
  dependsOn distZip
}

def dockerBuildVersion = project.hasProperty('release.releaseVersion') ? project.property('release.releaseVersion') : "${rootProject.version}"
def dockerOrgName = project.hasProperty('dockerOrgName') ? project.getProperty("dockerOrgName") : "hyperledger"
def dockerArtifactName = project.hasProperty("dockerArtifactName") ? project.getProperty("dockerArtifactName") : "besu"
def dockerImageName = "${dockerOrgName}/${dockerArtifactName}"
def dockerVariants = project.hasProperty("dockerVariants") ? project.getProperty("dockerVariants").split(",") : [
  "openjdk-11",
  "openjdk-11-debug",
  "graalvm",
  "openjdk-latest"
]
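
// The defaults above can be overridden on the command line, e.g. (values are illustrative):
//   ./gradlew distDocker -PdockerVariants=openjdk-11,graalvm -Pdocker-platform=linux/arm64
// which builds only the listed variants and passes `--platform linux/arm64` to `docker build`.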

// Rename the top-level directory from besu-<version> to besu; this keeps the Docker build simple.
tasks.register("dockerDistUntar") {
  dependsOn distTar
  dependsOn distZip
  def dockerBuildDir = "build/docker-besu/"
  def distTarFile = distTar.outputs.files.singleFile
  def distTarFileName = distTar.outputs.files.singleFile.name.replace(".tar.gz", "")

  doFirst {
    new File(dockerBuildDir).mkdir()
    copy {
      from tarTree(distTarFile)
      into(dockerBuildDir)
    }
    project.delete(files("${dockerBuildDir}/besu"))
    file("${dockerBuildDir}/${distTarFileName}").renameTo("${dockerBuildDir}/besu")
  }
}

task distDocker {
  dependsOn dockerDistUntar
  inputs.dir("build/docker-besu/")
  def dockerBuildDir = "build/docker-besu/"

  doLast {
    for (def jvmVariant in dockerVariants) {
      copy {
        from file("${projectDir}/docker/${jvmVariant}/Dockerfile")
        into(dockerBuildDir)
      }
      exec {
        def image = "${dockerImageName}:${dockerBuildVersion}-${jvmVariant}"
        def dockerPlatform = ""
        if (project.hasProperty('docker-platform')){
          dockerPlatform = "--platform ${project.getProperty('docker-platform')}"
          println "Building for platform ${project.getProperty('docker-platform')}"
        }
        executable "sh"
        workingDir dockerBuildDir
        args "-c", "docker build ${dockerPlatform} --build-arg BUILD_DATE=${buildTime()} --build-arg VERSION=${dockerBuildVersion} --build-arg VCS_REF=${getCheckedOutGitCommitHash()} -t ${image} ."
      }
    }

    // tag the "default" (which is the variant in the zero position)
    exec {
      executable "sh"
      args "-c", "docker tag '${dockerImageName}:${dockerBuildVersion}-${dockerVariants[0]}' '${dockerImageName}:${dockerBuildVersion}'"
    }
    // create a static tag for the benchmark target
    exec {
      executable "sh"
      args "-c", "docker tag '${dockerImageName}:${dockerBuildVersion}-${dockerVariants[0]}' '${dockerImageName}:benchmark'"
    }
  }
}

task testDocker {
  dependsOn distDocker
  def dockerReportsDir = "docker/reports/"

  doFirst {
    new File(dockerReportsDir).mkdir()
  }

  doLast {
    for (def variant in dockerVariants) {
      exec {
        def image = project.hasProperty('release.releaseVersion') ? "${dockerImageName}:" + project.property('release.releaseVersion') : "${dockerImageName}:${project.version}"
        workingDir "${projectDir}/docker/${variant}"
        executable "sh"
        args "-c", "bash ../test.sh ${image}-${variant}"
      }
    }
  }
}

task dockerUpload {
  dependsOn distDocker
  def architecture = System.getenv('architecture')
  def image = "${dockerImageName}:${dockerBuildVersion}"
  def additionalTags = []

  if (project.hasProperty('branch') && project.property('branch') == 'main') {
    additionalTags.add('develop')
  }

  def isInterimBuild = (dockerBuildVersion ==~ /.*-SNAPSHOT/) || (dockerBuildVersion ==~ /.*-alpha/)
    || (dockerBuildVersion ==~ /.*-beta/) || (dockerBuildVersion ==~ /.*-RC.*/)

  if (!isInterimBuild) {
    additionalTags.add(dockerBuildVersion.split(/\./)[0..1].join('.'))
  }

  doLast {
    for (def variant in dockerVariants) {
      def variantImage = "${image}-${variant}"
      exec {
        def archVariantImage = "${variantImage}-${architecture}"
        def cmd = "docker tag '${variantImage}' '${archVariantImage}' && docker push '${archVariantImage}'"
        additionalTags.each { tag -> cmd += " && docker tag '${variantImage}' '${dockerImageName}:${tag.trim()}-${variant}-${architecture}' && docker push '${dockerImageName}:${tag.trim()}-${variant}-${architecture}'" }
        executable "sh"
        args "-c", cmd
      }
    }

    exec {
      def archImage = "${image}-${architecture}"
      def cmd = "docker tag ${image} ${archImage} && docker push '${archImage}'"
      additionalTags.each { tag -> cmd += " && docker tag '${image}' '${dockerImageName}:${tag.trim()}-${architecture}' && docker push '${dockerImageName}:${tag.trim()}-${architecture}'" }
      executable "sh"
      args "-c", cmd
    }
  }
}

task dockerUploadRelease {
  def archs = ["arm64", "amd64"]
  def image = "${dockerImageName}:${dockerBuildVersion}"

  doLast {
    for (def architecture in archs) {
      for (def variant in dockerVariants) {
        def variantImage = "${image}-${variant}"
        exec {
          def cmd = "docker pull '${variantImage}-${architecture}' && docker tag '${variantImage}-${architecture}' '${dockerImageName}:latest-${variant}-${architecture}' && docker push '${dockerImageName}:latest-${variant}-${architecture}'"
          executable "sh"
          args "-c", cmd
        }
      }

      exec {
        def archImage = "${image}-${architecture}"
        def cmd = "docker pull '${archImage}' && docker tag ${archImage} '${dockerImageName}:latest-${architecture}' && docker push '${dockerImageName}:latest-${architecture}'"
        executable "sh"
        args "-c", cmd
      }
    }
  }
}

task manifestDocker {
  def image = "${dockerImageName}:${dockerBuildVersion}"
  def archs = ["arm64", "amd64"]
  def tags = ["${image}"]

  if (project.hasProperty('branch') && project.property('branch') == 'main') {
    tags.add("${dockerImageName}:develop")
  }

  if (!(dockerBuildVersion ==~ /.*-SNAPSHOT/)) {
    tags.add("${dockerImageName}:" + dockerBuildVersion.split(/\./)[0..1].join('.'))
  }

  doLast {
    for (baseTag in tags) {
      for (def variant in dockerVariants) {
        def variantImage = "${baseTag}-${variant}"
        def targets = ""
        archs.forEach { arch -> targets += "'${variantImage}-${arch}' " }

        exec {
          def cmd = "docker manifest create '${variantImage}' ${targets} && docker manifest push '${variantImage}'"
          executable "sh"
          args "-c", cmd
        }
      }

      exec {
        def targets = ""
        archs.forEach { arch -> targets += "'${baseTag}-${arch}' " }
        def cmd = "docker manifest create '${baseTag}' ${targets} && docker manifest push '${baseTag}'"
        executable "sh"
        args "-c", cmd
      }
    }
  }
}

task manifestDockerRelease {
  def archs = ["arm64", "amd64"]
  def baseTag = "${dockerImageName}:latest"

  doLast {
    for (def variant in dockerVariants) {
      def variantImage = "${baseTag}-${variant}"
      def targets = ""
      archs.forEach { arch -> targets += "'${variantImage}-${arch}' " }

      exec {
        def cmd = "docker manifest create '${variantImage}' ${targets} && docker manifest push '${variantImage}'"
        executable "sh"
        args "-c", cmd
      }
    }

    exec {
      def targets = ""
      archs.forEach { arch -> targets += "'${baseTag}-${arch}' " }
      def cmd = "docker manifest create '${baseTag}' ${targets} && docker manifest push '${baseTag}'"
      executable "sh"
      args "-c", cmd
    }
  }
}

task checkSpdxHeader(type: CheckSpdxHeader) {
  apply plugin: 'groovy'

  rootPath = "${projectDir}"
  spdxHeader = "* SPDX-License-Identifier: Apache-2.0"
  filesRegex = "(.*.java)|(.*.groovy)"
  excludeRegex = [
    "(.*/generalstate/GeneralStateRegressionReferenceTest.*)",
    "(.*/generalstate/GeneralStateReferenceTest.*)",
    "(.*/generalstate/LegacyGeneralStateReferenceTest.*)",
    "(.*/blockchain/BlockchainReferenceTest.*)",
    "(.*/blockchain/LegacyBlockchainReferenceTest.*)",
    "(.*/.gradle/.*)",
    "(.*/.idea/.*)",
    "(.*/out/.*)",
    "(.*/build/.*)",
    "(.*/src/[^/]+/generated/.*)",
  ].join("|")
}

task jacocoRootReport(type: org.gradle.testing.jacoco.tasks.JacocoReport) {
  additionalSourceDirs.from files(subprojects.sourceSets.main.allSource.srcDirs)
  sourceDirectories.from files(subprojects.sourceSets.main.allSource.srcDirs)
  classDirectories.from files(subprojects.sourceSets.main.output).asFileTree.matching { exclude 'org/hyperledger/besu/tests/acceptance/**' }
  executionData.from fileTree(dir: '.', includes: ['**/jacoco/*.exec'])
  reports {
    xml.required = true
    xml.enabled = true
    csv.required = true
    html.destination file("build/reports/jacocoHtml")
  }
  onlyIf = { true }
}
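
// Typical usage (a sketch): run the tests first so each subproject writes its **/jacoco/*.exec
// file, then aggregate them, e.g.
//   ./gradlew test jacocoRootReport
// The combined HTML report is written to build/reports/jacocoHtml.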

configurations { annotationProcessor }

// Prevent errorprone-checks from depending on itself, while still ensuring that every other
// subproject compiles with the custom rules.
configure(subprojects.findAll { it.name != 'errorprone-checks' }) {
  dependencies { annotationProcessor project(":errorprone-checks") }

  tasks.withType(JavaCompile) {
    options.annotationProcessorPath = configurations.annotationProcessor
  }
}

// http://label-schema.org/rc1/
// using the RFC3339 format "2016-04-12T23:20:50.52Z"
def buildTime() {
  def df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'")
  df.setTimeZone(TimeZone.getTimeZone("UTC"))
  return df.format(new Date())
}
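
// Note that the pattern above emits minute precision, e.g. '2016-04-12T23:20Z', rather than the
// fractional seconds shown in the label-schema example.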

// Takes the version and, if it ends with -SNAPSHOT, replaces that suffix
// with '-dev-' plus the short git commit hash.
@Memoized
def calculateVersion() {
  String version = rootProject.version
  if (version.endsWith("-SNAPSHOT")) {
    version = version.replace("-SNAPSHOT", "-dev-" + getCheckedOutGitCommitHash())
  }
  return version
}
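
// For example (hash is illustrative): a project version of '22.10.0-SNAPSHOT' becomes
// '22.10.0-dev-e5a7c79e', while release versions without '-SNAPSHOT' are returned unchanged.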

def getCheckedOutGitCommitHash(length = 8) {
  try {
    def gitFolder = "$projectDir/.git/"
    if (!file(gitFolder).isDirectory()) {
      // We are in a submodule (or worktree): .git is a file whose contents are `gitdir: <gitFolder>\n`.
      // Read the file, cut off the front, and trim the whitespace.
      // Note: substring(length) only strips the "gitdir: " prefix cleanly when length is the default of 8.
      gitFolder = file(gitFolder).text.substring(length).trim() + "/"
    }
    def takeFromHash = length
    /*
     * '.git/HEAD' contains either
     *      in case of detached head: the currently checked out commit hash
     *      otherwise: a reference to a file containing the current commit hash
     */
    def head = new File(gitFolder + "HEAD").text.split(":") // .git/HEAD
    def isCommit = head.length == 1 // e5a7c79edabbf7dd39888442df081b1c9d8e88fd

    if (isCommit) return head[0].trim().take(takeFromHash) // e5a7c79edabb

    def refHead = new File(gitFolder + head[1].trim()) // .git/refs/heads/master
    refHead.text.trim().take takeFromHash
  } catch (Exception e) {
    logger.warn('Could not calculate git commit, using "xxxxxxxx" (run with --info for stacktrace)')
    logger.info('Error retrieving git commit', e)
    return "xxxxxxxx"
  }
}
tasks.register("verifyDistributions") {
|
|
|
|
dependsOn distTar
|
|
|
|
dependsOn distZip
|
|
|
|
def distTarFile = distTar.outputs.files.singleFile
|
|
|
|
def distZipFile = distZip.outputs.files.singleFile
|
|
|
|
def minDistributionSize = 20000000
|
|
|
|
|
|
|
|
// Sanity check the distributions by checking they are at least a reasonable size
|
|
|
|
doFirst {
|
|
|
|
if (distTarFile.length() < minDistributionSize) {
|
|
|
|
throw new GradleException("Distribution tar is suspiciously small: " + distTarFile.length() + " bytes")
|
|
|
|
}
|
|
|
|
if (distZipFile.length() < minDistributionSize) {
|
|
|
|
throw new GradleException("Distribution zip is suspiciously small: " + distZipFile.length() + " bytes")
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|

dependencies {
  implementation project(':besu')
  implementation project(':ethereum:evmtool')
  errorprone 'com.google.errorprone:error_prone_core'
}

distributions {
  main {
    contents {
      from("./LICENSE") { into "." }
      from("build/reports/license/license-dependency.html") { into "." }
      from("./docs/GettingStartedBinaries.md") { into "." }
      from("./docs/DocsArchive0.8.0.html") { into "." }
      from("build/besu.autocomplete.sh") { into "." }
    }
  }
}

check.dependsOn checkSpdxHeader
build.dependsOn verifyDistributions
artifactoryPublish.dependsOn verifyDistributions
artifactoryPublish.mustRunAfter(distTar)
artifactoryPublish.mustRunAfter(distZip)
artifactoryPublish.mustRunAfter(javadocJar)