Consolidate all metric collectors into the metrics system (#7877)

* Consolidate all metric collectors into the metrics system

Signed-off-by: Fabio Di Fabio <fabio.difabio@consensys.net>

* Fixes

Signed-off-by: Fabio Di Fabio <fabio.difabio@consensys.net>

* Do not recreate the Prometheus metric system

Signed-off-by: Fabio Di Fabio <fabio.difabio@consensys.net>

---------

Signed-off-by: Fabio Di Fabio <fabio.difabio@consensys.net>
parent 833a5ce5dd
commit 02722bcd8a
 ethereum/api/src/main/java/org/hyperledger/besu/ethereum/api/jsonrpc/internal/methods/TraceBlock.java | 37
 ethereum/api/src/main/java/org/hyperledger/besu/ethereum/api/jsonrpc/internal/methods/TraceFilter.java | 28
 ethereum/api/src/main/java/org/hyperledger/besu/ethereum/api/jsonrpc/internal/methods/TraceReplayBlockTransactions.java | 25
 ethereum/api/src/main/java/org/hyperledger/besu/ethereum/api/jsonrpc/methods/JsonRpcMethodsFactory.java | 6
 ethereum/api/src/main/java/org/hyperledger/besu/ethereum/api/jsonrpc/methods/TraceJsonRpcMethods.java | 15
 ethereum/api/src/test/java/org/hyperledger/besu/ethereum/api/jsonrpc/internal/methods/TraceFilterTest.java | 4
 ethereum/core/build.gradle | 2
 ethereum/core/src/main/java/org/hyperledger/besu/ethereum/chain/DefaultBlockchain.java | 32
 ethereum/core/src/main/java/org/hyperledger/besu/ethereum/trie/diffbased/bonsai/cache/BonsaiCachedMerkleTrieLoader.java | 13
 ethereum/p2p/build.gradle | 1
 metrics/core/build.gradle | 1
 metrics/core/src/main/java/org/hyperledger/besu/metrics/ObservableMetricsSystem.java | 31
 metrics/core/src/main/java/org/hyperledger/besu/metrics/noop/NoOpMetricsSystem.java | 17
 metrics/core/src/main/java/org/hyperledger/besu/metrics/opentelemetry/OpenTelemetrySystem.java | 15
 metrics/core/src/main/java/org/hyperledger/besu/metrics/prometheus/PrometheusMetricsSystem.java | 130
 metrics/core/src/test-support/java/org/hyperledger/besu/metrics/StubMetricsSystem.java | 21
 metrics/rocksdb/build.gradle | 2
 metrics/rocksdb/src/main/java/org/hyperledger/besu/metrics/rocksdb/RocksDBStats.java | 87
 plugin-api/build.gradle | 2
 plugin-api/src/main/java/org/hyperledger/besu/plugin/services/MetricsSystem.java | 46
 plugin-api/src/main/java/org/hyperledger/besu/plugin/services/metrics/ExternalSummary.java | 36
 plugins/rocksdb/build.gradle | 1
 plugins/rocksdb/src/main/java/org/hyperledger/besu/plugin/services/storage/rocksdb/RocksDBMetricsFactory.java | 6
 23 files changed

@ -25,7 +25,6 @@ import org.hyperledger.besu.ethereum.api.jsonrpc.internal.parameters.JsonRpcPara
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.processor.Tracer;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.processor.TransactionTrace;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.RpcErrorType;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.results.tracing.flat.FlatTraceGenerator;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.results.tracing.flat.RewardTraceGenerator;
import org.hyperledger.besu.ethereum.api.query.BlockchainQueries;
import org.hyperledger.besu.ethereum.api.util.ArrayNodeWrapper;
@ -41,12 +40,11 @@ import org.hyperledger.besu.ethereum.vm.DebugOperationTracer;
import org.hyperledger.besu.evm.worldstate.WorldUpdater;
import org.hyperledger.besu.metrics.BesuMetricCategory;
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
import org.hyperledger.besu.metrics.prometheus.PrometheusMetricsSystem;
import org.hyperledger.besu.plugin.services.MetricsSystem;
import org.hyperledger.besu.plugin.services.metrics.Counter;
import org.hyperledger.besu.plugin.services.metrics.LabelledMetric;
import org.hyperledger.besu.services.pipeline.Pipeline;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
@ -58,10 +56,21 @@ public class TraceBlock extends AbstractBlockParameterMethod {
private static final Logger LOG = LoggerFactory.getLogger(TraceBlock.class);
private static final ObjectMapper MAPPER = new ObjectMapper();
protected final ProtocolSchedule protocolSchedule;
private final LabelledMetric<Counter> outputCounter;
public TraceBlock(final ProtocolSchedule protocolSchedule, final BlockchainQueries queries) {
public TraceBlock(
final ProtocolSchedule protocolSchedule,
final BlockchainQueries queries,
final MetricsSystem metricsSystem) {
super(queries);
this.protocolSchedule = protocolSchedule;
this.outputCounter =
metricsSystem.createLabelledCounter(
BesuMetricCategory.BLOCKCHAIN,
"transactions_traceblock_pipeline_processed_total",
"Number of transactions processed for each block",
"step",
"action");
}
@Override
@ -115,14 +124,6 @@ public class TraceBlock extends AbstractBlockParameterMethod {
final ChainUpdater chainUpdater = new ChainUpdater(traceableState);
TransactionSource transactionSource = new TransactionSource(block);
final LabelledMetric<Counter> outputCounter =
new PrometheusMetricsSystem(BesuMetricCategory.DEFAULT_METRIC_CATEGORIES, false)
.createLabelledCounter(
BesuMetricCategory.BLOCKCHAIN,
"transactions_traceblock_pipeline_processed_total",
"Number of transactions processed for each block",
"step",
"action");
DebugOperationTracer debugOperationTracer =
new DebugOperationTracer(new TraceOptions(false, false, true), false);
ExecuteTransactionStep executeTransactionStep =
@ -173,18 +174,6 @@ public class TraceBlock extends AbstractBlockParameterMethod {
.orElse(emptyResult());
}
protected void generateTracesFromTransactionTraceAndBlock(
final Optional<FilterParameter> filterParameter,
final List<TransactionTrace> transactionTraces,
final Block block,
final ArrayNodeWrapper resultArrayNode) {
transactionTraces.forEach(
transactionTrace ->
FlatTraceGenerator.generateFromTransactionTraceAndBlock(
protocolSchedule, transactionTrace, block)
.forEachOrdered(resultArrayNode::addPOJO));
}
protected void generateRewardsFromBlock(
final Optional<FilterParameter> maybeFilterParameter,
final Block block,

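The hunks above replace TraceBlock's per-request `new PrometheusMetricsSystem(...)` with a counter created once from whatever MetricsSystem is injected through the constructor. A minimal wiring sketch, assuming Mockito mocks as in the existing JSON-RPC tests (the class and method names here are illustrative only, not part of the change):

```java
import static org.mockito.Mockito.mock;

import org.hyperledger.besu.ethereum.api.jsonrpc.internal.methods.TraceBlock;
import org.hyperledger.besu.ethereum.api.query.BlockchainQueries;
import org.hyperledger.besu.ethereum.mainnet.ProtocolSchedule;
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
import org.hyperledger.besu.plugin.services.MetricsSystem;

class TraceBlockWiringSketch {

  TraceBlock createForTest() {
    final ProtocolSchedule protocolSchedule = mock(ProtocolSchedule.class);
    final BlockchainQueries blockchainQueries = mock(BlockchainQueries.class);
    // Tests can pass a NoOpMetricsSystem; production wiring passes the node-wide instance,
    // so the pipeline counter is created exactly once per JSON-RPC method object.
    final MetricsSystem metricsSystem = new NoOpMetricsSystem();
    return new TraceBlock(protocolSchedule, blockchainQueries, metricsSystem);
  }
}
```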
@ -23,7 +23,6 @@ import org.hyperledger.besu.ethereum.api.jsonrpc.internal.exception.InvalidJsonR
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.parameters.BlockParameter;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.parameters.FilterParameter;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.parameters.JsonRpcParameter.JsonRpcParameterException;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.processor.BlockTracer;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.processor.Tracer;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.processor.TransactionTrace;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcErrorResponse;
@ -45,7 +44,7 @@ import org.hyperledger.besu.ethereum.mainnet.ProtocolSpec;
import org.hyperledger.besu.ethereum.vm.DebugOperationTracer;
import org.hyperledger.besu.metrics.BesuMetricCategory;
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
import org.hyperledger.besu.metrics.prometheus.PrometheusMetricsSystem;
import org.hyperledger.besu.plugin.services.MetricsSystem;
import org.hyperledger.besu.plugin.services.metrics.Counter;
import org.hyperledger.besu.plugin.services.metrics.LabelledMetric;
import org.hyperledger.besu.services.pipeline.Pipeline;
@ -58,7 +57,6 @@ import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
@ -68,17 +66,24 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TraceFilter extends TraceBlock {
private static final Logger LOG = LoggerFactory.getLogger(TraceFilter.class);
private final Long maxRange;
private final LabelledMetric<Counter> outputCounter;
public TraceFilter(
final Supplier<BlockTracer> blockTracerSupplier,
final ProtocolSchedule protocolSchedule,
final BlockchainQueries blockchainQueries,
final Long maxRange) {
super(protocolSchedule, blockchainQueries);
final Long maxRange,
final MetricsSystem metricsSystem) {
super(protocolSchedule, blockchainQueries, metricsSystem);
this.maxRange = maxRange;
this.outputCounter =
metricsSystem.createLabelledCounter(
BesuMetricCategory.BLOCKCHAIN,
"transactions_tracefilter_pipeline_processed_total",
"Number of transactions processed for trace_filter",
"step",
"action");
}
@Override
@ -157,15 +162,6 @@ public class TraceFilter extends TraceBlock {
final MainnetTransactionProcessor transactionProcessor =
protocolSpec.getTransactionProcessor();
final ChainUpdater chainUpdater = new ChainUpdater(traceableState);
final LabelledMetric<Counter> outputCounter =
new PrometheusMetricsSystem(
BesuMetricCategory.DEFAULT_METRIC_CATEGORIES, false)
.createLabelledCounter(
BesuMetricCategory.BLOCKCHAIN,
"transactions_tracefilter_pipeline_processed_total",
"Number of transactions processed for trace_filter",
"step",
"action");
DebugOperationTracer debugOperationTracer =
new DebugOperationTracer(new TraceOptions(false, false, true), false);

@ -39,7 +39,7 @@ import org.hyperledger.besu.ethereum.mainnet.ProtocolSpec;
import org.hyperledger.besu.ethereum.vm.DebugOperationTracer;
import org.hyperledger.besu.metrics.BesuMetricCategory;
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
import org.hyperledger.besu.metrics.prometheus.PrometheusMetricsSystem;
import org.hyperledger.besu.plugin.services.MetricsSystem;
import org.hyperledger.besu.plugin.services.metrics.Counter;
import org.hyperledger.besu.plugin.services.metrics.LabelledMetric;
import org.hyperledger.besu.services.pipeline.Pipeline;
@ -57,13 +57,23 @@ import org.slf4j.LoggerFactory;
public class TraceReplayBlockTransactions extends AbstractBlockParameterMethod {
private static final Logger LOG = LoggerFactory.getLogger(TraceReplayBlockTransactions.class);
private final ProtocolSchedule protocolSchedule;
private static final ObjectMapper MAPPER = new ObjectMapper();
private final ProtocolSchedule protocolSchedule;
private final LabelledMetric<Counter> outputCounter;
public TraceReplayBlockTransactions(
final ProtocolSchedule protocolSchedule, final BlockchainQueries queries) {
final ProtocolSchedule protocolSchedule,
final BlockchainQueries queries,
final MetricsSystem metricsSystem) {
super(queries);
this.protocolSchedule = protocolSchedule;
this.outputCounter =
metricsSystem.createLabelledCounter(
BesuMetricCategory.BLOCKCHAIN,
"transactions_tracereplayblock_pipeline_processed_total",
"Number of transactions processed for each block",
"step",
"action");
}
@Override
@ -131,14 +141,7 @@ public class TraceReplayBlockTransactions extends AbstractBlockParameterMethod {
final ChainUpdater chainUpdater = new ChainUpdater(traceableState);
final TransactionSource transactionSource = new TransactionSource(block);
final LabelledMetric<Counter> outputCounter =
new PrometheusMetricsSystem(BesuMetricCategory.DEFAULT_METRIC_CATEGORIES, false)
.createLabelledCounter(
BesuMetricCategory.BLOCKCHAIN,
"transactions_tracereplayblock_pipeline_processed_total",
"Number of transactions processed for each block",
"step",
"action");
final DebugOperationTracer debugOperationTracer =
new DebugOperationTracer(new TraceOptions(false, false, true), false);
final ExecuteTransactionStep executeTransactionStep =

@ -151,7 +151,11 @@ public class JsonRpcMethodsFactory {
blockchainQueries, protocolSchedule, transactionPool, privacyParameters),
new Web3JsonRpcMethods(clientNodeName),
new TraceJsonRpcMethods(
blockchainQueries, protocolSchedule, protocolContext, apiConfiguration),
blockchainQueries,
protocolSchedule,
protocolContext,
apiConfiguration,
metricsSystem),
new TxPoolJsonRpcMethods(transactionPool),
new PluginsJsonRpcMethods(namedPlugins));

@ -31,6 +31,7 @@ import org.hyperledger.besu.ethereum.api.jsonrpc.internal.processor.BlockTracer;
import org.hyperledger.besu.ethereum.api.query.BlockchainQueries;
import org.hyperledger.besu.ethereum.mainnet.ProtocolSchedule;
import org.hyperledger.besu.ethereum.transaction.TransactionSimulator;
import org.hyperledger.besu.plugin.services.MetricsSystem;
import java.util.Map;
@ -38,19 +39,21 @@ public class TraceJsonRpcMethods extends ApiGroupJsonRpcMethods {
private final BlockchainQueries blockchainQueries;
private final ProtocolSchedule protocolSchedule;
private final ApiConfiguration apiConfiguration;
private final ProtocolContext protocolContext;
private final MetricsSystem metricsSystem;
TraceJsonRpcMethods(
final BlockchainQueries blockchainQueries,
final ProtocolSchedule protocolSchedule,
final ProtocolContext protocolContext,
final ApiConfiguration apiConfiguration) {
final ApiConfiguration apiConfiguration,
final MetricsSystem metricsSystem) {
this.blockchainQueries = blockchainQueries;
this.protocolSchedule = protocolSchedule;
this.protocolContext = protocolContext;
this.apiConfiguration = apiConfiguration;
this.metricsSystem = metricsSystem;
}
@Override
@ -63,16 +66,16 @@ public class TraceJsonRpcMethods extends ApiGroupJsonRpcMethods {
final BlockReplay blockReplay =
new BlockReplay(protocolSchedule, protocolContext, blockchainQueries.getBlockchain());
return mapOf(
new TraceReplayBlockTransactions(protocolSchedule, blockchainQueries),
new TraceReplayBlockTransactions(protocolSchedule, blockchainQueries, metricsSystem),
new TraceFilter(
() -> new BlockTracer(blockReplay),
protocolSchedule,
blockchainQueries,
apiConfiguration.getMaxTraceFilterRange()),
apiConfiguration.getMaxTraceFilterRange(),
metricsSystem),
new TraceGet(() -> new BlockTracer(blockReplay), blockchainQueries, protocolSchedule),
new TraceTransaction(
() -> new BlockTracer(blockReplay), protocolSchedule, blockchainQueries),
new TraceBlock(protocolSchedule, blockchainQueries),
new TraceBlock(protocolSchedule, blockchainQueries, metricsSystem),
new TraceCall(
blockchainQueries,
protocolSchedule,

@ -26,6 +26,7 @@ import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcRespon
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.RpcErrorType;
import org.hyperledger.besu.ethereum.api.query.BlockchainQueries;
import org.hyperledger.besu.ethereum.mainnet.ProtocolSchedule;
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
import java.util.function.Supplier;
@ -69,7 +70,8 @@ public class TraceFilterTest {
new JsonRpcRequest("2.0", "trace_filter", new Object[] {filterParameter}));
method =
new TraceFilter(blockTracerSupplier, protocolSchedule, blockchainQueries, maxFilterRange);
new TraceFilter(
protocolSchedule, blockchainQueries, maxFilterRange, new NoOpMetricsSystem());
final JsonRpcResponse response = method.response(request);
assertThat(response).isInstanceOf(JsonRpcErrorResponse.class);

@ -65,8 +65,6 @@ dependencies {
implementation 'org.immutables:value-annotations'
implementation 'tech.pegasys:jc-kzg-4844'
implementation 'io.prometheus:simpleclient_guava'
implementation 'org.xerial.snappy:snappy-java'
annotationProcessor 'org.immutables:value'

@ -19,6 +19,7 @@ import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.Collections.emptyList;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static org.hyperledger.besu.metrics.BesuMetricCategory.BLOCKCHAIN;
import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.ethereum.chain.BlockchainStorage.Updater;
@ -32,7 +33,6 @@ import org.hyperledger.besu.ethereum.core.Transaction;
import org.hyperledger.besu.ethereum.core.TransactionReceipt;
import org.hyperledger.besu.metrics.BesuMetricCategory;
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
import org.hyperledger.besu.metrics.prometheus.PrometheusMetricsSystem;
import org.hyperledger.besu.plugin.services.MetricsSystem;
import org.hyperledger.besu.plugin.services.metrics.Counter;
import org.hyperledger.besu.util.InvalidConfigurationException;
@ -56,7 +56,6 @@ import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Lists;
import com.google.common.collect.Streams;
import io.prometheus.client.guava.cache.CacheMetricsCollector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -134,13 +133,12 @@ public class DefaultBlockchain implements MutableBlockchain {
totalDifficultyCache =
Optional.of(
CacheBuilder.newBuilder().recordStats().maximumSize(numberOfBlocksToCache).build());
CacheMetricsCollector cacheMetrics = new CacheMetricsCollector();
cacheMetrics.addCache("blockHeaders", blockHeadersCache.get());
cacheMetrics.addCache("blockBodies", blockBodiesCache.get());
cacheMetrics.addCache("transactionReceipts", transactionReceiptsCache.get());
cacheMetrics.addCache("totalDifficulty", totalDifficultyCache.get());
if (metricsSystem instanceof PrometheusMetricsSystem prometheusMetricsSystem)
prometheusMetricsSystem.addCollector(BesuMetricCategory.BLOCKCHAIN, () -> cacheMetrics);
metricsSystem.createGuavaCacheCollector(BLOCKCHAIN, "blockHeaders", blockHeadersCache.get());
metricsSystem.createGuavaCacheCollector(BLOCKCHAIN, "blockBodies", blockBodiesCache.get());
metricsSystem.createGuavaCacheCollector(
BLOCKCHAIN, "transactionReceipts", transactionReceiptsCache.get());
metricsSystem.createGuavaCacheCollector(
BLOCKCHAIN, "totalDifficulty", totalDifficultyCache.get());
} else {
blockHeadersCache = Optional.empty();
blockBodiesCache = Optional.empty();
@ -155,11 +153,11 @@ public class DefaultBlockchain implements MutableBlockchain {
private void createCounters(final MetricsSystem metricsSystem) {
gasUsedCounter =
metricsSystem.createCounter(
BesuMetricCategory.BLOCKCHAIN, "chain_head_gas_used_counter", "Counter for Gas used");
BLOCKCHAIN, "chain_head_gas_used_counter", "Counter for Gas used");
numberOfTransactionsCounter =
metricsSystem.createCounter(
BesuMetricCategory.BLOCKCHAIN,
BLOCKCHAIN,
"chain_head_transaction_count_counter",
"Counter for the number of transactions");
}
@ -184,37 +182,37 @@ public class DefaultBlockchain implements MutableBlockchain {
this::getSafeBlockNumber);
metricsSystem.createGauge(
BesuMetricCategory.BLOCKCHAIN,
BLOCKCHAIN,
"difficulty_total",
"Total difficulty of the chainhead",
() -> this.getChainHead().getTotalDifficulty().toBigInteger().doubleValue());
metricsSystem.createLongGauge(
BesuMetricCategory.BLOCKCHAIN,
BLOCKCHAIN,
"chain_head_timestamp",
"Timestamp from the current chain head",
() -> getChainHeadHeader().getTimestamp());
metricsSystem.createLongGauge(
BesuMetricCategory.BLOCKCHAIN,
BLOCKCHAIN,
"chain_head_gas_used",
"Gas used by the current chain head block",
() -> getChainHeadHeader().getGasUsed());
metricsSystem.createLongGauge(
BesuMetricCategory.BLOCKCHAIN,
BLOCKCHAIN,
"chain_head_gas_limit",
"Block gas limit of the current chain head block",
() -> getChainHeadHeader().getGasLimit());
metricsSystem.createIntegerGauge(
BesuMetricCategory.BLOCKCHAIN,
BLOCKCHAIN,
"chain_head_transaction_count",
"Number of transactions in the current chain head block",
() -> chainHeadTransactionCount);
metricsSystem.createIntegerGauge(
BesuMetricCategory.BLOCKCHAIN,
BLOCKCHAIN,
"chain_head_ommer_count",
"Number of ommers in the current chain head block",
() -> chainHeadOmmerCount);

@ -14,6 +14,8 @@
*/
package org.hyperledger.besu.ethereum.trie.diffbased.bonsai.cache;
import static org.hyperledger.besu.metrics.BesuMetricCategory.BLOCKCHAIN;
import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.datatypes.StorageSlotKey;
@ -22,9 +24,7 @@ import org.hyperledger.besu.ethereum.trie.MerkleTrieException;
import org.hyperledger.besu.ethereum.trie.diffbased.bonsai.storage.BonsaiWorldStateKeyValueStorage;
import org.hyperledger.besu.ethereum.trie.diffbased.common.StorageSubscriber;
import org.hyperledger.besu.ethereum.trie.patricia.StoredMerklePatriciaTrie;
import org.hyperledger.besu.metrics.BesuMetricCategory;
import org.hyperledger.besu.metrics.ObservableMetricsSystem;
import org.hyperledger.besu.metrics.prometheus.PrometheusMetricsSystem;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
@ -33,7 +33,6 @@ import java.util.function.Function;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import io.prometheus.client.guava.cache.CacheMetricsCollector;
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
@ -47,12 +46,8 @@ public class BonsaiCachedMerkleTrieLoader implements StorageSubscriber {
CacheBuilder.newBuilder().recordStats().maximumSize(STORAGE_CACHE_SIZE).build();
public BonsaiCachedMerkleTrieLoader(final ObservableMetricsSystem metricsSystem) {
CacheMetricsCollector cacheMetrics = new CacheMetricsCollector();
cacheMetrics.addCache("accountsNodes", accountNodes);
cacheMetrics.addCache("storageNodes", storageNodes);
if (metricsSystem instanceof PrometheusMetricsSystem prometheusMetricsSystem)
prometheusMetricsSystem.addCollector(BesuMetricCategory.BLOCKCHAIN, () -> cacheMetrics);
metricsSystem.createGuavaCacheCollector(BLOCKCHAIN, "accountsNodes", accountNodes);
metricsSystem.createGuavaCacheCollector(BLOCKCHAIN, "storageNodes", storageNodes);
}
public void preLoadAccount(

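Both DefaultBlockchain and BonsaiCachedMerkleTrieLoader now hand their Guava caches to the metrics system instead of registering a Prometheus CacheMetricsCollector directly, so cache statistics are reported regardless of which metrics backend is configured. A minimal sketch of the new call, assuming a node-wide MetricsSystem is available (the holder class and cache name are hypothetical):

```java
import org.hyperledger.besu.metrics.BesuMetricCategory;
import org.hyperledger.besu.plugin.services.MetricsSystem;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

class CachedCodeStore {

  // recordStats() is required for hit/miss/eviction figures to be meaningful.
  private final Cache<String, byte[]> cache =
      CacheBuilder.newBuilder().recordStats().maximumSize(10_000).build();

  CachedCodeStore(final MetricsSystem metricsSystem) {
    // The cache name must be unique; PrometheusMetricsSystem throws if it is reused.
    metricsSystem.createGuavaCacheCollector(BesuMetricCategory.BLOCKCHAIN, "codeCache", cache);
  }
}
```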
@ -43,7 +43,6 @@ dependencies {
implementation 'com.google.guava:guava'
implementation 'dnsjava:dnsjava'
implementation 'io.netty:netty-transport-native-unix-common'
implementation 'io.prometheus:simpleclient'
implementation 'io.vertx:vertx-core'
implementation 'io.tmio:tuweni-bytes'

@ -57,6 +57,7 @@ dependencies {
implementation 'io.prometheus:simpleclient'
implementation 'io.prometheus:simpleclient_common'
implementation 'io.prometheus:simpleclient_guava'
implementation 'io.prometheus:simpleclient_hotspot'
implementation 'io.prometheus:simpleclient_pushgateway'
implementation 'io.vertx:vertx-core'

@ -17,42 +17,25 @@ package org.hyperledger.besu.metrics;
import org.hyperledger.besu.plugin.services.MetricsSystem;
import org.hyperledger.besu.plugin.services.metrics.MetricCategory;
import java.util.Set;
import java.util.stream.Stream;
/** The interface Observable metrics system. */
/** The observable metrics system is used to inspect metrics for debug reasons */
public interface ObservableMetricsSystem extends MetricsSystem {
/**
* Stream observations.
* Stream observations by category
*
* @param category the category
* @return the stream
* @return the observations stream
*/
Stream<Observation> streamObservations(MetricCategory category);
/**
* Stream observations.
* Stream observations
*
* @return the stream
* @return the observations stream
*/
Stream<Observation> streamObservations();
/**
* Provides an immutable view into the metric categories enabled for metric collection.
*
* @return the set of enabled metric categories.
*/
Set<MetricCategory> getEnabledCategories();
/**
* Checks if a particular category of metrics is enabled.
*
* @param category the category to check
* @return true if the category is enabled, false otherwise
*/
default boolean isCategoryEnabled(final MetricCategory category) {
return getEnabledCategories().stream()
.anyMatch(metricCategory -> metricCategory.getName().equals(category.getName()));
}
/** Unregister all the collectors and perform other cleanup tasks */
void shutdown();
}

@ -17,6 +17,7 @@ package org.hyperledger.besu.metrics.noop;
import org.hyperledger.besu.metrics.ObservableMetricsSystem;
import org.hyperledger.besu.metrics.Observation;
import org.hyperledger.besu.plugin.services.metrics.Counter;
import org.hyperledger.besu.plugin.services.metrics.ExternalSummary;
import org.hyperledger.besu.plugin.services.metrics.LabelledGauge;
import org.hyperledger.besu.plugin.services.metrics.LabelledMetric;
import org.hyperledger.besu.plugin.services.metrics.MetricCategory;
@ -27,9 +28,11 @@ import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.function.DoubleSupplier;
import java.util.function.Supplier;
import java.util.stream.Stream;
import com.google.common.base.Preconditions;
import com.google.common.cache.Cache;
/** The NoOp metrics system. */
public class NoOpMetricsSystem implements ObservableMetricsSystem {
@ -113,6 +116,13 @@ public class NoOpMetricsSystem implements ObservableMetricsSystem {
return getOperationTimerLabelledMetric(labelNames.length);
}
@Override
public void trackExternalSummary(
final MetricCategory category,
final String name,
final String help,
final Supplier<ExternalSummary> summarySupplier) {}
@Override
public LabelledMetric<OperationTimer> createLabelledTimer(
final MetricCategory category,
@ -144,6 +154,10 @@ public class NoOpMetricsSystem implements ObservableMetricsSystem {
final String help,
final DoubleSupplier valueSupplier) {}
@Override
public void createGuavaCacheCollector(
final MetricCategory category, final String name, final Cache<?, ?> cache) {}
@Override
public LabelledGauge createLabelledGauge(
final MetricCategory category,
@ -187,6 +201,9 @@ public class NoOpMetricsSystem implements ObservableMetricsSystem {
return Collections.emptySet();
}
@Override
public void shutdown() {}
/**
* The Label counting NoOp metric.
*

@ -20,6 +20,7 @@ import org.hyperledger.besu.metrics.Observation;
import org.hyperledger.besu.metrics.StandardMetricCategory;
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
import org.hyperledger.besu.plugin.services.metrics.Counter;
import org.hyperledger.besu.plugin.services.metrics.ExternalSummary;
import org.hyperledger.besu.plugin.services.metrics.LabelledGauge;
import org.hyperledger.besu.plugin.services.metrics.LabelledMetric;
import org.hyperledger.besu.plugin.services.metrics.MetricCategory;
@ -40,9 +41,11 @@ import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.function.DoubleSupplier;
import java.util.function.Supplier;
import java.util.stream.Stream;
import javax.inject.Singleton;
import com.google.common.cache.Cache;
import com.google.common.collect.ImmutableSet;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
@ -242,6 +245,13 @@ public class OpenTelemetrySystem implements ObservableMetricsSystem {
return createLabelledTimer(category, name, help, labelNames);
}
@Override
public void trackExternalSummary(
final MetricCategory category,
final String name,
final String help,
final Supplier<ExternalSummary> summarySupplier) {}
@Override
public LabelledMetric<OperationTimer> createLabelledTimer(
final MetricCategory category,
@ -277,6 +287,10 @@ public class OpenTelemetrySystem implements ObservableMetricsSystem {
}
}
@Override
public void createGuavaCacheCollector(
final MetricCategory category, final String name, final Cache<?, ?> cache) {}
@Override
public LabelledGauge createLabelledGauge(
final MetricCategory category,
@ -376,6 +390,7 @@ public class OpenTelemetrySystem implements ObservableMetricsSystem {
}
/** Shuts down the OpenTelemetry exporters, blocking until they have completed orderly. */
@Override
public void shutdown() {
final CompletableResultCode result =
CompletableResultCode.ofAll(

@ -18,6 +18,7 @@ import org.hyperledger.besu.metrics.ObservableMetricsSystem;
import org.hyperledger.besu.metrics.Observation;
import org.hyperledger.besu.metrics.StandardMetricCategory;
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
import org.hyperledger.besu.plugin.services.metrics.ExternalSummary;
import org.hyperledger.besu.plugin.services.metrics.LabelledGauge;
import org.hyperledger.besu.plugin.services.metrics.LabelledMetric;
import org.hyperledger.besu.plugin.services.metrics.MetricCategory;
@ -34,6 +35,7 @@ import java.util.function.DoubleSupplier;
import java.util.function.Supplier;
import java.util.stream.Stream;
import com.google.common.cache.Cache;
import com.google.common.collect.ImmutableSet;
import io.prometheus.client.Collector;
import io.prometheus.client.Collector.MetricFamilySamples;
@ -42,6 +44,7 @@ import io.prometheus.client.CollectorRegistry;
import io.prometheus.client.Counter;
import io.prometheus.client.Histogram;
import io.prometheus.client.Summary;
import io.prometheus.client.guava.cache.CacheMetricsCollector;
import io.prometheus.client.hotspot.BufferPoolsExports;
import io.prometheus.client.hotspot.ClassLoadingExports;
import io.prometheus.client.hotspot.GarbageCollectorExports;
@ -52,6 +55,7 @@ import io.vertx.core.impl.ConcurrentHashSet;
/** The Prometheus metrics system. */
public class PrometheusMetricsSystem implements ObservableMetricsSystem {
private static final List<String> EXTERNAL_SUMMARY_LABELS = List.of("quantile");
private final Map<MetricCategory, Collection<Collector>> collectors = new ConcurrentHashMap<>();
private final CollectorRegistry registry = new CollectorRegistry(true);
@ -60,6 +64,9 @@ public class PrometheusMetricsSystem implements ObservableMetricsSystem {
private final Map<String, LabelledMetric<OperationTimer>> cachedTimers =
new ConcurrentHashMap<>();
private final Set<String> totalSuffixedCounters = new ConcurrentHashSet<>();
private final Map<MetricCategory, CacheMetricsCollector> guavaCacheCollectors =
new ConcurrentHashMap<>();
private final Set<String> guavaCacheNames = new ConcurrentHashSet<>();
private final Set<MetricCategory> enabledCategories;
private final boolean timersEnabled;
@ -78,12 +85,16 @@ public class PrometheusMetricsSystem implements ObservableMetricsSystem {
/** Init. */
public void init() {
addCollector(StandardMetricCategory.PROCESS, StandardExports::new);
addCollector(StandardMetricCategory.JVM, MemoryPoolsExports::new);
addCollector(StandardMetricCategory.JVM, BufferPoolsExports::new);
addCollector(StandardMetricCategory.JVM, GarbageCollectorExports::new);
addCollector(StandardMetricCategory.JVM, ThreadExports::new);
addCollector(StandardMetricCategory.JVM, ClassLoadingExports::new);
if (isCategoryEnabled(StandardMetricCategory.PROCESS)) {
registerCollector(StandardMetricCategory.PROCESS, new StandardExports());
}
if (isCategoryEnabled(StandardMetricCategory.JVM)) {
registerCollector(StandardMetricCategory.JVM, new MemoryPoolsExports());
registerCollector(StandardMetricCategory.JVM, new BufferPoolsExports());
registerCollector(StandardMetricCategory.JVM, new GarbageCollectorExports());
registerCollector(StandardMetricCategory.JVM, new ThreadExports());
registerCollector(StandardMetricCategory.JVM, new ClassLoadingExports());
}
}
@Override
@ -103,7 +114,7 @@ public class PrometheusMetricsSystem implements ObservableMetricsSystem {
(k) -> {
if (isCategoryEnabled(category)) {
final Counter counter = Counter.build(metricName, help).labelNames(labelNames).create();
addCollectorUnchecked(category, counter);
registerCollector(category, counter);
return new PrometheusCounter(counter);
} else {
return NoOpMetricsSystem.getCounterLabelledMetric(labelNames.length);
@ -132,7 +143,7 @@ public class PrometheusMetricsSystem implements ObservableMetricsSystem {
.quantile(1.0, 0)
.labelNames(labelNames)
.create();
addCollectorUnchecked(category, summary);
registerCollector(category, summary);
return new PrometheusTimer(summary);
} else {
return NoOpMetricsSystem.getOperationTimerLabelledMetric(labelNames.length);
@ -153,7 +164,7 @@ public class PrometheusMetricsSystem implements ObservableMetricsSystem {
if (timersEnabled && isCategoryEnabled(category)) {
final Histogram histogram =
Histogram.build(metricName, help).labelNames(labelNames).buckets(1D).create();
addCollectorUnchecked(category, histogram);
registerCollector(category, histogram);
return new PrometheusSimpleTimer(histogram);
} else {
return NoOpMetricsSystem.getOperationTimerLabelledMetric(labelNames.length);
@ -170,7 +181,61 @@ public class PrometheusMetricsSystem implements ObservableMetricsSystem {
final String metricName = convertToPrometheusName(category, name);
if (isCategoryEnabled(category)) {
final Collector collector = new CurrentValueCollector(metricName, help, valueSupplier);
addCollectorUnchecked(category, collector);
registerCollector(category, collector);
}
}
@Override
public void trackExternalSummary(
final MetricCategory category,
final String name,
final String help,
final Supplier<ExternalSummary> summarySupplier) {
if (isCategoryEnabled(category)) {
final var externalSummaryCollector =
new Collector() {
@Override
public List<MetricFamilySamples> collect() {
final var externalSummary = summarySupplier.get();
final var quantileValues =
externalSummary.quantiles().stream()
.map(
quantile ->
new Sample(
name,
EXTERNAL_SUMMARY_LABELS,
List.of(Double.toString(quantile.quantile())),
quantile.value()))
.toList();
return List.of(
new MetricFamilySamples(
name, Type.SUMMARY, "RocksDB histogram for " + name, quantileValues));
}
};
registerCollector(category, externalSummaryCollector);
}
}
@Override
public void createGuavaCacheCollector(
final MetricCategory category, final String name, final Cache<?, ?> cache) {
if (isCategoryEnabled(category)) {
if (guavaCacheNames.contains(name)) {
throw new IllegalStateException("Cache already registered: " + name);
}
guavaCacheNames.add(name);
final var guavaCacheCollector =
guavaCacheCollectors.computeIfAbsent(
category,
unused -> {
final var cmc = new CacheMetricsCollector();
registerCollector(category, cmc);
return cmc;
});
guavaCacheCollector.addCache(name, cache);
}
}
@ -183,46 +248,33 @@ public class PrometheusMetricsSystem implements ObservableMetricsSystem {
final String metricName = convertToPrometheusName(category, name);
if (isCategoryEnabled(category)) {
final PrometheusGauge gauge = new PrometheusGauge(metricName, help, List.of(labelNames));
addCollectorUnchecked(category, gauge);
registerCollector(category, gauge);
return gauge;
}
return NoOpMetricsSystem.getLabelledGauge(labelNames.length);
}
/**
* Add collector.
*
* @param category the category
* @param metricSupplier the metric supplier
*/
public void addCollector(
final MetricCategory category, final Supplier<Collector> metricSupplier) {
if (isCategoryEnabled(category)) {
addCollectorUnchecked(category, metricSupplier.get());
}
}
private void addCollectorUnchecked(final MetricCategory category, final Collector metric) {
final Collection<Collector> metrics =
private void registerCollector(final MetricCategory category, final Collector collector) {
final Collection<Collector> categoryCollectors =
this.collectors.computeIfAbsent(
category, key -> Collections.newSetFromMap(new ConcurrentHashMap<>()));
final List<String> newSamples =
metric.collect().stream().map(metricFamilySamples -> metricFamilySamples.name).toList();
collector.collect().stream().map(metricFamilySamples -> metricFamilySamples.name).toList();
metrics.stream()
categoryCollectors.stream()
.filter(
collector ->
collector.collect().stream()
c ->
c.collect().stream()
.anyMatch(metricFamilySamples -> newSamples.contains(metricFamilySamples.name)))
.findFirst()
.ifPresent(
collector -> {
metrics.remove(collector);
registry.unregister(collector);
c -> {
categoryCollectors.remove(c);
registry.unregister(c);
});
metrics.add(metric.register(registry));
categoryCollectors.add(collector.register(registry));
}
@Override
@ -237,6 +289,16 @@ public class PrometheusMetricsSystem implements ObservableMetricsSystem {
return collectors.keySet().stream().flatMap(this::streamObservations);
}
@Override
public void shutdown() {
registry.clear();
collectors.clear();
cachedCounters.clear();
cachedTimers.clear();
guavaCacheCollectors.clear();
guavaCacheNames.clear();
}
private Stream<Observation> convertSamplesToObservations(
final MetricCategory category, final MetricFamilySamples familySamples) {
return familySamples.samples.stream()

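PrometheusMetricsSystem now routes every registration through the private registerCollector, which first unregisters any existing collector reporting the same sample names, and the new shutdown() clears the registry and internal caches so the system no longer has to be recreated. A lifecycle sketch, assuming the two-argument constructor seen in the removed TraceBlock code remains available (the counter name and class are illustrative):

```java
import org.hyperledger.besu.metrics.BesuMetricCategory;
import org.hyperledger.besu.metrics.prometheus.PrometheusMetricsSystem;

class MetricsLifecycleSketch {

  void run() {
    final PrometheusMetricsSystem metricsSystem =
        new PrometheusMetricsSystem(BesuMetricCategory.DEFAULT_METRIC_CATEGORIES, false);
    metricsSystem.init(); // JVM/process exports register only if their category is enabled

    metricsSystem
        .createCounter(BesuMetricCategory.BLOCKCHAIN, "demo_counter", "Illustrative counter")
        .inc();

    // Observations can still be inspected for debugging through ObservableMetricsSystem.
    System.out.println(metricsSystem.streamObservations().count() + " observations recorded");

    metricsSystem.shutdown(); // unregisters every collector and clears the internal caches
  }
}
```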
@ -18,6 +18,7 @@ import static java.util.Arrays.asList;
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
import org.hyperledger.besu.plugin.services.metrics.Counter;
import org.hyperledger.besu.plugin.services.metrics.ExternalSummary;
import org.hyperledger.besu.plugin.services.metrics.LabelledGauge;
import org.hyperledger.besu.plugin.services.metrics.LabelledMetric;
import org.hyperledger.besu.plugin.services.metrics.MetricCategory;
@ -29,8 +30,11 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.DoubleSupplier;
import java.util.function.Supplier;
import java.util.stream.Stream;
import com.google.common.cache.Cache;
public class StubMetricsSystem implements ObservableMetricsSystem {
private final Map<String, StubLabelledCounter> counters = new HashMap<>();
@ -84,6 +88,13 @@ public class StubMetricsSystem implements ObservableMetricsSystem {
return labelValues -> NoOpMetricsSystem.NO_OP_OPERATION_TIMER;
}
@Override
public void trackExternalSummary(
final MetricCategory category,
final String name,
final String help,
final Supplier<ExternalSummary> summarySupplier) {}
@Override
public void createGauge(
final MetricCategory category,
@ -93,6 +104,10 @@ public class StubMetricsSystem implements ObservableMetricsSystem {
gauges.put(name, valueSupplier);
}
@Override
public void createGuavaCacheCollector(
final MetricCategory category, final String name, final Cache<?, ?> cache) {}
public double getGaugeValue(final String name) {
final DoubleSupplier gauge = gauges.get(name);
if (gauge == null) {
@ -116,6 +131,12 @@ public class StubMetricsSystem implements ObservableMetricsSystem {
return Collections.emptySet();
}
@Override
public void shutdown() {
counters.clear();
gauges.clear();
}
public static class StubLabelledCounter implements LabelledMetric<Counter> {
private final Map<List<String>, StubCounter> metrics = new HashMap<>();

@ -40,7 +40,5 @@ dependencies {
implementation project(':metrics:core')
implementation project(':plugin-api')
implementation 'com.google.guava:guava'
implementation 'io.prometheus:simpleclient'
implementation 'org.rocksdb:rocksdbjni'
}

@ -16,15 +16,14 @@ package org.hyperledger.besu.metrics.rocksdb;
import static org.hyperledger.besu.metrics.BesuMetricCategory.KVSTORE_ROCKSDB_STATS;
import org.hyperledger.besu.metrics.prometheus.PrometheusMetricsSystem;
import org.hyperledger.besu.plugin.services.MetricsSystem;
import org.hyperledger.besu.plugin.services.metrics.ExternalSummary;
import org.hyperledger.besu.plugin.services.metrics.ExternalSummary.Quantile;
import org.hyperledger.besu.plugin.services.metrics.MetricCategory;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import io.prometheus.client.Collector;
import org.rocksdb.HistogramData;
import org.rocksdb.HistogramType;
import org.rocksdb.Statistics;
@ -32,22 +31,9 @@ import org.rocksdb.TickerType;
/** The Rocks db stats. */
public class RocksDBStats {
/** The Labels. */
static final List<String> LABELS = Collections.singletonList("quantile");
/** The Label 50. */
static final List<String> LABEL_50 = Collections.singletonList("0.5");
/** The Label 95. */
static final List<String> LABEL_95 = Collections.singletonList("0.95");
/** The Label 99. */
static final List<String> LABEL_99 = Collections.singletonList("0.99");
/** The constant TICKERS. */
/** The constant TICKER_TYPES. */
// Tickers - RocksDB equivalent of counters
static final TickerType[] TICKERS = {
static final TickerType[] TICKER_TYPES = {
TickerType.BLOCK_CACHE_ADD,
TickerType.BLOCK_CACHE_HIT,
TickerType.BLOCK_CACHE_ADD_FAILURES,
@ -133,9 +119,9 @@ public class RocksDBStats {
TickerType.NUMBER_MULTIGET_KEYS_FOUND,
};
/** The constant HISTOGRAMS. */
/** The constant HISTOGRAM_TYPES. */
// Histograms - treated as prometheus summaries
static final HistogramType[] HISTOGRAMS = {
static final HistogramType[] HISTOGRAM_TYPES = {
HistogramType.DB_GET,
HistogramType.DB_WRITE,
HistogramType.COMPACTION_TIME,
@ -175,47 +161,40 @@ public class RocksDBStats {
* @param category the category
*/
public static void registerRocksDBMetrics(
final Statistics stats,
final PrometheusMetricsSystem metricsSystem,
final MetricCategory category) {
if (!metricsSystem.isCategoryEnabled(category)) {
return;
}
for (final TickerType ticker : TICKERS) {
final String promCounterName = ticker.name().toLowerCase(Locale.ROOT);
final Statistics stats, final MetricsSystem metricsSystem, final MetricCategory category) {
for (final var tickerType : TICKER_TYPES) {
final String promCounterName = tickerType.name().toLowerCase(Locale.ROOT);
metricsSystem.createLongGauge(
category,
promCounterName,
"RocksDB reported statistics for " + ticker.name(),
() -> stats.getTickerCount(ticker));
"RocksDB reported statistics for " + tickerType.name(),
() -> stats.getTickerCount(tickerType));
}
for (final HistogramType histogram : HISTOGRAMS) {
metricsSystem.addCollector(category, () -> histogramToCollector(stats, histogram));
for (final var histogramType : HISTOGRAM_TYPES) {
metricsSystem.trackExternalSummary(
KVSTORE_ROCKSDB_STATS,
KVSTORE_ROCKSDB_STATS.getName() + "_" + histogramType.name().toLowerCase(Locale.ROOT),
"RocksDB histogram for " + histogramType.name(),
() -> provideExternalSummary(stats, histogramType));
}
}
private static Collector histogramToCollector(
final Statistics stats, final HistogramType histogram) {
return new Collector() {
final String metricName =
KVSTORE_ROCKSDB_STATS.getName() + "_" + histogram.name().toLowerCase(Locale.ROOT);
private static ExternalSummary provideExternalSummary(
final Statistics stats, final HistogramType histogramType) {
final HistogramData data = stats.getHistogramData(histogramType);
@Override
public List<MetricFamilySamples> collect() {
final HistogramData data = stats.getHistogramData(histogram);
return Collections.singletonList(
new MetricFamilySamples(
metricName,
Type.SUMMARY,
"RocksDB histogram for " + metricName,
Arrays.asList(
new MetricFamilySamples.Sample(metricName, LABELS, LABEL_50, data.getMedian()),
new MetricFamilySamples.Sample(
metricName, LABELS, LABEL_95, data.getPercentile95()),
new MetricFamilySamples.Sample(
metricName, LABELS, LABEL_99, data.getPercentile99()))));
}
};
return new ExternalSummary(
data.getCount(),
data.getSum(),
List.of(
new Quantile(0.0, data.getMin()),
new Quantile(0.5, data.getMedian()),
new Quantile(0.95, data.getPercentile95()),
new Quantile(0.99, data.getPercentile99()),
new Quantile(1.0, data.getMax())));
}
}

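With tickers exported through createLongGauge and histograms through trackExternalSummary, registerRocksDBMetrics now accepts any MetricsSystem rather than requiring a PrometheusMetricsSystem. A sketch of wiring a RocksDB instance to it, assuming the standard RocksJava API (the database path and class name are hypothetical):

```java
import org.hyperledger.besu.metrics.BesuMetricCategory;
import org.hyperledger.besu.metrics.rocksdb.RocksDBStats;
import org.hyperledger.besu.plugin.services.MetricsSystem;

import org.rocksdb.Options;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;
import org.rocksdb.Statistics;

class RocksDBStatsWiringSketch {

  static RocksDB openWithMetrics(final MetricsSystem metricsSystem, final String path)
      throws RocksDBException {
    RocksDB.loadLibrary();
    final Statistics stats = new Statistics();
    final Options options = new Options().setCreateIfMissing(true).setStatistics(stats);
    final RocksDB db = RocksDB.open(options, path);
    // Tickers become long gauges; histograms become external summaries on the given system.
    RocksDBStats.registerRocksDBMetrics(
        stats, metricsSystem, BesuMetricCategory.KVSTORE_ROCKSDB_STATS);
    return db;
  }
}
```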
@ -71,7 +71,7 @@ Calculated : ${currentHash}
tasks.register('checkAPIChanges', FileStateChecker) {
description = "Checks that the API for the Plugin-API project does not change without deliberate thought"
files = sourceSets.main.allJava.files
knownHash = '8rPIE3fYl48RPRQXxYhMk559e/r+wHSKU9bGSJmruKQ='
knownHash = 'aYWbsgPoKTGDgq9d4QUBvQEaZYbKNJGMiBufzyKnusA='
}
check.dependsOn('checkAPIChanges')

@ -15,14 +15,19 @@
package org.hyperledger.besu.plugin.services;
import org.hyperledger.besu.plugin.services.metrics.Counter;
import org.hyperledger.besu.plugin.services.metrics.ExternalSummary;
import org.hyperledger.besu.plugin.services.metrics.LabelledGauge;
import org.hyperledger.besu.plugin.services.metrics.LabelledMetric;
import org.hyperledger.besu.plugin.services.metrics.MetricCategory;
import org.hyperledger.besu.plugin.services.metrics.OperationTimer;
import java.util.Set;
import java.util.function.DoubleSupplier;
import java.util.function.IntSupplier;
import java.util.function.LongSupplier;
import java.util.function.Supplier;
import com.google.common.cache.Cache;
/** An interface for creating various Metrics components. */
public interface MetricsSystem extends BesuService {
@ -159,4 +164,45 @@ public interface MetricsSystem extends BesuService {
final LongSupplier valueSupplier) {
createGauge(category, name, help, () -> (double) valueSupplier.getAsLong());
}
/**
* Track a summary that is computed externally to this metric system. Useful when existing
* libraries calculate the summary data on their own, and we want to export that summary via the
* configured metric system. A notable example is RocksDB statistics.
*
* @param category The {@link MetricCategory} this external summary is assigned to.
* @param name A name for the metric.
* @param help A human readable description of the metric.
* @param summarySupplier A supplier to retrieve the summary data when needed.
*/
void trackExternalSummary(
MetricCategory category, String name, String help, Supplier<ExternalSummary> summarySupplier);
/**
* Collect metrics from Guava cache.
*
* @param category The {@link MetricCategory} this Guava cache is assigned to.
* @param name the name to identify this Guava cache, must be unique.
* @param cache the Guava cache
*/
void createGuavaCacheCollector(MetricCategory category, String name, Cache<?, ?> cache);
/**
* Provides an immutable view into the metric categories enabled for metric collection.
*
* @return the set of enabled metric categories.
*/
Set<MetricCategory> getEnabledCategories();
/**
* Checks if a particular category of metrics is enabled.
*
* @param category the category to check
* @return true if the category is enabled, false otherwise
*/
default boolean isCategoryEnabled(final MetricCategory category) {
return getEnabledCategories().stream()
.map(MetricCategory::getName)
.anyMatch(category.getName()::equals);
}
}

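trackExternalSummary is the generic replacement for addCollector: a plugin or subsystem that already computes its own summary statistics can expose them through whichever metrics backend is configured. A sketch under the assumption of a hypothetical LatencyRecorder source; the metric name and the NETWORK category are chosen only for illustration:

```java
import java.util.List;

import org.hyperledger.besu.metrics.BesuMetricCategory;
import org.hyperledger.besu.plugin.services.MetricsSystem;
import org.hyperledger.besu.plugin.services.metrics.ExternalSummary;
import org.hyperledger.besu.plugin.services.metrics.ExternalSummary.Quantile;

class ExternalSummaryWiringSketch {

  /** Hypothetical stand-in for a library that keeps its own latency statistics. */
  interface LatencyRecorder {
    long count();

    double sum();

    double valueAtPercentile(double percentile);
  }

  static void register(final MetricsSystem metricsSystem, final LatencyRecorder recorder) {
    metricsSystem.trackExternalSummary(
        BesuMetricCategory.NETWORK,
        "request_latency_seconds",
        "Request latency summary computed outside the metrics system",
        () ->
            new ExternalSummary(
                recorder.count(),
                recorder.sum(),
                List.of(
                    new Quantile(0.5, recorder.valueAtPercentile(50)),
                    new Quantile(0.95, recorder.valueAtPercentile(95)),
                    new Quantile(0.99, recorder.valueAtPercentile(99)))));
  }
}
```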
@ -0,0 +1,36 @@
/*
* Copyright contributors to Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.plugin.services.metrics;
import java.util.List;
/**
* Record of summary data that is kept outside the metric system. Useful when existing libraries
* calculate the summary data on their own, and we want to export that summary via the configured
* metric system. A notable example is RocksDB statistics.
*
* @param count the number of observations
* @param sum the sum of all the observations
* @param quantiles a list of quantiles with values
*/
public record ExternalSummary(long count, double sum, List<Quantile> quantiles) {
/**
* Represent a single quantile and its value
*
* @param quantile the quantile
* @param value the value
*/
public record Quantile(double quantile, double value) {}
}

@ -46,7 +46,6 @@ dependencies {
implementation 'com.google.guava:guava'
implementation 'info.picocli:picocli'
implementation 'io.opentelemetry:opentelemetry-api'
implementation 'io.prometheus:simpleclient'
implementation 'io.tmio:tuweni-bytes'
implementation 'org.rocksdb:rocksdbjni'
implementation project(path: ':ethereum:core')

@ -15,7 +15,6 @@
package org.hyperledger.besu.plugin.services.storage.rocksdb;
import org.hyperledger.besu.metrics.BesuMetricCategory;
import org.hyperledger.besu.metrics.prometheus.PrometheusMetricsSystem;
import org.hyperledger.besu.metrics.rocksdb.RocksDBStats;
import org.hyperledger.besu.plugin.services.MetricsSystem;
import org.hyperledger.besu.plugin.services.metrics.Counter;
@ -107,10 +106,7 @@ public class RocksDBMetricsFactory {
"database")
.labels(rocksDbConfiguration.getLabel());
if (metricsSystem instanceof PrometheusMetricsSystem) {
RocksDBStats.registerRocksDBMetrics(
stats, (PrometheusMetricsSystem) metricsSystem, statsDbMetricCategory);
}
RocksDBStats.registerRocksDBMetrics(stats, metricsSystem, statsDbMetricCategory);
metricsSystem.createLongGauge(
rocksDbMetricCategory,
