Improve the high spec flag (#6354)

* Improve the high spec flag, limit it to few column families
* Update changelog
* spotless
* Update the plugin API hash as one of the interfaces was changed
* Fix failing unit tests

Signed-off-by: Ameziane H <ameziane.hamlat@consensys.net>
Co-authored-by: Sally MacFarlane <macfarla.github@gmail.com>
pull/6456/head
ahamlat 10 months ago committed by garyschulte
parent f1c2921421
commit 666f795b63
  1. 3
      CHANGELOG.md
  2. 32
      ethereum/core/src/main/java/org/hyperledger/besu/ethereum/storage/keyvalue/KeyValueSegmentIdentifier.java
  3. 2
      plugin-api/build.gradle
  4. 8
      plugin-api/src/main/java/org/hyperledger/besu/plugin/services/storage/SegmentIdentifier.java
  5. 139
      plugins/rocksdb/src/main/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/RocksDBColumnarKeyValueStorage.java
  6. 2
      plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/RocksDBKeyValuePrivacyStorageFactoryTest.java
  7. 2
      plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/RocksDBKeyValueStorageFactoryTest.java
  8. 2
      plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/OptimisticTransactionDBRocksDBColumnarKeyValueStorageTest.java
  9. 55
      plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/RocksDBColumnarKeyValueStorageTest.java
  10. 2
      plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/TransactionDBRocksDBColumnarKeyValueStorageTest.java
  11. 5
      services/kvstore/src/main/java/org/hyperledger/besu/services/kvstore/InMemoryKeyValueStorage.java

@ -8,6 +8,8 @@
### Additions and Improvements
- Add `OperationTracer.tracePrepareTransaction`, where the sender account has not yet been altered [#6453](https://github.com/hyperledger/besu/pull/6453)
- Improve the high spec flag by limiting it to a few column families [#6354](https://github.com/hyperledger/besu/pull/6354)
### Bug fixes
- Fix the way an advertised host configured with `--p2p-host` is treated when communicating with the originator of a PING packet [#6225](https://github.com/hyperledger/besu/pull/6225)
@ -61,6 +63,7 @@
- Set Ethereum Classic mainnet activation block for Spiral network upgrade [#6267](https://github.com/hyperledger/besu/pull/6267)
- Add custom genesis file name to config overview if specified [#6297](https://github.com/hyperledger/besu/pull/6297)
- Update Gradle plugins and replace unmaintained License Gradle Plugin with the actively maintained Gradle License Report [#6275](https://github.com/hyperledger/besu/pull/6275)
- Optimize RocksDB WAL files, allows for faster restart and a more linear disk space utilization [#6328](https://github.com/hyperledger/besu/pull/6328)
### Bug fixes
- Hotfix for selfdestruct preimages on bonsai [#6359](https://github.com/hyperledger/besu/pull/6359)

@ -16,18 +16,21 @@ package org.hyperledger.besu.ethereum.storage.keyvalue;
import org.hyperledger.besu.plugin.services.storage.SegmentIdentifier;
import java.nio.charset.StandardCharsets;
import org.bouncycastle.util.Arrays;
public enum KeyValueSegmentIdentifier implements SegmentIdentifier {
BLOCKCHAIN(new byte[] {1}, true),
WORLD_STATE(new byte[] {2}, new int[] {0, 1}),
DEFAULT("default".getBytes(StandardCharsets.UTF_8)),
BLOCKCHAIN(new byte[] {1}, true, true),
WORLD_STATE(new byte[] {2}, new int[] {0, 1}, false, true),
PRIVATE_TRANSACTIONS(new byte[] {3}),
PRIVATE_STATE(new byte[] {4}),
PRUNING_STATE(new byte[] {5}, new int[] {0, 1}),
ACCOUNT_INFO_STATE(new byte[] {6}, new int[] {2}),
ACCOUNT_INFO_STATE(new byte[] {6}, new int[] {2}, false, true),
CODE_STORAGE(new byte[] {7}, new int[] {2}),
ACCOUNT_STORAGE_STORAGE(new byte[] {8}, new int[] {2}),
TRIE_BRANCH_STORAGE(new byte[] {9}, new int[] {2}),
ACCOUNT_STORAGE_STORAGE(new byte[] {8}, new int[] {2}, false, true),
TRIE_BRANCH_STORAGE(new byte[] {9}, new int[] {2}, false, true),
TRIE_LOG_STORAGE(new byte[] {10}, new int[] {2}),
VARIABLES(new byte[] {11}), // formerly GOQUORUM_PRIVATE_WORLD_STATE
@ -45,24 +48,30 @@ public enum KeyValueSegmentIdentifier implements SegmentIdentifier {
private final byte[] id;
private final int[] versionList;
private final boolean containsStaticData;
private final boolean eligibleToHighSpecFlag;
KeyValueSegmentIdentifier(final byte[] id) {
this(id, new int[] {0, 1, 2});
}
KeyValueSegmentIdentifier(final byte[] id, final boolean containsStaticData) {
this(id, new int[] {0, 1, 2}, containsStaticData);
KeyValueSegmentIdentifier(
final byte[] id, final boolean containsStaticData, final boolean eligibleToHighSpecFlag) {
this(id, new int[] {0, 1, 2}, containsStaticData, eligibleToHighSpecFlag);
}
KeyValueSegmentIdentifier(final byte[] id, final int[] versionList) {
this(id, versionList, false);
this(id, versionList, false, false);
}
KeyValueSegmentIdentifier(
final byte[] id, final int[] versionList, final boolean containsStaticData) {
final byte[] id,
final int[] versionList,
final boolean containsStaticData,
final boolean eligibleToHighSpecFlag) {
this.id = id;
this.versionList = versionList;
this.containsStaticData = containsStaticData;
this.eligibleToHighSpecFlag = eligibleToHighSpecFlag;
}
@Override
@ -80,6 +89,11 @@ public enum KeyValueSegmentIdentifier implements SegmentIdentifier {
return containsStaticData;
}
// Whether this column family participates in the high spec tuning (larger block cache).
@Override
public boolean isEligibleToHighSpecFlag() {
return eligibleToHighSpecFlag;
}
@Override
public boolean includeInDatabaseVersion(final int version) {
return Arrays.contains(versionList, version);

@ -69,7 +69,7 @@ Calculated : ${currentHash}
tasks.register('checkAPIChanges', FileStateChecker) {
description = "Checks that the API for the Plugin-API project does not change without deliberate thought"
files = sourceSets.main.allJava.files
knownHash = 'IGq+V3KaStHCRFkeK3KwPxJYKO4RX9YM1O4JYITk8S8='
knownHash = 'ZsovOR0oPfomcLP4b+HjikWzM0Tx6sCwi68mf5qwZf4='
}
check.dependsOn('checkAPIChanges')

@ -56,4 +56,12 @@ public interface SegmentIdentifier {
* @return true if the segment contains only static data
*/
boolean containsStaticData();
/**
* Indicates whether this segment (column family) opts into the extra RocksDB tuning applied
* when the high spec flag is enabled. Only eligible segments receive the larger block cache;
* all other segments keep the standard configured cache capacity.
*
* @return true if this segment participates in high spec tuning
*/
boolean isEligibleToHighSpecFlag();
}

@ -70,18 +70,16 @@ import org.slf4j.LoggerFactory;
public abstract class RocksDBColumnarKeyValueStorage implements SegmentedKeyValueStorage {
private static final Logger LOG = LoggerFactory.getLogger(RocksDBColumnarKeyValueStorage.class);
static final String DEFAULT_COLUMN = "default";
private static final int ROCKSDB_FORMAT_VERSION = 5;
private static final long ROCKSDB_BLOCK_SIZE = 32768;
/** RocksDb blockcache size when using the high spec option */
protected static final long ROCKSDB_BLOCKCACHE_SIZE_HIGH_SPEC = 1_073_741_824L;
/** RocksDb memtable size when using the high spec option */
protected static final long ROCKSDB_MEMTABLE_SIZE_HIGH_SPEC = 1_073_741_824L;
protected static final long ROCKSDB_MEMTABLE_SIZE_HIGH_SPEC = 536_870_912L;
/** Max total size of all WAL file, after which a flush is triggered */
protected static final long WAL_MAX_TOTAL_SIZE = 1_073_741_824L;
/** Expected size of a single WAL file, to determine how many WAL files to keep around */
protected static final long EXPECTED_WAL_FILE_SIZE = 67_108_864L;
/** RocksDb number of log files to keep on disk */
private static final long NUMBER_OF_LOG_FILES_TO_KEEP = 7;
/** RocksDb Time to roll a log file (1 day = 3600 * 24 seconds) */
@ -144,7 +142,6 @@ public abstract class RocksDBColumnarKeyValueStorage implements SegmentedKeyValu
this.rocksDBMetricsFactory = rocksDBMetricsFactory;
try {
final ColumnFamilyOptions columnFamilyOptions = new ColumnFamilyOptions();
trimmedSegments = new ArrayList<>(defaultSegments);
final List<byte[]> existingColumnFamilies =
RocksDB.listColumnFamilies(new Options(), configuration.getDatabaseDir().toString());
@ -156,14 +153,9 @@ public abstract class RocksDBColumnarKeyValueStorage implements SegmentedKeyValu
.noneMatch(existed -> Arrays.equals(existed, ignorableSegment.getId())))
.forEach(trimmedSegments::remove);
columnDescriptors =
trimmedSegments.stream().map(this::createColumnDescriptor).collect(Collectors.toList());
columnDescriptors.add(
new ColumnFamilyDescriptor(
DEFAULT_COLUMN.getBytes(StandardCharsets.UTF_8),
columnFamilyOptions
.setTtl(0)
.setCompressionType(CompressionType.LZ4_COMPRESSION)
.setTableFormatConfig(createBlockBasedTableConfig(configuration))));
trimmedSegments.stream()
.map(segment -> createColumnDescriptor(segment, configuration))
.collect(Collectors.toList());
setGlobalOptions(configuration, stats);
@ -174,6 +166,80 @@ public abstract class RocksDBColumnarKeyValueStorage implements SegmentedKeyValu
}
}
/**
* Creates the RocksDB column family descriptor for a given segment, applying the per-segment
* options: LZ4 compression, a segment-specific block-based table configuration, and — for
* segments flagged as containing static data — blob files (BlobDB) with garbage collection
* disabled.
*
* @param segment the segment identifier backing this column family
* @param configuration RocksDB configuration (cache capacity, high spec flag, ...)
* @return a column family descriptor configured for the segment
*/
private ColumnFamilyDescriptor createColumnDescriptor(
final SegmentIdentifier segment, final RocksDBConfiguration configuration) {
BlockBasedTableConfig basedTableConfig = createBlockBasedTableConfig(segment, configuration);
final var options =
new ColumnFamilyOptions()
.setTtl(0)
.setCompressionType(CompressionType.LZ4_COMPRESSION)
.setTableFormatConfig(basedTableConfig);
if (segment.containsStaticData()) {
// Store static-data segments in blob files; GC is disabled — presumably because this
// data is written once and never rewritten (TODO confirm).
options
.setEnableBlobFiles(true)
.setEnableBlobGarbageCollection(false)
.setMinBlobSize(100)
.setBlobCompressionType(CompressionType.LZ4_COMPRESSION);
}
return new ColumnFamilyDescriptor(segment.getId(), options);
}
/**
* Builds the block-based table configuration for a segment. When the high spec flag is set AND
* the segment is eligible for it, the block cache uses the larger high spec size; otherwise the
* standard configured cache capacity is used.
*
* @param segment the segment the column family belongs to
* @param config RocksDB configuration
* @return the block-based table configuration for the segment
*/
private BlockBasedTableConfig createBlockBasedTableConfig(
final SegmentIdentifier segment, final RocksDBConfiguration config) {
// The larger LRU block cache is granted only to high-spec-eligible segments.
final LRUCache cache =
new LRUCache(
config.isHighSpec() && segment.isEligibleToHighSpecFlag()
? ROCKSDB_BLOCKCACHE_SIZE_HIGH_SPEC
: config.getCacheCapacity());
return new BlockBasedTableConfig()
.setFormatVersion(ROCKSDB_FORMAT_VERSION)
.setBlockCache(cache)
.setFilterPolicy(new BloomFilter(10, false))
.setPartitionFilters(true)
.setCacheIndexAndFilterBlocks(false)
.setBlockSize(ROCKSDB_BLOCK_SIZE);
}
/**
* Sets the database-wide options (DBOptions): open-file limit, statistics, missing column
* family creation, log-file rotation/retention, background thread count, and WAL size caps.
*
* NOTE(review): the previous implementation additionally called setDbWriteBufferSize(
* ROCKSDB_MEMTABLE_SIZE_HIGH_SPEC) when the high spec flag was enabled; that global memtable
* cap is no longer applied here — confirm this is intentional (e.g. superseded by the
* per-column-family high spec tuning).
*
* @param configuration RocksDB configuration
* @param stats The statistics object registered with the database
*/
private void setGlobalOptions(final RocksDBConfiguration configuration, final Statistics stats) {
options = new DBOptions();
options
.setCreateIfMissing(true)
.setMaxOpenFiles(configuration.getMaxOpenFiles())
.setStatistics(stats)
.setCreateMissingColumnFamilies(true)
.setLogFileTimeToRoll(TIME_TO_ROLL_LOG_FILE)
.setKeepLogFileNum(NUMBER_OF_LOG_FILES_TO_KEEP)
.setEnv(Env.getDefault().setBackgroundThreads(configuration.getBackgroundThreadCount()))
.setMaxTotalWalSize(WAL_MAX_TOTAL_SIZE)
.setRecycleLogFileNum(WAL_MAX_TOTAL_SIZE / EXPECTED_WAL_FILE_SIZE);
}
/**
* Parse RocksDBException and wrap in StorageException
*
@ -219,42 +285,6 @@ public abstract class RocksDBColumnarKeyValueStorage implements SegmentedKeyValu
}
}
private ColumnFamilyDescriptor createColumnDescriptor(final SegmentIdentifier segment) {
final var options =
new ColumnFamilyOptions()
.setTtl(0)
.setCompressionType(CompressionType.LZ4_COMPRESSION)
.setTableFormatConfig(createBlockBasedTableConfig(configuration));
if (segment.containsStaticData()) {
options
.setEnableBlobFiles(true)
.setEnableBlobGarbageCollection(false)
.setMinBlobSize(100)
.setBlobCompressionType(CompressionType.LZ4_COMPRESSION);
}
return new ColumnFamilyDescriptor(segment.getId(), options);
}
private void setGlobalOptions(final RocksDBConfiguration configuration, final Statistics stats) {
options = new DBOptions();
options
.setCreateIfMissing(true)
.setMaxOpenFiles(configuration.getMaxOpenFiles())
.setMaxTotalWalSize(WAL_MAX_TOTAL_SIZE)
.setRecycleLogFileNum(WAL_MAX_TOTAL_SIZE / EXPECTED_WAL_FILE_SIZE)
.setStatistics(stats)
.setCreateMissingColumnFamilies(true)
.setLogFileTimeToRoll(TIME_TO_ROLL_LOG_FILE)
.setKeepLogFileNum(NUMBER_OF_LOG_FILES_TO_KEEP)
.setEnv(Env.getDefault().setBackgroundThreads(configuration.getBackgroundThreadCount()));
if (configuration.isHighSpec()) {
options.setDbWriteBufferSize(ROCKSDB_MEMTABLE_SIZE_HIGH_SPEC);
}
}
void initMetrics() {
metrics = rocksDBMetricsFactory.create(metricsSystem, configuration, getDB(), stats);
}
@ -287,19 +317,6 @@ public abstract class RocksDBColumnarKeyValueStorage implements SegmentedKeyValu
}));
}
BlockBasedTableConfig createBlockBasedTableConfig(final RocksDBConfiguration config) {
final LRUCache cache =
new LRUCache(
config.isHighSpec() ? ROCKSDB_BLOCKCACHE_SIZE_HIGH_SPEC : config.getCacheCapacity());
return new BlockBasedTableConfig()
.setFormatVersion(ROCKSDB_FORMAT_VERSION)
.setBlockCache(cache)
.setFilterPolicy(new BloomFilter(10, false))
.setPartitionFilters(true)
.setCacheIndexAndFilterBlocks(false)
.setBlockSize(ROCKSDB_BLOCK_SIZE);
}
/**
* Safe method to map segment identifier to column handle.
*

@ -45,7 +45,7 @@ public class RocksDBKeyValuePrivacyStorageFactoryTest {
@TempDir private Path temporaryFolder;
private final ObservableMetricsSystem metricsSystem = new NoOpMetricsSystem();
private final SegmentIdentifier segment = TestSegment.BAR;
private final List<SegmentIdentifier> segments = List.of(segment);
private final List<SegmentIdentifier> segments = List.of(TestSegment.DEFAULT, segment);
@Test
public void shouldDetectVersion1DatabaseIfNoMetadataFileFound() throws Exception {

@ -50,7 +50,7 @@ public class RocksDBKeyValueStorageFactoryTest {
@TempDir public Path temporaryFolder;
private final ObservableMetricsSystem metricsSystem = new NoOpMetricsSystem();
private final SegmentIdentifier segment = TestSegment.FOO;
private final List<SegmentIdentifier> segments = List.of(segment);
private final List<SegmentIdentifier> segments = List.of(TestSegment.DEFAULT, segment);
@Test
public void shouldCreateCorrectMetadataFileForLatestVersion() throws Exception {

@ -39,7 +39,7 @@ public class OptimisticTransactionDBRocksDBColumnarKeyValueStorageTest
new RocksDBConfigurationBuilder()
.databaseDir(Files.createTempDirectory("segmentedStore"))
.build(),
Arrays.asList(TestSegment.FOO, TestSegment.BAR),
Arrays.asList(TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR),
List.of(),
new NoOpMetricsSystem(),
RocksDBMetricsFactory.PUBLIC_ROCKS_DB_METRICS);

@ -194,13 +194,17 @@ public abstract class RocksDBColumnarKeyValueStorageTest extends AbstractKeyValu
SegmentedKeyValueStorage store =
createSegmentedStore(
testPath,
Arrays.asList(TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL),
Arrays.asList(
TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL),
List.of(TestSegment.EXPERIMENTAL));
store.close();
// new db will be backward compatible with db without knowledge of experimental column family
store =
createSegmentedStore(testPath, Arrays.asList(TestSegment.FOO, TestSegment.BAR), List.of());
createSegmentedStore(
testPath,
Arrays.asList(TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR),
List.of());
store.close();
}
@ -212,14 +216,18 @@ public abstract class RocksDBColumnarKeyValueStorageTest extends AbstractKeyValu
SegmentedKeyValueStorage store =
createSegmentedStore(
testPath,
Arrays.asList(TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL),
Arrays.asList(
TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL),
List.of());
store.close();
// new db will not be backward compatible with db without knowledge of experimental column
// family
try {
createSegmentedStore(testPath, Arrays.asList(TestSegment.FOO, TestSegment.BAR), List.of());
createSegmentedStore(
testPath,
Arrays.asList(TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR),
List.of());
fail("DB without knowledge of experimental column family should fail");
} catch (StorageException e) {
assertThat(e.getMessage()).contains("Unhandled column families");
@ -230,7 +238,8 @@ public abstract class RocksDBColumnarKeyValueStorageTest extends AbstractKeyValu
store =
createSegmentedStore(
testPath,
Arrays.asList(TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL),
Arrays.asList(
TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL),
List.of(TestSegment.EXPERIMENTAL));
store.close();
}
@ -242,27 +251,35 @@ public abstract class RocksDBColumnarKeyValueStorageTest extends AbstractKeyValu
SegmentedKeyValueStorage store =
createSegmentedStore(
testPath,
Arrays.asList(TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL),
Arrays.asList(
TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL),
List.of(TestSegment.EXPERIMENTAL));
store.close();
// new db will be backward compatible with db without knowledge of experimental column family
store =
createSegmentedStore(testPath, Arrays.asList(TestSegment.FOO, TestSegment.BAR), List.of());
createSegmentedStore(
testPath,
Arrays.asList(TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR),
List.of());
store.close();
// Creating a new db without ignoring the experimental column family will add the column to the db
store =
createSegmentedStore(
testPath,
Arrays.asList(TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL),
Arrays.asList(
TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL),
List.of());
store.close();
// Now, the db will be backward incompatible with db without knowledge of experimental column
// family
try {
createSegmentedStore(testPath, Arrays.asList(TestSegment.FOO, TestSegment.BAR), List.of());
createSegmentedStore(
testPath,
Arrays.asList(TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR),
List.of());
fail("DB without knowledge of experimental column family should fail");
} catch (StorageException e) {
assertThat(e.getMessage()).contains("Unhandled column families");
@ -293,7 +310,10 @@ public abstract class RocksDBColumnarKeyValueStorageTest extends AbstractKeyValu
final SegmentedKeyValueStorage store =
createSegmentedStore(
folder, metricsSystemMock, List.of(TestSegment.FOO), List.of(TestSegment.EXPERIMENTAL));
folder,
metricsSystemMock,
List.of(TestSegment.DEFAULT, TestSegment.FOO),
List.of(TestSegment.EXPERIMENTAL));
KeyValueStorage keyValueStorage = new SegmentedKeyValueStorageAdapter(TestSegment.FOO, store);
@ -343,24 +363,28 @@ public abstract class RocksDBColumnarKeyValueStorageTest extends AbstractKeyValu
}
public enum TestSegment implements SegmentIdentifier {
DEFAULT("default".getBytes(StandardCharsets.UTF_8)),
FOO(new byte[] {1}),
BAR(new byte[] {2}),
EXPERIMENTAL(new byte[] {3}),
STATIC_DATA(new byte[] {4}, true);
STATIC_DATA(new byte[] {4}, true, false);
private final byte[] id;
private final String nameAsUtf8;
private final boolean containsStaticData;
private final boolean eligibleToHighSpecFlag;
TestSegment(final byte[] id) {
this(id, false);
this(id, false, false);
}
TestSegment(final byte[] id, final boolean containsStaticData) {
TestSegment(
final byte[] id, final boolean containsStaticData, final boolean eligibleToHighSpecFlag) {
this.id = id;
this.nameAsUtf8 = new String(id, StandardCharsets.UTF_8);
this.containsStaticData = containsStaticData;
this.eligibleToHighSpecFlag = eligibleToHighSpecFlag;
}
@Override
@ -377,6 +401,11 @@ public abstract class RocksDBColumnarKeyValueStorageTest extends AbstractKeyValu
public boolean containsStaticData() {
return containsStaticData;
}
// Test segments expose the flag so storage tests can exercise high spec eligibility.
@Override
public boolean isEligibleToHighSpecFlag() {
return eligibleToHighSpecFlag;
}
}
protected abstract SegmentedKeyValueStorage createSegmentedStore() throws Exception;

@ -36,7 +36,7 @@ public class TransactionDBRocksDBColumnarKeyValueStorageTest
protected SegmentedKeyValueStorage createSegmentedStore() throws Exception {
return new TransactionDBRocksDBColumnarKeyValueStorage(
new RocksDBConfigurationBuilder().databaseDir(getTempSubFolder(folder)).build(),
Arrays.asList(TestSegment.FOO, TestSegment.BAR),
Arrays.asList(TestSegment.DEFAULT, TestSegment.FOO, TestSegment.BAR),
List.of(),
new NoOpMetricsSystem(),
RocksDBMetricsFactory.PUBLIC_ROCKS_DB_METRICS);

@ -50,6 +50,11 @@ public class InMemoryKeyValueStorage extends SegmentedKeyValueStorageAdapter {
public boolean containsStaticData() {
return false;
}
// In-memory segments never opt into the RocksDB high spec tuning.
@Override
public boolean isEligibleToHighSpecFlag() {
return false;
}
};
private static ConcurrentMap<SegmentIdentifier, Map<Bytes, Optional<byte[]>>> asSegmentMap(

Loading…
Cancel
Save