Ignore extra RocksDB column families for experimental features (#4842)

Signed-off-by: Zhenyang Shi <wcgcyx@gmail.com>
Branch: pull/4854/head
Author: Zhenyang Shi (committed by GitHub)
Parent: 7d5988d4b5
Commit: 13213e2c7d
Files changed:
  1. besu/src/main/java/org/hyperledger/besu/cli/BesuCommand.java (9 lines changed)
  2. plugins/rocksdb/src/main/java/org/hyperledger/besu/plugin/services/storage/rocksdb/RocksDBKeyValueStorageFactory.java (27 lines changed)
  3. plugins/rocksdb/src/main/java/org/hyperledger/besu/plugin/services/storage/rocksdb/RocksDBPlugin.java (11 lines changed)
  4. plugins/rocksdb/src/main/java/org/hyperledger/besu/plugin/services/storage/rocksdb/segmented/RocksDBColumnarKeyValueStorage.java (27 lines changed)
  5. plugins/rocksdb/src/test/java/org/hyperledger/besu/plugin/services/storage/rocksdb/unsegmented/RocksDBColumnarKeyValueStorageTest.java (100 lines changed)

@@ -140,6 +140,7 @@ import org.hyperledger.besu.ethereum.permissioning.SmartContractPermissioningCon
import org.hyperledger.besu.ethereum.privacy.storage.keyvalue.PrivacyKeyValueStorageProvider;
import org.hyperledger.besu.ethereum.privacy.storage.keyvalue.PrivacyKeyValueStorageProviderBuilder;
import org.hyperledger.besu.ethereum.storage.StorageProvider;
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier;
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueStorageProvider;
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueStorageProviderBuilder;
import org.hyperledger.besu.ethereum.worldstate.DefaultWorldStateArchive;
@@ -1425,6 +1426,8 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
// set merge config on the basis of genesis config
setMergeConfigOptions();
setIgnorableStorageSegments();
instantiateSignatureAlgorithmFactory();
logger.info("Starting Besu");
@@ -3352,6 +3355,12 @@ public class BesuCommand implements DefaultCommandValues, Runnable {
.isPresent());
}
private void setIgnorableStorageSegments() {
if (!unstableChainPruningOptions.getChainDataPruningEnabled()) {
rocksDBPlugin.addIgnorableSegmentIdentifier(KeyValueSegmentIdentifier.CHAIN_PRUNER_STATE);
}
}
private void validatePostMergeCheckpointBlockRequirements() {
final GenesisConfigOptions genesisOptions =
Optional.ofNullable(genesisConfigOptions)

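The new setIgnorableStorageSegments() hook is deliberately generic: a segment is marked ignorable exactly when the experimental feature that owns it is disabled, so a node that never enabled the feature never creates its column family. A minimal sketch of how a second feature could reuse the hook; someExperimentalFeatureEnabled and SOME_EXPERIMENTAL_STATE are hypothetical names, not existing Besu options or segment identifiers:

private void setIgnorableStorageSegments() {
  if (!unstableChainPruningOptions.getChainDataPruningEnabled()) {
    rocksDBPlugin.addIgnorableSegmentIdentifier(KeyValueSegmentIdentifier.CHAIN_PRUNER_STATE);
  }
  // Hypothetical second experimental feature guarded the same way;
  // SOME_EXPERIMENTAL_STATE is illustrative and not an existing KeyValueSegmentIdentifier value.
  if (!someExperimentalFeatureEnabled) {
    rocksDBPlugin.addIgnorableSegmentIdentifier(KeyValueSegmentIdentifier.SOME_EXPERIMENTAL_STATE);
  }
}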
@@ -58,23 +58,42 @@ public class RocksDBKeyValueStorageFactory implements KeyValueStorageFactory {
private final Supplier<RocksDBFactoryConfiguration> configuration;
private final List<SegmentIdentifier> segments;
private final List<SegmentIdentifier> ignorableSegments;
public RocksDBKeyValueStorageFactory(
final Supplier<RocksDBFactoryConfiguration> configuration,
final List<SegmentIdentifier> segments,
final List<SegmentIdentifier> ignorableSegments,
final int defaultVersion,
final RocksDBMetricsFactory rocksDBMetricsFactory) {
this.configuration = configuration;
this.segments = segments;
this.ignorableSegments = ignorableSegments;
this.defaultVersion = defaultVersion;
this.rocksDBMetricsFactory = rocksDBMetricsFactory;
}
public RocksDBKeyValueStorageFactory(
final Supplier<RocksDBFactoryConfiguration> configuration,
final List<SegmentIdentifier> segments,
final int defaultVersion,
final RocksDBMetricsFactory rocksDBMetricsFactory) {
this(configuration, segments, List.of(), defaultVersion, rocksDBMetricsFactory);
}
public RocksDBKeyValueStorageFactory(
final Supplier<RocksDBFactoryConfiguration> configuration,
final List<SegmentIdentifier> segments,
final List<SegmentIdentifier> ignorableSegments,
final RocksDBMetricsFactory rocksDBMetricsFactory) {
this(configuration, segments, ignorableSegments, DEFAULT_VERSION, rocksDBMetricsFactory);
}
public RocksDBKeyValueStorageFactory(
final Supplier<RocksDBFactoryConfiguration> configuration,
final List<SegmentIdentifier> segments,
final RocksDBMetricsFactory rocksDBMetricsFactory) {
this(configuration, segments, DEFAULT_VERSION, rocksDBMetricsFactory);
this(configuration, segments, List.of(), DEFAULT_VERSION, rocksDBMetricsFactory);
}
int getDefaultVersion() {
@@ -123,7 +142,11 @@ public class RocksDBKeyValueStorageFactory implements KeyValueStorageFactory {
segmentedStorage =
new RocksDBColumnarKeyValueStorage(
rocksDBConfiguration, segmentsForVersion, metricsSystem, rocksDBMetricsFactory);
rocksDBConfiguration,
segmentsForVersion,
ignorableSegments,
metricsSystem,
rocksDBMetricsFactory);
}
final RocksDbSegmentIdentifier rocksSegment =
segmentedStorage.getSegmentIdentifierByName(segment);

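The overloads above keep existing call sites source-compatible: constructors without an ignorable-segment list delegate with List.of(). A hedged usage sketch, assuming the Besu plugin API and RocksDB plugin classes from this diff are on the classpath; FactoryWiringSketch and its method names are illustrative, not Besu code:

import java.util.List;
import java.util.function.Supplier;

import org.hyperledger.besu.plugin.services.storage.SegmentIdentifier;
import org.hyperledger.besu.plugin.services.storage.rocksdb.RocksDBKeyValueStorageFactory;
import org.hyperledger.besu.plugin.services.storage.rocksdb.RocksDBMetricsFactory;
import org.hyperledger.besu.plugin.services.storage.rocksdb.configuration.RocksDBFactoryConfiguration;

final class FactoryWiringSketch {

  // Pre-change call shape: no ignorable segments (delegates to List.of()).
  static RocksDBKeyValueStorageFactory withoutIgnorables(
      final Supplier<RocksDBFactoryConfiguration> configuration,
      final List<SegmentIdentifier> segments) {
    return new RocksDBKeyValueStorageFactory(
        configuration, segments, RocksDBMetricsFactory.PUBLIC_ROCKS_DB_METRICS);
  }

  // Post-change call shape: name the segments that may be skipped when their
  // column families are absent on disk.
  static RocksDBKeyValueStorageFactory withIgnorables(
      final Supplier<RocksDBFactoryConfiguration> configuration,
      final List<SegmentIdentifier> segments,
      final List<SegmentIdentifier> ignorableSegments) {
    return new RocksDBKeyValueStorageFactory(
        configuration, segments, ignorableSegments, RocksDBMetricsFactory.PUBLIC_ROCKS_DB_METRICS);
  }
}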
@@ -23,6 +23,7 @@ import org.hyperledger.besu.plugin.services.storage.rocksdb.configuration.RocksD
import org.hyperledger.besu.plugin.services.storage.rocksdb.configuration.RocksDBFactoryConfiguration;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
@@ -37,6 +38,7 @@ public class RocksDBPlugin implements BesuPlugin {
private static final String NAME = "rocksdb";
private final RocksDBCLIOptions options;
private final List<SegmentIdentifier> ignorableSegments = new ArrayList<>();
private BesuContext context;
private RocksDBKeyValueStorageFactory factory;
private RocksDBKeyValuePrivacyStorageFactory privacyFactory;
@@ -45,6 +47,10 @@ public class RocksDBPlugin implements BesuPlugin {
this.options = RocksDBCLIOptions.create();
}
public void addIgnorableSegmentIdentifier(final SegmentIdentifier ignorable) {
ignorableSegments.add(ignorable);
}
@Override
public void register(final BesuContext context) {
LOG.debug("Registering plugin");
@@ -106,7 +112,10 @@ public class RocksDBPlugin implements BesuPlugin {
Suppliers.memoize(options::toDomainObject);
factory =
new RocksDBKeyValueStorageFactory(
configuration, segments, RocksDBMetricsFactory.PUBLIC_ROCKS_DB_METRICS);
configuration,
segments,
ignorableSegments,
RocksDBMetricsFactory.PUBLIC_ROCKS_DB_METRICS);
privacyFactory = new RocksDBKeyValuePrivacyStorageFactory(factory);
service.registerKeyValueStorage(factory);

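Whether a segment registered via addIgnorableSegmentIdentifier is actually skipped depends only on what already exists on disk. The standalone utility below is not part of this change (ListColumnFamilies is an illustrative name); it prints the raw column-family ids of a RocksDB directory using the same org.rocksdb API the storage layer relies on, and takes the directory path as an argument rather than assuming where Besu keeps it:

import java.util.Arrays;
import java.util.List;

import org.rocksdb.Options;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;

// Lists the column families present in a RocksDB directory, i.e. the on-disk
// information that decides whether an ignorable segment is skipped (absent)
// or opened as usual (present).
public final class ListColumnFamilies {
  public static void main(final String[] args) throws RocksDBException {
    final String databaseDir = args[0]; // path to the RocksDB database directory
    try (final Options options = new Options()) {
      final List<byte[]> families = RocksDB.listColumnFamilies(options, databaseDir);
      for (final byte[] family : families) {
        System.out.println(Arrays.toString(family));
      }
    }
  }
}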
@@ -32,6 +32,7 @@ import org.hyperledger.besu.services.kvstore.SegmentedKeyValueStorageTransaction
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -54,6 +55,8 @@ import org.rocksdb.DBOptions;
import org.rocksdb.Env;
import org.rocksdb.LRUCache;
import org.rocksdb.OptimisticTransactionDB;
import org.rocksdb.Options;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;
import org.rocksdb.RocksIterator;
import org.rocksdb.Statistics;
@@ -93,10 +96,30 @@ public class RocksDBColumnarKeyValueStorage
final MetricsSystem metricsSystem,
final RocksDBMetricsFactory rocksDBMetricsFactory)
throws StorageException {
this(configuration, segments, List.of(), metricsSystem, rocksDBMetricsFactory);
}
public RocksDBColumnarKeyValueStorage(
final RocksDBConfiguration configuration,
final List<SegmentIdentifier> segments,
final List<SegmentIdentifier> ignorableSegments,
final MetricsSystem metricsSystem,
final RocksDBMetricsFactory rocksDBMetricsFactory)
throws StorageException {
try (final ColumnFamilyOptions columnFamilyOptions = new ColumnFamilyOptions()) {
final List<SegmentIdentifier> trimmedSegments = new ArrayList<>(segments);
final List<byte[]> existingColumnFamilies =
RocksDB.listColumnFamilies(new Options(), configuration.getDatabaseDir().toString());
// Only ignore the segment if it does not currently exist in the database
ignorableSegments.stream()
.filter(
ignorableSegment ->
existingColumnFamilies.stream()
.noneMatch(existed -> Arrays.equals(existed, ignorableSegment.getId())))
.forEach(trimmedSegments::remove);
final List<ColumnFamilyDescriptor> columnDescriptors =
segments.stream()
trimmedSegments.stream()
.map(
segment ->
new ColumnFamilyDescriptor(
@@ -147,7 +170,7 @@ public class RocksDBColumnarKeyValueStorage
options, configuration.getDatabaseDir().toString(), columnDescriptors, columnHandles);
metrics = rocksDBMetricsFactory.create(metricsSystem, configuration, db, stats);
final Map<Bytes, String> segmentsById =
segments.stream()
trimmedSegments.stream()
.collect(
Collectors.toMap(
segment -> Bytes.wrap(segment.getId()), SegmentIdentifier::getName));

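For readability, the trimming performed in the new constructor can be restated as a standalone helper: an ignorable segment is dropped from the requested list only when no column family with the same id already exists on disk, which is what keeps previously created databases opening with their full set of column families. The sketch below is not a Besu API; SegmentTrimmer and trim are illustrative names that simply mirror the constructor body above:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.hyperledger.besu.plugin.services.storage.SegmentIdentifier;
import org.rocksdb.Options;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;

final class SegmentTrimmer {

  private SegmentTrimmer() {}

  // Drop an ignorable segment from the requested segments only when no column
  // family with the same id already exists in the database directory.
  static List<SegmentIdentifier> trim(
      final List<SegmentIdentifier> segments,
      final List<SegmentIdentifier> ignorableSegments,
      final String databaseDir)
      throws RocksDBException {
    try (final Options options = new Options()) {
      final List<byte[]> existingColumnFamilies =
          RocksDB.listColumnFamilies(options, databaseDir);
      final List<SegmentIdentifier> trimmed = new ArrayList<>(segments);
      ignorableSegments.stream()
          .filter(
              ignorable ->
                  existingColumnFamilies.stream()
                      .noneMatch(existing -> Arrays.equals(existing, ignorable.getId())))
          .forEach(trimmed::remove);
      return trimmed;
    }
  }
}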
@@ -15,9 +15,11 @@
package org.hyperledger.besu.plugin.services.storage.rocksdb.unsegmented;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import org.hyperledger.besu.kvstore.AbstractKeyValueStorageTest;
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem;
import org.hyperledger.besu.plugin.services.exception.StorageException;
import org.hyperledger.besu.plugin.services.storage.KeyValueStorage;
import org.hyperledger.besu.plugin.services.storage.SegmentIdentifier;
import org.hyperledger.besu.plugin.services.storage.rocksdb.RocksDBMetricsFactory;
@@ -29,7 +31,9 @@ import org.hyperledger.besu.services.kvstore.SegmentedKeyValueStorage.Transactio
import org.hyperledger.besu.services.kvstore.SegmentedKeyValueStorageAdapter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.Consumer;
@@ -177,9 +181,91 @@ public class RocksDBColumnarKeyValueStorageTest extends AbstractKeyValueStorageT
store.close();
}
@Test
public void dbShouldIgnoreExperimentalSegmentsIfNotExisted() throws Exception {
final Path testPath = folder.newFolder().toPath();
// Creating a new db should ignore the experimental column family
SegmentedKeyValueStorage<RocksDbSegmentIdentifier> store =
createSegmentedStore(
testPath,
Arrays.asList(TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL),
List.of(TestSegment.EXPERIMENTAL));
store.close();
// new db will be backward compatible with db without knowledge of experimental column family
store =
createSegmentedStore(testPath, Arrays.asList(TestSegment.FOO, TestSegment.BAR), List.of());
store.close();
}
@Test
public void dbShouldNotIgnoreExperimentalSegmentsIfExisted() throws Exception {
final Path testPath = folder.newFolder().toPath();
// Create new db with experimental column family
SegmentedKeyValueStorage<RocksDbSegmentIdentifier> store =
createSegmentedStore(
testPath,
Arrays.asList(TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL),
List.of());
store.close();
// new db will not be backward compatible with db without knowledge of experimental column
// family
try {
createSegmentedStore(testPath, Arrays.asList(TestSegment.FOO, TestSegment.BAR), List.of());
fail("DB without knowledge of experimental column family should fail");
} catch (StorageException e) {
assertThat(e.getMessage()).contains("Column families not opened");
}
// Even if the column family is marked as ignored, as long as it exists, it will not be ignored
// and the db opens normally
store =
createSegmentedStore(
testPath,
Arrays.asList(TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL),
List.of(TestSegment.EXPERIMENTAL));
store.close();
}
@Test
public void dbWillBeBackwardIncompatibleAfterExperimentalSegmentsAreAdded() throws Exception {
final Path testPath = folder.newFolder().toPath();
// Creating a new db should ignore the experimental column family
SegmentedKeyValueStorage<RocksDbSegmentIdentifier> store =
createSegmentedStore(
testPath,
Arrays.asList(TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL),
List.of(TestSegment.EXPERIMENTAL));
store.close();
// new db will be backward compatible with db without knowledge of experimental column family
store =
createSegmentedStore(testPath, Arrays.asList(TestSegment.FOO, TestSegment.BAR), List.of());
store.close();
// Creating a new db without ignoring the experimental column family will add the column family to the db
store =
createSegmentedStore(
testPath,
Arrays.asList(TestSegment.FOO, TestSegment.BAR, TestSegment.EXPERIMENTAL),
List.of());
store.close();
// Now, the db will be backward incompatible with db without knowledge of experimental column
// family
try {
createSegmentedStore(testPath, Arrays.asList(TestSegment.FOO, TestSegment.BAR), List.of());
fail("DB without knowledge of experimental column family should fail");
} catch (StorageException e) {
assertThat(e.getMessage()).contains("Column families not opened");
}
}
public enum TestSegment implements SegmentIdentifier {
FOO(new byte[] {1}),
BAR(new byte[] {2});
BAR(new byte[] {2}),
EXPERIMENTAL(new byte[] {3});
private final byte[] id;
private final String nameAsUtf8;
@@ -209,6 +295,18 @@ public class RocksDBColumnarKeyValueStorageTest extends AbstractKeyValueStorageT
RocksDBMetricsFactory.PUBLIC_ROCKS_DB_METRICS);
}
private SegmentedKeyValueStorage<RocksDbSegmentIdentifier> createSegmentedStore(
final Path path,
final List<SegmentIdentifier> segments,
final List<SegmentIdentifier> ignorableSegments) {
return new RocksDBColumnarKeyValueStorage(
new RocksDBConfigurationBuilder().databaseDir(path).build(),
segments,
ignorableSegments,
new NoOpMetricsSystem(),
RocksDBMetricsFactory.PUBLIC_ROCKS_DB_METRICS);
}
@Override
protected KeyValueStorage createStore() throws Exception {
return new SegmentedKeyValueStorageAdapter<>(TestSegment.FOO, createSegmentedStore());
