Refactoring Rocksdb as a module (#1889)

Signed-off-by: Adrian Sutton <adrian.sutton@consensys.net>
pull/2/head
CJ Hare 5 years ago committed by GitHub
parent ccb939fb77
commit dec01db6f9
  1. 1
      acceptance-tests/build.gradle
  2. 3
      acceptance-tests/src/test/java/tech/pegasys/pantheon/tests/acceptance/dsl/node/ProcessPantheonNodeRunner.java
  3. 42
      acceptance-tests/src/test/java/tech/pegasys/pantheon/tests/acceptance/dsl/node/ThreadPantheonNodeRunner.java
  4. 33
      acceptance-tests/src/test/java/tech/pegasys/pantheon/tests/acceptance/dsl/privacy/PrivacyNode.java
  5. 5
      ethereum/core/build.gradle
  6. 10
      ethereum/core/src/jmh/java/tech/pegasys/pantheon/ethereum/vm/operations/OperationBenchmarkHelper.java
  7. 47
      ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/core/PrivacyParameters.java
  8. 21
      ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/privacy/PrivateStateKeyValueStorage.java
  9. 14
      ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/privacy/PrivateTransactionKeyValueStorage.java
  10. 2
      ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/storage/StorageProvider.java
  11. 28
      ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/storage/keyvalue/KeyValueSegmentIdentifier.java
  12. 14
      ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/storage/keyvalue/KeyValueStoragePrefixedKeyBlockchainStorage.java
  13. 2
      ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/storage/keyvalue/KeyValueStorageProvider.java
  14. 72
      ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/storage/keyvalue/KeyValueStorageProviderBuilder.java
  15. 152
      ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/storage/keyvalue/RocksDbStorageProvider.java
  16. 25
      ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/storage/keyvalue/WorldStateKeyValueStorage.java
  17. 23
      ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/storage/keyvalue/WorldStatePreimageKeyValueStorage.java
  18. 19
      ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/worldstate/MarkSweepPruner.java
  19. 2
      ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/worldstate/WorldStateStorage.java
  20. 2
      ethereum/core/src/test-support/java/tech/pegasys/pantheon/ethereum/core/ExecutionContextTestFixture.java
  21. 2
      ethereum/core/src/test-support/java/tech/pegasys/pantheon/ethereum/core/InMemoryStorageProvider.java
  22. 2
      ethereum/core/src/test/java/tech/pegasys/pantheon/ethereum/chain/DefaultBlockchainTest.java
  23. 104
      ethereum/core/src/test/java/tech/pegasys/pantheon/ethereum/storage/keyvalue/RocksDbStorageProviderTest.java
  24. 2
      ethereum/core/src/test/java/tech/pegasys/pantheon/ethereum/worldstate/DefaultMutableWorldStateTest.java
  25. 23
      ethereum/core/src/test/java/tech/pegasys/pantheon/ethereum/worldstate/MarkSweepPrunerTest.java
  26. 3
      ethereum/eth/build.gradle
  27. 41
      ethereum/eth/src/jmh/java/tech/pegasys/pantheon/ethereum/eth/sync/worldstate/WorldStateDownloaderBenchmark.java
  28. 1
      ethereum/retesteth/build.gradle
  29. 9
      ethereum/trie/src/main/java/tech/pegasys/pantheon/ethereum/trie/KeyValueMerkleStorage.java
  30. 2
      ethereum/trie/src/test/java/tech/pegasys/pantheon/ethereum/trie/AbstractMerklePatriciaTrieTest.java
  31. 8
      ethereum/trie/src/test/java/tech/pegasys/pantheon/ethereum/trie/StoredMerklePatriciaTrieTest.java
  32. 71
      ethereum/trie/src/test/java/tech/pegasys/pantheon/ethereum/trie/TrieNodeDecoderTest.java
  33. 5
      gradle/check-licenses.gradle
  34. 1
      gradle/versions.gradle
  35. 1
      metrics/rocksdb/build.gradle
  36. 1
      pantheon/build.gradle
  37. 1
      pantheon/src/main/java/tech/pegasys/pantheon/cli/DefaultCommandValues.java
  38. 84
      pantheon/src/main/java/tech/pegasys/pantheon/cli/PantheonCommand.java
  39. 2
      pantheon/src/main/java/tech/pegasys/pantheon/cli/subcommands/blocks/BlocksSubCommand.java
  40. 30
      pantheon/src/main/java/tech/pegasys/pantheon/controller/PantheonControllerBuilder.java
  41. 38
      pantheon/src/main/java/tech/pegasys/pantheon/services/PantheonConfigurationImpl.java
  42. 52
      pantheon/src/main/java/tech/pegasys/pantheon/services/PantheonPluginContextImpl.java
  43. 63
      pantheon/src/main/java/tech/pegasys/pantheon/services/StorageServiceImpl.java
  44. 32
      pantheon/src/test/java/tech/pegasys/pantheon/PrivacyTest.java
  45. 31
      pantheon/src/test/java/tech/pegasys/pantheon/RunnerTest.java
  46. 35
      pantheon/src/test/java/tech/pegasys/pantheon/cli/CommandTestAbstract.java
  47. 76
      pantheon/src/test/java/tech/pegasys/pantheon/cli/PantheonCommandTest.java
  48. 51
      pantheon/src/test/java/tech/pegasys/pantheon/cli/options/RocksDBOptionsTest.java
  49. 3
      pantheon/src/test/resources/everything_config.toml
  50. 2
      plugin-api/build.gradle
  51. 8
      plugin-api/src/main/java/tech/pegasys/pantheon/plugin/services/storage/KeyValueStorageFactory.java
  52. 14
      plugins/build.gradle
  53. 26
      plugins/rocksdb/build.gradle
  54. 44
      plugins/rocksdb/src/main/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/RocksDBKeyValuePrivacyStorageFactory.java
  55. 151
      plugins/rocksdb/src/main/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/RocksDBKeyValueStorageFactory.java
  56. 14
      plugins/rocksdb/src/main/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/RocksDBMetrics.java
  57. 117
      plugins/rocksdb/src/main/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/RocksDBPlugin.java
  58. 2
      plugins/rocksdb/src/main/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/RocksDbUtil.java
  59. 10
      plugins/rocksdb/src/main/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/configuration/DatabaseMetadata.java
  60. 51
      plugins/rocksdb/src/main/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/configuration/RocksDBCLIOptions.java
  61. 64
      plugins/rocksdb/src/main/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/configuration/RocksDBConfiguration.java
  62. 78
      plugins/rocksdb/src/main/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/configuration/RocksDBConfigurationBuilder.java
  63. 48
      plugins/rocksdb/src/main/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/configuration/RocksDBFactoryConfiguration.java
  64. 99
      plugins/rocksdb/src/main/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/segmented/RocksDBColumnarKeyValueStorage.java
  65. 144
      plugins/rocksdb/src/main/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/unsegmented/RocksDBKeyValueStorage.java
  66. 82
      plugins/rocksdb/src/main/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/unsegmented/RocksDBTransaction.java
  67. 115
      plugins/rocksdb/src/test/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/RocksDBCLIOptionsTest.java
  68. 141
      plugins/rocksdb/src/test/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/RocksDBKeyValueStorageFactoryTest.java
  69. 33
      plugins/rocksdb/src/test/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/RocksDBMetricsTest.java
  70. 130
      plugins/rocksdb/src/test/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/segmented/RocksDBKeyValueStorageTest.java
  71. 72
      plugins/rocksdb/src/test/java/tech/pegasys/pantheon/plugin/services/storage/rocksdb/unsegmented/RocksDBColumnarKeyValueStorageTest.java
  72. 3
      services/kvstore/build.gradle
  73. 55
      services/kvstore/src/main/java/tech/pegasys/pantheon/services/kvstore/InMemoryKeyValueStorage.java
  74. 126
      services/kvstore/src/main/java/tech/pegasys/pantheon/services/kvstore/KeyValueStorage.java
  75. 56
      services/kvstore/src/main/java/tech/pegasys/pantheon/services/kvstore/KeyValueStorageTransactionTransitionValidatorDecorator.java
  76. 53
      services/kvstore/src/main/java/tech/pegasys/pantheon/services/kvstore/LimitedInMemoryKeyValueStorage.java
  77. 128
      services/kvstore/src/main/java/tech/pegasys/pantheon/services/kvstore/RocksDbConfiguration.java
  78. 68
      services/kvstore/src/main/java/tech/pegasys/pantheon/services/kvstore/SegmentedKeyValueStorage.java
  79. 33
      services/kvstore/src/main/java/tech/pegasys/pantheon/services/kvstore/SegmentedKeyValueStorageAdapter.java
  80. 56
      services/kvstore/src/main/java/tech/pegasys/pantheon/services/kvstore/SegmentedKeyValueStorageTransactionTransitionValidatorDecorator.java
  81. 384
      services/kvstore/src/test/java/tech/pegasys/pantheon/services/kvstore/AbstractKeyValueStorageTest.java
  82. 5
      services/kvstore/src/test/java/tech/pegasys/pantheon/services/kvstore/InMemoryKeyValueStorageTest.java
  83. 16
      services/kvstore/src/test/java/tech/pegasys/pantheon/services/kvstore/LimitedInMemoryKeyValueStorageTest.java
  84. 1
      services/tasks/build.gradle
  85. 2
      settings.gradle
  86. 5
      testutil/build.gradle
  87. 398
      testutil/src/main/java/tech/pegasys/pantheon/kvstore/AbstractKeyValueStorageTest.java

@ -33,6 +33,7 @@ dependencies {
testImplementation project(':metrics:core')
testImplementation project(':pantheon')
testImplementation project(path: ':pantheon', configuration: 'testArtifacts')
testImplementation project(':plugins:rocksdb')
testImplementation project(':plugin-api')
testImplementation project(':services:kvstore')
testImplementation project(':testutil')

@ -228,6 +228,9 @@ public class ProcessPantheonNodeRunner implements PantheonNodeRunner {
});
params.addAll(node.getExtraCLIOptions());
params.add("--key-value-storage");
params.add("rocksdb");
LOG.info("Creating pantheon process with params {}", params);
final ProcessBuilder processBuilder =
new ProcessBuilder(params)

@ -26,14 +26,19 @@ import tech.pegasys.pantheon.ethereum.eth.transactions.TransactionPoolConfigurat
import tech.pegasys.pantheon.ethereum.graphql.GraphQLConfiguration;
import tech.pegasys.pantheon.ethereum.p2p.peers.EnodeURL;
import tech.pegasys.pantheon.ethereum.permissioning.PermissioningConfiguration;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueStorageProvider;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueStorageProviderBuilder;
import tech.pegasys.pantheon.metrics.ObservableMetricsSystem;
import tech.pegasys.pantheon.metrics.prometheus.PrometheusMetricsSystem;
import tech.pegasys.pantheon.plugin.services.PantheonConfiguration;
import tech.pegasys.pantheon.plugin.services.PantheonEvents;
import tech.pegasys.pantheon.plugin.services.PicoCLIOptions;
import tech.pegasys.pantheon.plugin.services.StorageService;
import tech.pegasys.pantheon.services.PantheonConfigurationImpl;
import tech.pegasys.pantheon.services.PantheonEventsImpl;
import tech.pegasys.pantheon.services.PantheonPluginContextImpl;
import tech.pegasys.pantheon.services.PicoCLIOptionsImpl;
import tech.pegasys.pantheon.services.kvstore.RocksDbConfiguration;
import tech.pegasys.pantheon.services.StorageServiceImpl;
import java.io.File;
import java.io.IOException;
@ -63,8 +68,15 @@ public class ThreadPantheonNodeRunner implements PantheonNodeRunner {
private final Map<Node, PantheonPluginContextImpl> pantheonPluginContextMap = new HashMap<>();
private PantheonPluginContextImpl buildPluginContext(final PantheonNode node) {
private PantheonPluginContextImpl buildPluginContext(
final PantheonNode node,
final StorageServiceImpl storageService,
final PantheonConfiguration commonPluginConfiguration) {
final CommandLine commandLine = new CommandLine(CommandSpec.create());
final PantheonPluginContextImpl pantheonPluginContext = new PantheonPluginContextImpl();
pantheonPluginContext.addService(StorageService.class, storageService);
pantheonPluginContext.addService(PicoCLIOptions.class, new PicoCLIOptionsImpl(commandLine));
final Path pluginsPath = node.homeDirectory().resolve("plugins");
final File pluginsDirFile = pluginsPath.toFile();
if (!pluginsDirFile.isDirectory()) {
@ -73,22 +85,26 @@ public class ThreadPantheonNodeRunner implements PantheonNodeRunner {
}
System.setProperty("pantheon.plugins.dir", pluginsPath.toString());
pantheonPluginContext.registerPlugins(pluginsPath);
commandLine.parseArgs(node.getConfiguration().getExtraCLIOptions().toArray(new String[0]));
pantheonPluginContext.addService(PantheonConfiguration.class, commonPluginConfiguration);
return pantheonPluginContext;
}
@Override
@SuppressWarnings("UnstableApiUsage")
public void startNode(final PantheonNode node) {
if (nodeExecutor == null || nodeExecutor.isShutdown()) {
nodeExecutor = Executors.newCachedThreadPool();
}
final CommandLine commandLine = new CommandLine(CommandSpec.create());
final StorageServiceImpl storageService = new StorageServiceImpl();
final PantheonConfiguration commonPluginConfiguration =
new PantheonConfigurationImpl(Files.createTempDir().toPath());
final PantheonPluginContextImpl pantheonPluginContext =
pantheonPluginContextMap.computeIfAbsent(node, n -> buildPluginContext(node));
pantheonPluginContext.addService(PicoCLIOptions.class, new PicoCLIOptionsImpl(commandLine));
commandLine.parseArgs(node.getConfiguration().getExtraCLIOptions().toArray(new String[0]));
pantheonPluginContextMap.computeIfAbsent(
node, n -> buildPluginContext(node, storageService, commonPluginConfiguration));
final ObservableMetricsSystem metricsSystem =
PrometheusMetricsSystem.init(node.getMetricsConfiguration());
@ -103,7 +119,13 @@ public class ThreadPantheonNodeRunner implements PantheonNodeRunner {
final EthNetworkConfig ethNetworkConfig = networkConfigBuilder.build();
final PantheonControllerBuilder<?> builder =
new PantheonController.Builder().fromEthNetworkConfig(ethNetworkConfig);
final Path tempDir = Files.createTempDir().toPath();
final KeyValueStorageProvider storageProvider =
new KeyValueStorageProviderBuilder()
.withStorageFactory(storageService.getByName("rocksdb"))
.withCommonConfiguration(commonPluginConfiguration)
.withMetricsSystem(metricsSystem)
.build();
final PantheonController<?> pantheonController;
try {
@ -116,10 +138,10 @@ public class ThreadPantheonNodeRunner implements PantheonNodeRunner {
.nodePrivateKeyFile(KeyPairUtil.getDefaultKeyFile(node.homeDirectory()))
.metricsSystem(metricsSystem)
.transactionPoolConfiguration(TransactionPoolConfiguration.builder().build())
.rocksDbConfiguration(RocksDbConfiguration.builder().databaseDir(tempDir).build())
.ethProtocolConfiguration(EthProtocolConfiguration.defaultConfig())
.clock(Clock.systemUTC())
.isRevertReasonEnabled(node.isRevertReasonEnabled())
.storageProvider(storageProvider)
.build();
} catch (final IOException e) {
throw new RuntimeException("Error building PantheonController", e);

@ -21,6 +21,13 @@ import tech.pegasys.pantheon.enclave.types.SendRequest;
import tech.pegasys.pantheon.enclave.types.SendRequestLegacy;
import tech.pegasys.pantheon.ethereum.core.Address;
import tech.pegasys.pantheon.ethereum.core.PrivacyParameters;
import tech.pegasys.pantheon.ethereum.storage.StorageProvider;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueSegmentIdentifier;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueStorageProviderBuilder;
import tech.pegasys.pantheon.metrics.noop.NoOpMetricsSystem;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.RocksDBKeyValuePrivacyStorageFactory;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBFactoryConfiguration;
import tech.pegasys.pantheon.services.PantheonConfigurationImpl;
import tech.pegasys.pantheon.tests.acceptance.dsl.condition.Condition;
import tech.pegasys.pantheon.tests.acceptance.dsl.node.PantheonNode;
import tech.pegasys.pantheon.tests.acceptance.dsl.node.PantheonNodeRunner;
@ -44,7 +51,12 @@ import org.apache.logging.log4j.Logger;
import org.awaitility.Awaitility;
public class PrivacyNode implements AutoCloseable {
private static final Logger LOG = LogManager.getLogger();
private static final int MAX_OPEN_FILES = 1024;
private static final long CACHE_CAPACITY = 8388608;
private static final int MAX_BACKGROUND_COMPACTIONS = 4;
private static final int BACKGROUND_THREAD_COUNT = 4;
private final OrionTestHarness orion;
private final PantheonNode pantheon;
@ -129,13 +141,16 @@ public class PrivacyNode implements AutoCloseable {
orion.start();
final PrivacyParameters privacyParameters;
try {
final Path dataDir = Files.createTempDirectory("acctest-privacy");
privacyParameters =
new PrivacyParameters.Builder()
.setEnabled(true)
.setEnclaveUrl(orion.clientUrl())
.setEnclavePublicKeyUsingFile(orion.getConfig().publicKeys().get(0).toFile())
.setDataDir(Files.createTempDirectory("acctest-privacy"))
.setStorageProvider(createKeyValueStorageProvider(dataDir))
.setPrivateKeyPath(KeyPairUtil.getDefaultKeyFile(pantheon.homeDirectory()).toPath())
.build();
} catch (IOException e) {
@ -188,4 +203,20 @@ public class PrivacyNode implements AutoCloseable {
public NodeConfiguration getConfiguration() {
return pantheon.getConfiguration();
}
/**
 * Builds the RocksDB-backed {@link StorageProvider} used to hold this privacy node's private
 * state, configured with the class-level tuning constants (MAX_OPEN_FILES, CACHE_CAPACITY, etc.).
 *
 * @param dbLocation directory the RocksDB database files are created in
 * @return a storage provider backed by a privacy-aware RocksDB factory with no-op metrics
 */
private StorageProvider createKeyValueStorageProvider(final Path dbLocation) {
return new KeyValueStorageProviderBuilder()
.withStorageFactory(
new RocksDBKeyValuePrivacyStorageFactory(
// Supplier is evaluated lazily by the factory when the storage is first created.
() ->
new RocksDBFactoryConfiguration(
MAX_OPEN_FILES,
MAX_BACKGROUND_COMPACTIONS,
BACKGROUND_THREAD_COUNT,
CACHE_CAPACITY),
// Register every known segment so each gets its own column family.
Arrays.asList(KeyValueSegmentIdentifier.values())))
.withCommonConfiguration(new PantheonConfigurationImpl(dbLocation))
// Acceptance-test node: metrics are deliberately discarded.
.withMetricsSystem(new NoOpMetricsSystem())
.build();
}
}

@ -35,6 +35,9 @@ dependencies {
implementation project(':plugin-api')
implementation project(':services:kvstore')
// Runtime dependency gets the Plugin included in the distribution
runtime project(":plugins:rocksdb")
implementation 'com.fasterxml.jackson.core:jackson-databind'
implementation 'com.google.guava:guava'
implementation 'io.vertx:vertx-core'
@ -74,6 +77,8 @@ dependencies {
jmhImplementation project(':ethereum:trie')
jmhImplementation project(':metrics:core')
jmhImplementation project(':services:kvstore')
jmhImplementation project(':plugin-api')
jmhImplementation project(':plugins:rocksdb')
jmhImplementation project(':util')
jmhImplementation 'com.google.guava:guava'

@ -22,9 +22,9 @@ import tech.pegasys.pantheon.ethereum.core.ExecutionContextTestFixture;
import tech.pegasys.pantheon.ethereum.core.MessageFrameTestFixture;
import tech.pegasys.pantheon.ethereum.vm.MessageFrame;
import tech.pegasys.pantheon.metrics.noop.NoOpMetricsSystem;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.RocksDbConfiguration;
import tech.pegasys.pantheon.services.kvstore.RocksDbKeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBConfigurationBuilder;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.unsegmented.RocksDBKeyValueStorage;
import tech.pegasys.pantheon.util.uint.UInt256;
import java.io.IOException;
@ -52,8 +52,8 @@ public class OperationBenchmarkHelper {
public static OperationBenchmarkHelper create() throws IOException {
final Path storageDirectory = Files.createTempDirectory("benchmark");
final KeyValueStorage keyValueStorage =
RocksDbKeyValueStorage.create(
RocksDbConfiguration.builder().databaseDir(storageDirectory).build(),
new RocksDBKeyValueStorage(
new RocksDBConfigurationBuilder().databaseDir(storageDirectory).build(),
new NoOpMetricsSystem());
final ExecutionContextTestFixture executionContext =

@ -19,13 +19,9 @@ import tech.pegasys.pantheon.crypto.SECP256K1.KeyPair;
import tech.pegasys.pantheon.ethereum.privacy.PrivateStateStorage;
import tech.pegasys.pantheon.ethereum.privacy.PrivateTransactionStorage;
import tech.pegasys.pantheon.ethereum.storage.StorageProvider;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.RocksDbStorageProvider;
import tech.pegasys.pantheon.ethereum.worldstate.WorldStateArchive;
import tech.pegasys.pantheon.ethereum.worldstate.WorldStatePreimageStorage;
import tech.pegasys.pantheon.ethereum.worldstate.WorldStateStorage;
import tech.pegasys.pantheon.metrics.noop.NoOpMetricsSystem;
import tech.pegasys.pantheon.plugin.services.MetricsSystem;
import tech.pegasys.pantheon.services.kvstore.RocksDbConfiguration;
import java.io.File;
import java.io.IOException;
@ -36,6 +32,7 @@ import java.util.Optional;
import com.google.common.io.Files;
public class PrivacyParameters {
public static final URI DEFAULT_ENCLAVE_URL = URI.create("http://localhost:8888");
public static final PrivacyParameters DEFAULT = new PrivacyParameters();
@ -138,16 +135,14 @@ public class PrivacyParameters {
}
public static class Builder {
private final String PRIVATE_DATABASE_PATH = "private";
private boolean enabled;
private URI enclaveUrl;
private Integer privacyAddress = Address.PRIVACY;
private MetricsSystem metricsSystem = new NoOpMetricsSystem();
private Path dataDir;
private File enclavePublicKeyFile;
private String enclavePublicKey;
private Path privateKeyPath;
private StorageProvider storageProvider;
public Builder setPrivacyAddress(final Integer privacyAddress) {
this.privacyAddress = privacyAddress;
@ -164,13 +159,8 @@ public class PrivacyParameters {
return this;
}
public Builder setMetricsSystem(final MetricsSystem metricsSystem) {
this.metricsSystem = metricsSystem;
return this;
}
public Builder setDataDir(final Path dataDir) {
this.dataDir = dataDir;
public Builder setStorageProvider(final StorageProvider privateStorageProvider) {
this.storageProvider = privateStorageProvider;
return this;
}
@ -180,32 +170,23 @@ public class PrivacyParameters {
}
public PrivacyParameters build() throws IOException {
PrivacyParameters config = new PrivacyParameters();
final PrivacyParameters config = new PrivacyParameters();
if (enabled) {
Path privateDbPath = dataDir.resolve(PRIVATE_DATABASE_PATH);
StorageProvider privateStorageProvider =
RocksDbStorageProvider.create(
RocksDbConfiguration.builder()
.databaseDir(privateDbPath)
.label("private_state")
.build(),
metricsSystem);
WorldStateStorage privateWorldStateStorage =
privateStorageProvider.createWorldStateStorage();
WorldStatePreimageStorage privatePreimageStorage =
privateStorageProvider.createWorldStatePreimageStorage();
WorldStateArchive privateWorldStateArchive =
final WorldStateStorage privateWorldStateStorage =
storageProvider.createWorldStateStorage();
final WorldStatePreimageStorage privatePreimageStorage =
storageProvider.createWorldStatePreimageStorage();
final WorldStateArchive privateWorldStateArchive =
new WorldStateArchive(privateWorldStateStorage, privatePreimageStorage);
PrivateTransactionStorage privateTransactionStorage =
privateStorageProvider.createPrivateTransactionStorage();
PrivateStateStorage privateStateStorage =
privateStorageProvider.createPrivateStateStorage();
final PrivateTransactionStorage privateTransactionStorage =
storageProvider.createPrivateTransactionStorage();
final PrivateStateStorage privateStateStorage = storageProvider.createPrivateStateStorage();
config.setPrivateWorldStateArchive(privateWorldStateArchive);
config.setEnclavePublicKey(enclavePublicKey);
config.setEnclavePublicKeyFile(enclavePublicKeyFile);
config.setPrivateStorageProvider(privateStorageProvider);
config.setPrivateStorageProvider(storageProvider);
config.setPrivateTransactionStorage(privateTransactionStorage);
config.setPrivateStateStorage(privateStateStorage);
if (privateKeyPath != null) {

@ -13,7 +13,8 @@
package tech.pegasys.pantheon.ethereum.privacy;
import tech.pegasys.pantheon.ethereum.core.Hash;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
import tech.pegasys.pantheon.util.bytes.Bytes32;
import tech.pegasys.pantheon.util.bytes.BytesValue;
@ -29,10 +30,13 @@ public class PrivateStateKeyValueStorage implements PrivateStateStorage {
@Override
public Optional<Hash> getPrivateAccountState(final BytesValue privacyId) {
if (keyValueStorage.get(privacyId).isPresent())
return Optional.of(
Hash.wrap(Bytes32.wrap(keyValueStorage.get(privacyId).get().extractArray())));
else return Optional.empty();
final byte[] id = privacyId.getArrayUnsafe();
if (keyValueStorage.get(id).isPresent()) {
return Optional.of(Hash.wrap(Bytes32.wrap(keyValueStorage.get(id).get())));
} else {
return Optional.empty();
}
}
@Override
@ -46,16 +50,17 @@ public class PrivateStateKeyValueStorage implements PrivateStateStorage {
}
public static class Updater implements PrivateStateStorage.Updater {
private final KeyValueStorage.Transaction transaction;
private Updater(final KeyValueStorage.Transaction transaction) {
private final KeyValueStorageTransaction transaction;
private Updater(final KeyValueStorageTransaction transaction) {
this.transaction = transaction;
}
@Override
public PrivateStateStorage.Updater putPrivateAccountState(
final BytesValue privacyId, final Hash privateStateHash) {
transaction.put(privacyId, BytesValue.wrap(privateStateHash.extractArray()));
transaction.put(privacyId.getArrayUnsafe(), privateStateHash.extractArray());
return this;
}

@ -17,7 +17,8 @@ import static java.nio.charset.StandardCharsets.UTF_8;
import tech.pegasys.pantheon.ethereum.core.Log;
import tech.pegasys.pantheon.ethereum.core.LogSeries;
import tech.pegasys.pantheon.ethereum.rlp.RLP;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
import tech.pegasys.pantheon.util.bytes.Bytes32;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import tech.pegasys.pantheon.util.bytes.BytesValues;
@ -57,7 +58,9 @@ public class PrivateTransactionKeyValueStorage implements PrivateTransactionStor
}
private Optional<BytesValue> get(final BytesValue key, final BytesValue keySuffix) {
return keyValueStorage.get(BytesValues.concatenate(key, keySuffix));
return keyValueStorage
.get(BytesValues.concatenate(key, keySuffix).getArrayUnsafe())
.map(BytesValue::wrap);
}
@Override
@ -67,9 +70,9 @@ public class PrivateTransactionKeyValueStorage implements PrivateTransactionStor
public static class Updater implements PrivateTransactionStorage.Updater {
private final KeyValueStorage.Transaction transaction;
private final KeyValueStorageTransaction transaction;
private Updater(final KeyValueStorage.Transaction transaction) {
private Updater(final KeyValueStorageTransaction transaction) {
this.transaction = transaction;
}
@ -88,7 +91,8 @@ public class PrivateTransactionKeyValueStorage implements PrivateTransactionStor
}
private void set(final BytesValue key, final BytesValue keySuffix, final BytesValue value) {
transaction.put(BytesValues.concatenate(key, keySuffix), value);
transaction.put(
BytesValues.concatenate(key, keySuffix).getArrayUnsafe(), value.getArrayUnsafe());
}
@Override

@ -18,7 +18,7 @@ import tech.pegasys.pantheon.ethereum.privacy.PrivateStateStorage;
import tech.pegasys.pantheon.ethereum.privacy.PrivateTransactionStorage;
import tech.pegasys.pantheon.ethereum.worldstate.WorldStatePreimageStorage;
import tech.pegasys.pantheon.ethereum.worldstate.WorldStateStorage;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import java.io.Closeable;

@ -0,0 +1,28 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.ethereum.storage.keyvalue;
import tech.pegasys.pantheon.plugin.services.storage.SegmentIdentifier;
/**
 * The set of named storage segments Pantheon partitions its key-value data into. Each constant
 * maps to an isolated keyspace (e.g. a RocksDB column family) supplied by a
 * {@code KeyValueStorageFactory}.
 */
public enum KeyValueSegmentIdentifier implements SegmentIdentifier {
BLOCKCHAIN,
WORLD_STATE,
PRIVATE_TRANSACTIONS,
PRIVATE_STATE,
PRUNING_STATE;

// The enum constant's own name doubles as the segment's stable identifier.
@Override
public String getName() {
return name();
}
}

@ -20,7 +20,8 @@ import tech.pegasys.pantheon.ethereum.core.BlockHeaderFunctions;
import tech.pegasys.pantheon.ethereum.core.Hash;
import tech.pegasys.pantheon.ethereum.core.TransactionReceipt;
import tech.pegasys.pantheon.ethereum.rlp.RLP;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
import tech.pegasys.pantheon.util.bytes.Bytes32;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import tech.pegasys.pantheon.util.bytes.BytesValues;
@ -117,14 +118,14 @@ public class KeyValueStoragePrefixedKeyBlockchainStorage implements BlockchainSt
}
private Optional<BytesValue> get(final BytesValue prefix, final BytesValue key) {
return storage.get(BytesValues.concatenate(prefix, key));
return storage.get(BytesValues.concatenate(prefix, key).getArrayUnsafe()).map(BytesValue::wrap);
}
public static class Updater implements BlockchainStorage.Updater {
private final KeyValueStorage.Transaction transaction;
private final KeyValueStorageTransaction transaction;
private Updater(final KeyValueStorage.Transaction transaction) {
private Updater(final KeyValueStorageTransaction transaction) {
this.transaction = transaction;
}
@ -193,11 +194,12 @@ public class KeyValueStoragePrefixedKeyBlockchainStorage implements BlockchainSt
}
private void set(final BytesValue prefix, final BytesValue key, final BytesValue value) {
transaction.put(BytesValues.concatenate(prefix, key), value);
transaction.put(
BytesValues.concatenate(prefix, key).getArrayUnsafe(), value.getArrayUnsafe());
}
private void remove(final BytesValue prefix, final BytesValue key) {
transaction.remove(BytesValues.concatenate(prefix, key));
transaction.remove(BytesValues.concatenate(prefix, key).getArrayUnsafe());
}
private BytesValue rlpEncode(final List<TransactionReceipt> receipts) {

@ -22,7 +22,7 @@ import tech.pegasys.pantheon.ethereum.privacy.PrivateTransactionStorage;
import tech.pegasys.pantheon.ethereum.storage.StorageProvider;
import tech.pegasys.pantheon.ethereum.worldstate.WorldStatePreimageStorage;
import tech.pegasys.pantheon.ethereum.worldstate.WorldStateStorage;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import java.io.IOException;

@ -0,0 +1,72 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.ethereum.storage.keyvalue;
import static com.google.common.base.Preconditions.checkNotNull;
import static tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.BLOCKCHAIN;
import static tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.PRIVATE_STATE;
import static tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.PRIVATE_TRANSACTIONS;
import static tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.PRUNING_STATE;
import static tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.WORLD_STATE;
import tech.pegasys.pantheon.plugin.services.MetricsSystem;
import tech.pegasys.pantheon.plugin.services.PantheonConfiguration;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageFactory;
import tech.pegasys.pantheon.services.kvstore.LimitedInMemoryKeyValueStorage;
/**
 * Fluent builder assembling a {@link KeyValueStorageProvider} from a pluggable
 * {@link KeyValueStorageFactory}, the node's common plugin configuration, and a metrics system.
 *
 * <p>All three collaborators are mandatory; {@link #build()} fails fast when any is absent.
 */
public class KeyValueStorageProviderBuilder {

  // Capacity of the in-memory world-state preimage cache.
  private static final long DEFAULT_WORLD_STATE_PRE_IMAGE_CACHE_SIZE = 5_000L;

  private KeyValueStorageFactory storageFactory;
  private PantheonConfiguration commonConfiguration;
  private MetricsSystem metricsSystem;

  /** Sets the factory that creates each segment's backing key-value store. */
  public KeyValueStorageProviderBuilder withStorageFactory(
      final KeyValueStorageFactory storageFactory) {
    this.storageFactory = storageFactory;
    return this;
  }

  /** Sets the plugin-level configuration shared by all created stores. */
  public KeyValueStorageProviderBuilder withCommonConfiguration(
      final PantheonConfiguration commonConfiguration) {
    this.commonConfiguration = commonConfiguration;
    return this;
  }

  /** Sets the metrics system passed to every created store. */
  public KeyValueStorageProviderBuilder withMetricsSystem(final MetricsSystem metricsSystem) {
    this.metricsSystem = metricsSystem;
    return this;
  }

  /**
   * Creates the provider, opening one store per segment plus an in-memory preimage cache.
   *
   * @return the assembled provider
   * @throws NullPointerException if any collaborator was not supplied
   */
  public KeyValueStorageProvider build() {
    checkNotNull(storageFactory, "Cannot build a storage provider without a storage factory.");
    checkNotNull(
        commonConfiguration,
        "Cannot build a storage provider without the plugin common configuration.");
    checkNotNull(metricsSystem, "Cannot build a storage provider without a metrics system.");

    // Created before the segment stores to preserve construction order.
    final KeyValueStorage worldStatePreImageStorage =
        new LimitedInMemoryKeyValueStorage(DEFAULT_WORLD_STATE_PRE_IMAGE_CACHE_SIZE);

    return new KeyValueStorageProvider(
        segmentStorage(BLOCKCHAIN),
        segmentStorage(WORLD_STATE),
        worldStatePreImageStorage,
        segmentStorage(PRIVATE_TRANSACTIONS),
        segmentStorage(PRIVATE_STATE),
        segmentStorage(PRUNING_STATE),
        storageFactory.isSegmentIsolationSupported());
  }

  /** Opens the backing store for a single segment via the configured factory. */
  private KeyValueStorage segmentStorage(final KeyValueSegmentIdentifier segment) {
    return storageFactory.create(segment, commonConfiguration, metricsSystem);
  }
}

@ -1,152 +0,0 @@
/*
* Copyright 2018 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.ethereum.storage.keyvalue;
import static java.util.AbstractMap.SimpleEntry;
import static java.util.Arrays.asList;
import tech.pegasys.pantheon.ethereum.storage.StorageProvider;
import tech.pegasys.pantheon.plugin.services.MetricsSystem;
import tech.pegasys.pantheon.services.kvstore.ColumnarRocksDbKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.LimitedInMemoryKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.RocksDbConfiguration;
import tech.pegasys.pantheon.services.kvstore.RocksDbKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.SegmentedKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.SegmentedKeyValueStorage.Segment;
import tech.pegasys.pantheon.services.kvstore.SegmentedKeyValueStorageAdapter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
import java.util.Optional;
import java.util.TreeMap;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * Creates the RocksDB-backed {@link StorageProvider}, selecting the on-disk layout by the
 * database version recorded in the directory's metadata file.
 *
 * <p>Version 0 keeps everything in a single unsegmented RocksDB instance; version 1 (the
 * default for new databases) uses a column-family (segmented) layout.
 */
public class RocksDbStorageProvider {

  /** Capacity of the default in-memory world-state preimage cache. (Now final: it is a constant.) */
  public static final long DEFAULT_WORLD_STATE_PREIMAGE_CACHE_SIZE = 5_000L;

  private static final Logger LOG = LogManager.getLogger();

  /** Layout version written when creating a brand-new database. */
  public static final int DEFAULT_VERSION = 1;

  /** This key is the version and the value is the function used to create or load the database. */
  private static final TreeMap<Integer, StorageProviderFunction> PROVIDERS_BY_VERSION =
      new TreeMap<>(
          Map.ofEntries(
              new SimpleEntry<>(0, RocksDbStorageProvider::ofUnsegmented),
              new SimpleEntry<>(1, RocksDbStorageProvider::ofSegmented)));

  /**
   * Creates a storage provider with the default preimage cache size.
   *
   * @param rocksDbConfiguration database location and tuning options
   * @param metricsSystem sink for storage metrics
   * @return a provider matching the detected (or newly written) database version
   * @throws IOException if the database directory or metadata file cannot be created or read
   */
  public static StorageProvider create(
      final RocksDbConfiguration rocksDbConfiguration, final MetricsSystem metricsSystem)
      throws IOException {
    return create(rocksDbConfiguration, metricsSystem, DEFAULT_WORLD_STATE_PREIMAGE_CACHE_SIZE);
  }

  /**
   * Creates a storage provider, detecting an existing database by the presence of RocksDB's
   * IDENTITY file and reading its version from the metadata file; otherwise initialises a new
   * database at {@link #DEFAULT_VERSION}.
   *
   * @param rocksDbConfiguration database location and tuning options
   * @param metricsSystem sink for storage metrics
   * @param worldStatePreimageCacheSize capacity of the in-memory preimage cache
   * @return a provider matching the database version
   * @throws IOException if the directory or metadata file cannot be created or read
   * @throws IllegalStateException if the recorded version has no registered provider
   */
  public static StorageProvider create(
      final RocksDbConfiguration rocksDbConfiguration,
      final MetricsSystem metricsSystem,
      final long worldStatePreimageCacheSize)
      throws IOException {

    final Path databaseDir = rocksDbConfiguration.getDatabaseDir();
    // RocksDB writes an IDENTITY file on first open, so its presence marks an existing database.
    final boolean databaseExists = databaseDir.resolve("IDENTITY").toFile().exists();
    final int databaseVersion;
    if (databaseExists) {
      // NOTE(review): presumably fromDirectory defaults to version 0 when no metadata file
      // exists (pre-metadata databases) — confirmed by the version-0 detection test.
      databaseVersion = DatabaseMetadata.fromDirectory(databaseDir).getVersion();
      LOG.info("Existing database detected at {}. Version {}", databaseDir, databaseVersion);
    } else {
      databaseVersion = DEFAULT_VERSION;
      LOG.info(
          "No existing database detected at {}. Using version {}", databaseDir, databaseVersion);
      Files.createDirectories(databaseDir);
      new DatabaseMetadata(databaseVersion).writeToDirectory(databaseDir);
    }

    final StorageProviderFunction providerFunction =
        Optional.ofNullable(PROVIDERS_BY_VERSION.get(databaseVersion))
            .orElseThrow(
                () ->
                    new IllegalStateException(
                        String.format(
                            "Invalid database version %d. Valid versions are: %s. Default version is %d",
                            databaseVersion,
                            // %s invokes toString implicitly; no explicit call needed.
                            PROVIDERS_BY_VERSION.navigableKeySet(),
                            DEFAULT_VERSION)));

    return providerFunction.apply(rocksDbConfiguration, metricsSystem, worldStatePreimageCacheSize);
  }

  /** Version 0 layout: one shared RocksDB store for every segment. */
  private static StorageProvider ofUnsegmented(
      final RocksDbConfiguration rocksDbConfiguration,
      final MetricsSystem metricsSystem,
      final long worldStatePreimageCacheSize) {
    final KeyValueStorage kv = RocksDbKeyValueStorage.create(rocksDbConfiguration, metricsSystem);
    final KeyValueStorage preimageKv =
        new LimitedInMemoryKeyValueStorage(worldStatePreimageCacheSize);
    // Same store reused for all segments; isolation flag is therefore false.
    return new KeyValueStorageProvider(kv, kv, preimageKv, kv, kv, kv, false);
  }

  /** Version 1 layout: one RocksDB column family per segment. */
  private static StorageProvider ofSegmented(
      final RocksDbConfiguration rocksDbConfiguration,
      final MetricsSystem metricsSystem,
      final long worldStatePreimageCacheSize) {

    final SegmentedKeyValueStorage<?> columnarStorage =
        ColumnarRocksDbKeyValueStorage.create(
            rocksDbConfiguration, asList(RocksDbSegment.values()), metricsSystem);
    final KeyValueStorage preimageStorage =
        new LimitedInMemoryKeyValueStorage(worldStatePreimageCacheSize);

    return new KeyValueStorageProvider(
        new SegmentedKeyValueStorageAdapter<>(RocksDbSegment.BLOCKCHAIN, columnarStorage),
        new SegmentedKeyValueStorageAdapter<>(RocksDbSegment.WORLD_STATE, columnarStorage),
        preimageStorage,
        new SegmentedKeyValueStorageAdapter<>(RocksDbSegment.PRIVATE_TRANSACTIONS, columnarStorage),
        new SegmentedKeyValueStorageAdapter<>(RocksDbSegment.PRIVATE_STATE, columnarStorage),
        new SegmentedKeyValueStorageAdapter<>(RocksDbSegment.PRUNING_STATE, columnarStorage),
        true);
  }

  /** Column-family identifiers; the byte ids are persisted, so they must never change. */
  private enum RocksDbSegment implements Segment {
    BLOCKCHAIN((byte) 1),
    WORLD_STATE((byte) 2),
    PRIVATE_TRANSACTIONS((byte) 3),
    PRIVATE_STATE((byte) 4),
    PRUNING_STATE((byte) 5);

    private final byte[] id;

    RocksDbSegment(final byte... id) {
      this.id = id;
    }

    @Override
    public String getName() {
      return name();
    }

    @Override
    public byte[] getId() {
      return id;
    }
  }

  /** Factory function for one database layout version. */
  private interface StorageProviderFunction {
    StorageProvider apply(
        final RocksDbConfiguration rocksDbConfiguration,
        final MetricsSystem metricsSystem,
        final long worldStatePreimageCacheSize)
        throws IOException;
  }
}

@ -15,7 +15,8 @@ package tech.pegasys.pantheon.ethereum.storage.keyvalue;
import tech.pegasys.pantheon.ethereum.core.Hash;
import tech.pegasys.pantheon.ethereum.trie.MerklePatriciaTrie;
import tech.pegasys.pantheon.ethereum.worldstate.WorldStateStorage;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
import tech.pegasys.pantheon.util.Subscribers;
import tech.pegasys.pantheon.util.bytes.Bytes32;
import tech.pegasys.pantheon.util.bytes.BytesValue;
@ -39,7 +40,7 @@ public class WorldStateKeyValueStorage implements WorldStateStorage {
if (codeHash.equals(Hash.EMPTY)) {
return Optional.of(BytesValue.EMPTY);
} else {
return keyValueStorage.get(codeHash);
return keyValueStorage.get(codeHash.getArrayUnsafe()).map(BytesValue::wrap);
}
}
@ -57,7 +58,7 @@ public class WorldStateKeyValueStorage implements WorldStateStorage {
if (nodeHash.equals(MerklePatriciaTrie.EMPTY_TRIE_NODE_HASH)) {
return Optional.of(MerklePatriciaTrie.EMPTY_TRIE_NODE);
} else {
return keyValueStorage.get(nodeHash);
return keyValueStorage.get(nodeHash.getArrayUnsafe()).map(BytesValue::wrap);
}
}
@ -68,7 +69,7 @@ public class WorldStateKeyValueStorage implements WorldStateStorage {
} else if (hash.equals(Hash.EMPTY)) {
return Optional.of(BytesValue.EMPTY);
} else {
return keyValueStorage.get(hash);
return keyValueStorage.get(hash.getArrayUnsafe()).map(BytesValue::wrap);
}
}
@ -83,8 +84,8 @@ public class WorldStateKeyValueStorage implements WorldStateStorage {
}
@Override
public long prune(final Predicate<BytesValue> inUseCheck) {
return keyValueStorage.removeUnless(inUseCheck);
public long prune(final Predicate<byte[]> inUseCheck) {
return keyValueStorage.removeAllKeysUnless(inUseCheck);
}
@Override
@ -99,12 +100,12 @@ public class WorldStateKeyValueStorage implements WorldStateStorage {
public static class Updater implements WorldStateStorage.Updater {
private final KeyValueStorage.Transaction transaction;
private final KeyValueStorageTransaction transaction;
private final Subscribers<NodesAddedListener> nodeAddedListeners;
private final List<Bytes32> addedNodes = new ArrayList<>();
public Updater(
final KeyValueStorage.Transaction transaction,
final KeyValueStorageTransaction transaction,
final Subscribers<NodesAddedListener> nodeAddedListeners) {
this.transaction = transaction;
this.nodeAddedListeners = nodeAddedListeners;
@ -112,7 +113,7 @@ public class WorldStateKeyValueStorage implements WorldStateStorage {
@Override
public Updater removeAccountStateTrieNode(final Bytes32 nodeHash) {
  // Transactions operate on raw byte[] keys; the duplicate BytesValue-based remove
  // call was stale diff residue and is dropped.
  transaction.remove(nodeHash.getArrayUnsafe());
  return this;
}
@ -124,7 +125,7 @@ public class WorldStateKeyValueStorage implements WorldStateStorage {
}
addedNodes.add(codeHash);
transaction.put(codeHash, code);
transaction.put(codeHash.getArrayUnsafe(), code.getArrayUnsafe());
return this;
}
@ -135,7 +136,7 @@ public class WorldStateKeyValueStorage implements WorldStateStorage {
return this;
}
addedNodes.add(nodeHash);
transaction.put(nodeHash, node);
transaction.put(nodeHash.getArrayUnsafe(), node.getArrayUnsafe());
return this;
}
@ -146,7 +147,7 @@ public class WorldStateKeyValueStorage implements WorldStateStorage {
return this;
}
addedNodes.add(nodeHash);
transaction.put(nodeHash, node);
transaction.put(nodeHash.getArrayUnsafe(), node.getArrayUnsafe());
return this;
}

@ -14,9 +14,10 @@ package tech.pegasys.pantheon.ethereum.storage.keyvalue;
import tech.pegasys.pantheon.ethereum.core.Address;
import tech.pegasys.pantheon.ethereum.worldstate.WorldStatePreimageStorage;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage.Transaction;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
import tech.pegasys.pantheon.util.bytes.Bytes32;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import tech.pegasys.pantheon.util.uint.UInt256;
import java.util.Optional;
@ -31,8 +32,8 @@ public class WorldStatePreimageKeyValueStorage implements WorldStatePreimageStor
@Override
public Optional<UInt256> getStorageTrieKeyPreimage(final Bytes32 trieKey) {
return keyValueStorage
.get(trieKey)
.filter(val -> val.size() == UInt256.SIZE)
.get(trieKey.getArrayUnsafe())
.filter(val -> val.length == UInt256.SIZE)
.map(Bytes32::wrap)
.map(UInt256::wrap);
}
@ -40,9 +41,9 @@ public class WorldStatePreimageKeyValueStorage implements WorldStatePreimageStor
@Override
public Optional<Address> getAccountTrieKeyPreimage(final Bytes32 trieKey) {
return keyValueStorage
.get(trieKey)
.filter(val -> val.size() == Address.SIZE)
.map(Address::wrap);
.get(trieKey.getArrayUnsafe())
.filter(val -> val.length == Address.SIZE)
.map(val -> Address.wrap(BytesValue.wrap(val)));
}
@Override
@ -51,23 +52,23 @@ public class WorldStatePreimageKeyValueStorage implements WorldStatePreimageStor
}
public static class Updater implements WorldStatePreimageStorage.Updater {
private final KeyValueStorage.Transaction transaction;
private final KeyValueStorageTransaction transaction;
public Updater(final Transaction transaction) {
public Updater(final KeyValueStorageTransaction transaction) {
this.transaction = transaction;
}
@Override
public WorldStatePreimageStorage.Updater putStorageTrieKeyPreimage(
    final Bytes32 trieKey, final UInt256 preimage) {
  // Stores the preimage under its trie key as raw bytes; the duplicate BytesValue-based
  // put call was stale diff residue and is dropped.
  transaction.put(trieKey.getArrayUnsafe(), preimage.getBytes().getArrayUnsafe());
  return this;
}
@Override
public WorldStatePreimageStorage.Updater putAccountTrieKeyPreimage(
    final Bytes32 trieKey, final Address preimage) {
  // Stores the account-address preimage under its trie key as raw bytes; the duplicate
  // BytesValue-based put call was stale diff residue and is dropped.
  transaction.put(trieKey.getArrayUnsafe(), preimage.getArrayUnsafe());
  return this;
}

@ -17,11 +17,11 @@ import tech.pegasys.pantheon.ethereum.core.Hash;
import tech.pegasys.pantheon.ethereum.rlp.RLP;
import tech.pegasys.pantheon.ethereum.trie.MerklePatriciaTrie;
import tech.pegasys.pantheon.ethereum.trie.StoredMerklePatriciaTrie;
import tech.pegasys.pantheon.metrics.ObservableMetricsSystem;
import tech.pegasys.pantheon.metrics.PantheonMetricCategory;
import tech.pegasys.pantheon.plugin.services.MetricsSystem;
import tech.pegasys.pantheon.plugin.services.metrics.Counter;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage.Transaction;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
import tech.pegasys.pantheon.util.bytes.Bytes32;
import tech.pegasys.pantheon.util.bytes.BytesValue;
@ -37,9 +37,10 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class MarkSweepPruner {
private static final int DEFAULT_OPS_PER_TRANSACTION = 1000;
private static final Logger LOG = LogManager.getLogger();
private static final BytesValue IN_USE = BytesValue.of(1);
private static final byte[] IN_USE = BytesValue.of(1).getArrayUnsafe();
private final int operationsPerTransaction;
private final WorldStateStorage worldStateStorage;
@ -57,7 +58,7 @@ public class MarkSweepPruner {
final WorldStateStorage worldStateStorage,
final MutableBlockchain blockchain,
final KeyValueStorage markStorage,
final MetricsSystem metricsSystem) {
final ObservableMetricsSystem metricsSystem) {
this(worldStateStorage, blockchain, markStorage, metricsSystem, DEFAULT_OPS_PER_TRANSACTION);
}
@ -65,7 +66,7 @@ public class MarkSweepPruner {
final WorldStateStorage worldStateStorage,
final MutableBlockchain blockchain,
final KeyValueStorage markStorage,
final MetricsSystem metricsSystem,
final ObservableMetricsSystem metricsSystem,
final int operationsPerTransaction) {
this.worldStateStorage = worldStateStorage;
this.markStorage = markStorage;
@ -137,7 +138,7 @@ public class MarkSweepPruner {
break;
}
if (!markStorage.containsKey(candidateStateRootHash)) {
if (!markStorage.containsKey(candidateStateRootHash.getArrayUnsafe())) {
updater.removeAccountStateTrieNode(candidateStateRootHash);
prunedNodeCount++;
if (prunedNodeCount % operationsPerTransaction == 0) {
@ -200,8 +201,8 @@ public class MarkSweepPruner {
void flushPendingMarks() {
markLock.lock();
try {
final Transaction transaction = markStorage.startTransaction();
pendingMarks.forEach(node -> transaction.put(node, IN_USE));
final KeyValueStorageTransaction transaction = markStorage.startTransaction();
pendingMarks.forEach(node -> transaction.put(node.getArrayUnsafe(), IN_USE));
transaction.commit();
pendingMarks.clear();
} finally {

@ -38,7 +38,7 @@ public interface WorldStateStorage {
Updater updater();
long prune(Predicate<BytesValue> inUseCheck);
long prune(Predicate<byte[]> inUseCheck);
long addNodeAddedListener(NodesAddedListener listener);

@ -26,8 +26,8 @@ import tech.pegasys.pantheon.ethereum.mainnet.ProtocolScheduleBuilder;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueStoragePrefixedKeyBlockchainStorage;
import tech.pegasys.pantheon.ethereum.worldstate.WorldStateArchive;
import tech.pegasys.pantheon.metrics.noop.NoOpMetricsSystem;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.InMemoryKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import java.math.BigInteger;
import java.util.function.Function;

@ -31,8 +31,8 @@ import tech.pegasys.pantheon.ethereum.worldstate.WorldStateArchive;
import tech.pegasys.pantheon.ethereum.worldstate.WorldStatePreimageStorage;
import tech.pegasys.pantheon.ethereum.worldstate.WorldStateStorage;
import tech.pegasys.pantheon.metrics.noop.NoOpMetricsSystem;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.InMemoryKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
public class InMemoryStorageProvider implements StorageProvider {

@ -26,8 +26,8 @@ import tech.pegasys.pantheon.ethereum.core.TransactionReceipt;
import tech.pegasys.pantheon.ethereum.mainnet.MainnetBlockHeaderFunctions;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueStoragePrefixedKeyBlockchainStorage;
import tech.pegasys.pantheon.metrics.noop.NoOpMetricsSystem;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.InMemoryKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import tech.pegasys.pantheon.util.uint.UInt256;
import java.util.ArrayList;

@ -1,104 +0,0 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.ethereum.storage.keyvalue;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.when;
import tech.pegasys.pantheon.metrics.noop.NoOpMetricsSystem;
import tech.pegasys.pantheon.plugin.services.MetricsSystem;
import tech.pegasys.pantheon.services.kvstore.RocksDbConfiguration;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
/**
 * Tests database-version detection and metadata handling in {@code RocksDbStorageProvider}.
 */
@RunWith(MockitoJUnitRunner.class)
public class RocksDbStorageProviderTest {

  @Mock private RocksDbConfiguration rocksDbConfiguration;
  @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();
  private final MetricsSystem metricsSystem = new NoOpMetricsSystem();

  @Test
  public void shouldCreateCorrectMetadataFileForLatestVersion() throws Exception {
    // Fresh (non-existent) directory: the provider must initialise it at the default version.
    final Path databaseDir = emptyDatabaseDir();
    when(rocksDbConfiguration.getDatabaseDir()).thenReturn(databaseDir);

    RocksDbStorageProvider.create(rocksDbConfiguration, metricsSystem);

    assertEquals(
        RocksDbStorageProvider.DEFAULT_VERSION,
        DatabaseMetadata.fromDirectory(rocksDbConfiguration.getDatabaseDir()).getVersion());
  }

  @Test
  public void shouldDetectVersion0DatabaseIfNoMetadataFileFound() throws Exception {
    // Existing database (IDENTITY present) without a metadata file => treated as version 0.
    final Path databaseDir = existingDatabaseDir();
    when(rocksDbConfiguration.getDatabaseDir()).thenReturn(databaseDir);

    RocksDbStorageProvider.create(rocksDbConfiguration, metricsSystem);

    assertEquals(0, DatabaseMetadata.fromDirectory(databaseDir).getVersion());
  }

  @Test
  public void shouldDetectCorrectVersionIfMetadataFileExists() throws Exception {
    // Existing database with an explicit metadata file => that version is honoured.
    final Path databaseDir = existingDatabaseDir();
    new DatabaseMetadata(1).writeToDirectory(databaseDir);
    when(rocksDbConfiguration.getDatabaseDir()).thenReturn(databaseDir);

    RocksDbStorageProvider.create(rocksDbConfiguration, metricsSystem);

    assertEquals(1, DatabaseMetadata.fromDirectory(databaseDir).getVersion());
  }

  @Test
  public void shouldThrowExceptionWhenVersionNumberIsInvalid() throws Exception {
    // A version with no registered provider must be rejected.
    final Path databaseDir = existingDatabaseDir();
    new DatabaseMetadata(-1).writeToDirectory(databaseDir);
    when(rocksDbConfiguration.getDatabaseDir()).thenReturn(databaseDir);

    assertThatThrownBy(() -> RocksDbStorageProvider.create(rocksDbConfiguration, metricsSystem))
        .isInstanceOf(IllegalStateException.class);
  }

  @Test
  public void shouldThrowExceptionWhenMetaDataFileIsCorrupted() throws Exception {
    final Path databaseDir = existingDatabaseDir();
    when(rocksDbConfiguration.getDatabaseDir()).thenReturn(databaseDir);

    // Metadata JSON missing the version key entirely.
    final String badVersion = "{\"🦄\":1}";
    Files.write(
        databaseDir.resolve(DatabaseMetadata.METADATA_FILENAME),
        badVersion.getBytes(Charset.defaultCharset()));
    assertThatThrownBy(() -> RocksDbStorageProvider.create(rocksDbConfiguration, metricsSystem))
        .isInstanceOf(IllegalStateException.class);

    // Metadata JSON with a non-numeric version value.
    final String badValue = "{\"version\":\"iomedae\"}";
    Files.write(
        databaseDir.resolve(DatabaseMetadata.METADATA_FILENAME),
        badValue.getBytes(Charset.defaultCharset()));
    assertThatThrownBy(() -> RocksDbStorageProvider.create(rocksDbConfiguration, metricsSystem))
        .isInstanceOf(IllegalStateException.class);
  }

  /** Returns a path to a "db" directory inside a fresh temp folder, without creating it. */
  private Path emptyDatabaseDir() throws Exception {
    return temporaryFolder.newFolder().toPath().resolve("db");
  }

  /** Creates a database directory containing an IDENTITY file, mimicking an existing RocksDB. */
  private Path existingDatabaseDir() throws Exception {
    final Path databaseDir = emptyDatabaseDir();
    Files.createDirectories(databaseDir);
    databaseDir.resolve("IDENTITY").toFile().createNewFile();
    return databaseDir;
  }
}

@ -32,8 +32,8 @@ import tech.pegasys.pantheon.ethereum.core.WorldUpdater;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.WorldStateKeyValueStorage;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.WorldStatePreimageKeyValueStorage;
import tech.pegasys.pantheon.ethereum.trie.MerklePatriciaTrie;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.InMemoryKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import tech.pegasys.pantheon.util.bytes.Bytes32;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import tech.pegasys.pantheon.util.uint.UInt256;

@ -44,6 +44,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.junit.Test;
import org.mockito.InOrder;
@ -52,7 +53,7 @@ public class MarkSweepPrunerTest {
private final BlockDataGenerator gen = new BlockDataGenerator();
private final NoOpMetricsSystem metricsSystem = new NoOpMetricsSystem();
private final Map<BytesValue, BytesValue> hashValueStore = spy(new HashMap<>());
private final Map<BytesValue, byte[]> hashValueStore = spy(new HashMap<>());
private final InMemoryKeyValueStorage stateStorage = spy(new TestInMemoryStorage(hashValueStore));
private final WorldStateStorage worldStateStorage = new WorldStateKeyValueStorage(stateStorage);
private final WorldStateArchive worldStateArchive =
@ -156,7 +157,9 @@ public class MarkSweepPrunerTest {
// Check that storage contains only the values we expect
assertThat(hashValueStore.size()).isEqualTo(expectedNodes.size());
assertThat(hashValueStore.values()).containsExactlyInAnyOrderElementsOf(expectedNodes);
assertThat(hashValueStore.values())
.containsExactlyInAnyOrderElementsOf(
expectedNodes.stream().map(BytesValue::getArrayUnsafe).collect(Collectors.toSet()));
}
@Test
@ -202,7 +205,9 @@ public class MarkSweepPrunerTest {
// Check that storage contains only the values we expect
assertThat(hashValueStore.size()).isEqualTo(expectedNodes.size());
assertThat(hashValueStore.values()).containsExactlyInAnyOrderElementsOf(expectedNodes);
assertThat(hashValueStore.values())
.containsExactlyInAnyOrderElementsOf(
expectedNodes.stream().map(BytesValue::getArrayUnsafe).collect(Collectors.toSet()));
}
@Test
@ -233,7 +238,7 @@ public class MarkSweepPrunerTest {
for (Bytes32 stateRoot : stateRoots) {
inOrder.verify(hashValueStore).remove(stateRoot);
}
inOrder.verify(stateStorage).removeUnless(any());
inOrder.verify(stateStorage).removeAllKeysUnless(any());
}
@Test
@ -268,9 +273,9 @@ public class MarkSweepPrunerTest {
for (Bytes32 stateRoot : stateRoots) {
inOrder.verify(hashValueStore).remove(stateRoot);
}
inOrder.verify(stateStorage).removeUnless(any());
inOrder.verify(stateStorage).removeAllKeysUnless(any());
assertThat(stateStorage.containsKey(markedRoot)).isTrue();
assertThat(stateStorage.containsKey(markedRoot.getArrayUnsafe())).isTrue();
}
private void generateBlockchainData(final int numBlocks, final int numAccounts) {
@ -311,7 +316,9 @@ public class MarkSweepPrunerTest {
(key, val) -> {
final StateTrieAccountValue accountValue =
StateTrieAccountValue.readFrom(RLP.input(val));
stateStorage.get(accountValue.getCodeHash()).ifPresent(collector::add);
stateStorage
.get(accountValue.getCodeHash().getArrayUnsafe())
.ifPresent(v -> collector.add(BytesValue.wrap(v)));
storageRoots.add(accountValue.getStorageRoot());
});
@ -355,7 +362,7 @@ public class MarkSweepPrunerTest {
private static class TestInMemoryStorage extends InMemoryKeyValueStorage {
public TestInMemoryStorage(final Map<BytesValue, BytesValue> hashValueStore) {
public TestInMemoryStorage(final Map<BytesValue, byte[]> hashValueStore) {
super(hashValueStore);
}
}

@ -54,6 +54,9 @@ dependencies {
testImplementation 'org.assertj:assertj-core'
testImplementation 'org.awaitility:awaitility'
testImplementation 'org.mockito:mockito-core'
jmhImplementation project(':pantheon')
jmhImplementation project(':plugins:rocksdb')
jmhImplementation project(path: ':ethereum:core', configuration: 'testSupportArtifacts')
integrationTestImplementation project(path: ':config', configuration: 'testSupportArtifacts')

@ -13,6 +13,10 @@
package tech.pegasys.pantheon.ethereum.eth.sync.worldstate;
import static tech.pegasys.pantheon.ethereum.core.InMemoryStorageProvider.createInMemoryWorldStateArchive;
import static tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBCLIOptions.DEFAULT_BACKGROUND_THREAD_COUNT;
import static tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBCLIOptions.DEFAULT_CACHE_CAPACITY;
import static tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBCLIOptions.DEFAULT_MAX_BACKGROUND_COMPACTIONS;
import static tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBCLIOptions.DEFAULT_MAX_OPEN_FILES;
import tech.pegasys.pantheon.ethereum.core.BlockDataGenerator;
import tech.pegasys.pantheon.ethereum.core.BlockHeader;
@ -27,12 +31,15 @@ import tech.pegasys.pantheon.ethereum.eth.manager.RespondingEthPeer;
import tech.pegasys.pantheon.ethereum.eth.manager.RespondingEthPeer.Responder;
import tech.pegasys.pantheon.ethereum.eth.sync.SynchronizerConfiguration;
import tech.pegasys.pantheon.ethereum.storage.StorageProvider;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.RocksDbStorageProvider;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueSegmentIdentifier;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueStorageProviderBuilder;
import tech.pegasys.pantheon.ethereum.worldstate.WorldStateArchive;
import tech.pegasys.pantheon.ethereum.worldstate.WorldStateStorage;
import tech.pegasys.pantheon.metrics.ObservableMetricsSystem;
import tech.pegasys.pantheon.metrics.noop.NoOpMetricsSystem;
import tech.pegasys.pantheon.plugin.services.MetricsSystem;
import tech.pegasys.pantheon.services.kvstore.RocksDbConfiguration;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.RocksDBKeyValueStorageFactory;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBFactoryConfiguration;
import tech.pegasys.pantheon.services.PantheonConfigurationImpl;
import tech.pegasys.pantheon.services.tasks.CachingTaskCollection;
import tech.pegasys.pantheon.services.tasks.FlatFileTaskCollection;
import tech.pegasys.pantheon.util.bytes.BytesValue;
@ -41,6 +48,7 @@ import java.nio.file.Path;
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneOffset;
import java.util.Arrays;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
@ -60,7 +68,7 @@ public class WorldStateDownloaderBenchmark {
private final BlockDataGenerator dataGen = new BlockDataGenerator();
private Path tempDir;
private BlockHeader blockHeader;
private final MetricsSystem metricsSystem = new NoOpMetricsSystem();
private final ObservableMetricsSystem metricsSystem = new NoOpMetricsSystem();
private WorldStateDownloader worldStateDownloader;
private WorldStateStorage worldStateStorage;
private RespondingEthPeer peer;
@ -70,7 +78,7 @@ public class WorldStateDownloaderBenchmark {
private EthProtocolManager ethProtocolManager;
@Setup(Level.Invocation)
public void setUpUnchangedState() throws Exception {
public void setUpUnchangedState() {
final SynchronizerConfiguration syncConfig =
new SynchronizerConfiguration.Builder().worldStateHashCountPerRequest(200).build();
final Hash stateRoot = createExistingWorldState();
@ -88,10 +96,9 @@ public class WorldStateDownloaderBenchmark {
peer = EthProtocolManagerTestUtil.createPeer(ethProtocolManager, blockHeader.getNumber());
final EthContext ethContext = ethProtocolManager.ethContext();
storageProvider =
RocksDbStorageProvider.create(
RocksDbConfiguration.builder().databaseDir(tempDir.resolve("database")).build(),
metricsSystem);
final StorageProvider storageProvider =
createKeyValueStorageProvider(tempDir.resolve("database"));
worldStateStorage = storageProvider.createWorldStateStorage();
pendingRequests =
@ -148,4 +155,20 @@ public class WorldStateDownloaderBenchmark {
}
return rootData;
}
private StorageProvider createKeyValueStorageProvider(final Path dbAhead) {
return new KeyValueStorageProviderBuilder()
.withStorageFactory(
new RocksDBKeyValueStorageFactory(
() ->
new RocksDBFactoryConfiguration(
DEFAULT_MAX_OPEN_FILES,
DEFAULT_MAX_BACKGROUND_COMPACTIONS,
DEFAULT_BACKGROUND_THREAD_COUNT,
DEFAULT_CACHE_CAPACITY),
Arrays.asList(KeyValueSegmentIdentifier.values())))
.withCommonConfiguration(new PantheonConfigurationImpl(dbAhead))
.withMetricsSystem(new NoOpMetricsSystem())
.build();
}
}

@ -34,7 +34,6 @@ dependencies {
implementation(project(':ethereum:jsonrpc'))
implementation(project(':ethereum:rlp'))
implementation(project(':ethereum:p2p'))
// implementation(project(':pantheon'))
implementation(project(':metrics:core'))
implementation(project(':nat'))
implementation(project(':services:kvstore'))

@ -12,7 +12,8 @@
*/
package tech.pegasys.pantheon.ethereum.trie;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
import tech.pegasys.pantheon.util.bytes.Bytes32;
import tech.pegasys.pantheon.util.bytes.BytesValue;
@ -34,7 +35,7 @@ public class KeyValueMerkleStorage implements MerkleStorage {
final Optional<BytesValue> value =
pendingUpdates.containsKey(hash)
? Optional.of(pendingUpdates.get(hash))
: keyValueStorage.get(hash);
: keyValueStorage.get(hash.getArrayUnsafe()).map(BytesValue::wrap);
return value;
}
@ -49,9 +50,9 @@ public class KeyValueMerkleStorage implements MerkleStorage {
// Nothing to do
return;
}
final KeyValueStorage.Transaction kvTx = keyValueStorage.startTransaction();
final KeyValueStorageTransaction kvTx = keyValueStorage.startTransaction();
for (final Map.Entry<Bytes32, BytesValue> entry : pendingUpdates.entrySet()) {
kvTx.put(entry.getKey(), entry.getValue());
kvTx.put(entry.getKey().getArrayUnsafe(), entry.getValue().getArrayUnsafe());
}
kvTx.commit();

@ -16,8 +16,8 @@ import static java.nio.charset.StandardCharsets.UTF_8;
import static junit.framework.TestCase.assertFalse;
import static org.assertj.core.api.Assertions.assertThat;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.InMemoryKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import tech.pegasys.pantheon.util.bytes.Bytes32;
import tech.pegasys.pantheon.util.bytes.BytesValue;

@ -14,12 +14,12 @@ package tech.pegasys.pantheon.ethereum.trie;
import static org.assertj.core.api.Assertions.assertThat;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.InMemoryKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage;
import tech.pegasys.pantheon.util.bytes.Bytes32;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Optional;
import java.util.function.Function;
@ -36,8 +36,8 @@ public class StoredMerklePatriciaTrieTest extends AbstractMerklePatriciaTrieTest
keyValueStore = new InMemoryKeyValueStorage();
merkleStorage = new KeyValueMerkleStorage(keyValueStore);
valueSerializer =
value -> (value != null) ? BytesValue.wrap(value.getBytes(Charset.forName("UTF-8"))) : null;
valueDeserializer = bytes -> new String(bytes.getArrayUnsafe(), Charset.forName("UTF-8"));
value -> (value != null) ? BytesValue.wrap(value.getBytes(StandardCharsets.UTF_8)) : null;
valueDeserializer = bytes -> new String(bytes.getArrayUnsafe(), StandardCharsets.UTF_8);
return new StoredMerklePatriciaTrie<>(merkleStorage::get, valueSerializer, valueDeserializer);
}

@ -14,8 +14,9 @@ package tech.pegasys.pantheon.ethereum.trie;
import static org.assertj.core.api.Assertions.assertThat;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
import tech.pegasys.pantheon.services.kvstore.InMemoryKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage.Transaction;
import tech.pegasys.pantheon.util.bytes.Bytes32;
import tech.pegasys.pantheon.util.bytes.BytesValue;
@ -36,7 +37,8 @@ public class TrieNodeDecoderTest {
// Build a small trie
MerklePatriciaTrie<BytesValue, BytesValue> trie =
new StoredMerklePatriciaTrie<>(storage::get, Function.identity(), Function.identity());
new StoredMerklePatriciaTrie<>(
new BytesToByteNodeLoader(storage), Function.identity(), Function.identity());
trie.put(BytesValue.fromHexString("0x100000"), BytesValue.of(1));
trie.put(BytesValue.fromHexString("0x200000"), BytesValue.of(2));
trie.put(BytesValue.fromHexString("0x300000"), BytesValue.of(3));
@ -49,12 +51,13 @@ public class TrieNodeDecoderTest {
BytesValue.fromHexString("0x11223344556677889900112233445566778899"));
// Save nodes to storage
final Transaction tx = storage.startTransaction();
trie.commit(tx::put);
final KeyValueStorageTransaction tx = storage.startTransaction();
trie.commit((key, value) -> tx.put(key.getArrayUnsafe(), value.getArrayUnsafe()));
tx.commit();
// Get and flatten root node
final BytesValue rootNodeRlp = storage.get(trie.getRootHash()).get();
final BytesValue rootNodeRlp =
BytesValue.wrap(storage.get(trie.getRootHash().getArrayUnsafe()).get());
final List<Node<BytesValue>> nodes = TrieNodeDecoder.decodeNodes(rootNodeRlp);
// The full trie hold 10 nodes, the branch node starting with 0x3... holding 2 values will be a
// hash
@ -80,7 +83,8 @@ public class TrieNodeDecoderTest {
// Build a small trie
MerklePatriciaTrie<BytesValue, BytesValue> trie =
new StoredMerklePatriciaTrie<>(storage::get, Function.identity(), Function.identity());
new StoredMerklePatriciaTrie<>(
new BytesToByteNodeLoader(storage), Function.identity(), Function.identity());
trie.put(BytesValue.fromHexString("0x100000"), BytesValue.of(1));
trie.put(BytesValue.fromHexString("0x200000"), BytesValue.of(2));
trie.put(BytesValue.fromHexString("0x300000"), BytesValue.of(3));
@ -90,13 +94,14 @@ public class TrieNodeDecoderTest {
trie.put(BytesValue.fromHexString("0x310000"), BytesValue.of(30));
// Save nodes to storage
final Transaction tx = storage.startTransaction();
trie.commit(tx::put);
final KeyValueStorageTransaction tx = storage.startTransaction();
trie.commit((key, value) -> tx.put(key.getArrayUnsafe(), value.getArrayUnsafe()));
tx.commit();
// First layer should just be the root node
final List<Node<BytesValue>> depth0Nodes =
TrieNodeDecoder.breadthFirstDecoder(storage::get, trie.getRootHash(), 0)
TrieNodeDecoder.breadthFirstDecoder(
new BytesToByteNodeLoader(storage), trie.getRootHash(), 0)
.collect(Collectors.toList());
assertThat(depth0Nodes.size()).isEqualTo(1);
@ -105,7 +110,8 @@ public class TrieNodeDecoderTest {
// Decode first 2 levels
final List<Node<BytesValue>> depth0And1Nodes =
(TrieNodeDecoder.breadthFirstDecoder(storage::get, trie.getRootHash(), 1)
(TrieNodeDecoder.breadthFirstDecoder(
new BytesToByteNodeLoader(storage), trie.getRootHash(), 1)
.collect(Collectors.toList()));
final int secondLevelNodeCount = 3;
final int expectedNodeCount = secondLevelNodeCount + 1;
@ -126,7 +132,7 @@ public class TrieNodeDecoderTest {
// Decode full trie
final List<Node<BytesValue>> allNodes =
TrieNodeDecoder.breadthFirstDecoder(storage::get, trie.getRootHash())
TrieNodeDecoder.breadthFirstDecoder(new BytesToByteNodeLoader(storage), trie.getRootHash())
.collect(Collectors.toList());
assertThat(allNodes.size()).isEqualTo(10);
// Collect and check values
@ -153,7 +159,8 @@ public class TrieNodeDecoderTest {
// Build a small trie
MerklePatriciaTrie<BytesValue, BytesValue> trie =
new StoredMerklePatriciaTrie<>(fullStorage::get, Function.identity(), Function.identity());
new StoredMerklePatriciaTrie<>(
new BytesToByteNodeLoader(fullStorage), Function.identity(), Function.identity());
final Random random = new Random(1);
for (int i = 0; i < 30; i++) {
byte[] key = new byte[4];
@ -162,20 +169,24 @@ public class TrieNodeDecoderTest {
random.nextBytes(val);
trie.put(BytesValue.wrap(key), BytesValue.wrap(val));
}
final Transaction tx = fullStorage.startTransaction();
trie.commit(tx::put);
final KeyValueStorageTransaction tx = fullStorage.startTransaction();
trie.commit((key, value) -> tx.put(key.getArrayUnsafe(), value.getArrayUnsafe()));
tx.commit();
// Get root node
Node<BytesValue> rootNode =
TrieNodeDecoder.breadthFirstDecoder(fullStorage::get, trie.getRootHash()).findFirst().get();
TrieNodeDecoder.breadthFirstDecoder(
new BytesToByteNodeLoader(fullStorage), trie.getRootHash())
.findFirst()
.get();
// Decode partially available trie
final Transaction partialTx = partialStorage.startTransaction();
partialTx.put(trie.getRootHash(), rootNode.getRlp());
final KeyValueStorageTransaction partialTx = partialStorage.startTransaction();
partialTx.put(trie.getRootHash().getArrayUnsafe(), rootNode.getRlp().getArrayUnsafe());
partialTx.commit();
final List<Node<BytesValue>> allDecodableNodes =
TrieNodeDecoder.breadthFirstDecoder(partialStorage::get, trie.getRootHash())
TrieNodeDecoder.breadthFirstDecoder(
new BytesToByteNodeLoader(partialStorage), trie.getRootHash())
.collect(Collectors.toList());
assertThat(allDecodableNodes.size()).isGreaterThanOrEqualTo(1);
assertThat(allDecodableNodes.get(0).getHash()).isEqualTo(rootNode.getHash());
@ -195,16 +206,17 @@ public class TrieNodeDecoderTest {
final InMemoryKeyValueStorage storage = new InMemoryKeyValueStorage();
MerklePatriciaTrie<BytesValue, BytesValue> trie =
new StoredMerklePatriciaTrie<>(storage::get, Function.identity(), Function.identity());
new StoredMerklePatriciaTrie<>(
new BytesToByteNodeLoader(storage), Function.identity(), Function.identity());
trie.put(BytesValue.fromHexString("0x100000"), BytesValue.of(1));
// Save nodes to storage
final Transaction tx = storage.startTransaction();
trie.commit(tx::put);
final KeyValueStorageTransaction tx = storage.startTransaction();
trie.commit((key, value) -> tx.put(key.getArrayUnsafe(), value.getArrayUnsafe()));
tx.commit();
List<Node<BytesValue>> result =
TrieNodeDecoder.breadthFirstDecoder(storage::get, trie.getRootHash())
TrieNodeDecoder.breadthFirstDecoder(new BytesToByteNodeLoader(storage), trie.getRootHash())
.collect(Collectors.toList());
assertThat(result.size()).isEqualTo(1);
assertThat(result.get(0).getValue()).contains(BytesValue.of(1));
@ -221,4 +233,19 @@ public class TrieNodeDecoderTest {
.collect(Collectors.toList());
assertThat(result.size()).isEqualTo(0);
}
/**
 * Adapts a byte-array based {@link KeyValueStorage} to the {@link NodeLoader} interface,
 * wrapping raw stored bytes as {@link BytesValue} on the way out.
 */
private static class BytesToByteNodeLoader implements NodeLoader {

  // Underlying storage keyed by raw node-hash bytes.
  private final KeyValueStorage storage;

  private BytesToByteNodeLoader(final KeyValueStorage storage) {
    this.storage = storage;
  }

  @Override
  public Optional<BytesValue> getNode(final Bytes32 hash) {
    // Absent key maps straight to an empty Optional; present bytes are wrapped without copying.
    return storage.get(hash.getArrayUnsafe()).map(BytesValue::wrap);
  }
}
}

@ -127,10 +127,11 @@ downloadLicenses {
licenses = [
(group('pantheon')) : apache,
(group('pantheon.ethereum')) : apache,
(group('pantheon.services')) : apache,
(group('pantheon.consensus')) : apache,
(group('pantheon.ethereum')) : apache,
(group('pantheon.metrics')) : apache,
(group('pantheon.plugins')) : apache,
(group('pantheon.services')) : apache,
// https://checkerframework.org/manual/#license
// The more permissive MIT License applies to code that you might want

@ -29,6 +29,7 @@ dependencyManagement {
dependency 'com.graphql-java:graphql-java:13.0'
dependency 'com.google.guava:guava:28.0-jre'
dependency 'com.google.auto.service:auto-service:1.0-rc4'
dependency 'com.squareup.okhttp3:okhttp:3.14.2'

@ -28,7 +28,6 @@ jar {
dependencies {
implementation project(':metrics:core')
implementation project(':plugin-api')
implementation project(':services:util')
implementation 'com.google.guava:guava'
implementation 'io.prometheus:simpleclient'

@ -61,6 +61,7 @@ dependencies {
runtime 'org.apache.logging.log4j:log4j-core'
runtime 'org.apache.logging.log4j:log4j-slf4j-impl'
testImplementation project(':plugins:rocksdb')
testImplementation project(':testutil')
testImplementation project(path: ':ethereum:core', configuration: 'testSupportArtifacts')

@ -68,6 +68,7 @@ public interface DefaultCommandValues {
int DEFAULT_MAX_PEERS = 25;
float DEFAULT_FRACTION_REMOTE_WIRE_CONNECTIONS_ALLOWED =
RlpxConfiguration.DEFAULT_FRACTION_REMOTE_CONNECTIONS_ALLOWED;
String DEFAULT_KEY_VALUE_STORAGE_NAME = "rocksdb";
static Path getDefaultPantheonDataPath(final Object command) {
// this property is retrieved from Gradle tasks or Pantheon running shell script.

@ -44,7 +44,6 @@ import tech.pegasys.pantheon.cli.error.PantheonExceptionHandler;
import tech.pegasys.pantheon.cli.options.EthProtocolOptions;
import tech.pegasys.pantheon.cli.options.MetricsCLIOptions;
import tech.pegasys.pantheon.cli.options.NetworkingOptions;
import tech.pegasys.pantheon.cli.options.RocksDBOptions;
import tech.pegasys.pantheon.cli.options.SynchronizerOptions;
import tech.pegasys.pantheon.cli.options.TransactionPoolOptions;
import tech.pegasys.pantheon.cli.subcommands.PasswordSubCommand;
@ -81,6 +80,8 @@ import tech.pegasys.pantheon.ethereum.permissioning.LocalPermissioningConfigurat
import tech.pegasys.pantheon.ethereum.permissioning.PermissioningConfiguration;
import tech.pegasys.pantheon.ethereum.permissioning.PermissioningConfigurationBuilder;
import tech.pegasys.pantheon.ethereum.permissioning.SmartContractPermissioningConfiguration;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueStorageProvider;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueStorageProviderBuilder;
import tech.pegasys.pantheon.ethereum.worldstate.PruningConfiguration;
import tech.pegasys.pantheon.metrics.ObservableMetricsSystem;
import tech.pegasys.pantheon.metrics.PantheonMetricCategory;
@ -90,13 +91,16 @@ import tech.pegasys.pantheon.metrics.prometheus.PrometheusMetricsSystem;
import tech.pegasys.pantheon.metrics.vertx.VertxMetricsAdapterFactory;
import tech.pegasys.pantheon.nat.NatMethod;
import tech.pegasys.pantheon.plugin.services.MetricsSystem;
import tech.pegasys.pantheon.plugin.services.PantheonConfiguration;
import tech.pegasys.pantheon.plugin.services.PantheonEvents;
import tech.pegasys.pantheon.plugin.services.PicoCLIOptions;
import tech.pegasys.pantheon.plugin.services.StorageService;
import tech.pegasys.pantheon.plugin.services.metrics.MetricCategory;
import tech.pegasys.pantheon.services.PantheonConfigurationImpl;
import tech.pegasys.pantheon.services.PantheonEventsImpl;
import tech.pegasys.pantheon.services.PantheonPluginContextImpl;
import tech.pegasys.pantheon.services.PicoCLIOptionsImpl;
import tech.pegasys.pantheon.services.kvstore.RocksDbConfiguration;
import tech.pegasys.pantheon.services.StorageServiceImpl;
import tech.pegasys.pantheon.util.PermissioningConfigurationValidator;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import tech.pegasys.pantheon.util.number.Fraction;
@ -122,6 +126,7 @@ import java.util.TreeMap;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Resources;
@ -167,11 +172,11 @@ public class PantheonCommand implements DefaultCommandValues, Runnable {
final SynchronizerOptions synchronizerOptions = SynchronizerOptions.create();
final EthProtocolOptions ethProtocolOptions = EthProtocolOptions.create();
final MetricsCLIOptions metricsCLIOptions = MetricsCLIOptions.create();
final RocksDBOptions rocksDBOptions = RocksDBOptions.create();
final TransactionPoolOptions transactionPoolOptions = TransactionPoolOptions.create();
private final RunnerBuilder runnerBuilder;
private final PantheonController.Builder controllerBuilderFactory;
private final PantheonPluginContextImpl pantheonPluginContext;
private final StorageServiceImpl storageService;
private final Map<String, String> environment;
protected KeyLoader getKeyLoader() {
@ -651,6 +656,13 @@ public class PantheonCommand implements DefaultCommandValues, Runnable {
private final Integer pendingTxRetentionPeriod =
TransactionPoolConfiguration.DEFAULT_TX_RETENTION_HOURS;
@SuppressWarnings("FieldMayBeFinal") // Because PicoCLI requires Strings to not be final.
@Option(
names = {"--key-value-storage"},
description = "Identity for the key-value storage to be used.",
arity = "1")
private String keyValueStorageName = DEFAULT_KEY_VALUE_STORAGE_NAME;
@Option(
names = {"--override-genesis-config"},
paramLabel = "NAME=VALUE",
@ -669,7 +681,8 @@ public class PantheonCommand implements DefaultCommandValues, Runnable {
private Optional<PermissioningConfiguration> permissioningConfiguration;
private Collection<EnodeURL> staticNodes;
private PantheonController<?> pantheonController;
private StandaloneCommand standaloneCommands;
private PantheonConfiguration pluginCommonConfiguration;
private final Supplier<ObservableMetricsSystem> metricsSystem =
Suppliers.memoize(() -> PrometheusMetricsSystem.init(metricsConfiguration()));
@ -682,6 +695,29 @@ public class PantheonCommand implements DefaultCommandValues, Runnable {
final PantheonController.Builder controllerBuilderFactory,
final PantheonPluginContextImpl pantheonPluginContext,
final Map<String, String> environment) {
this(
logger,
rlpBlockImporter,
jsonBlockImporterFactory,
rlpBlockExporterFactory,
runnerBuilder,
controllerBuilderFactory,
pantheonPluginContext,
environment,
new StorageServiceImpl());
}
@VisibleForTesting
protected PantheonCommand(
final Logger logger,
final RlpBlockImporter rlpBlockImporter,
final JsonBlockImporterFactory jsonBlockImporterFactory,
final RlpBlockExporterFactory rlpBlockExporterFactory,
final RunnerBuilder runnerBuilder,
final PantheonController.Builder controllerBuilderFactory,
final PantheonPluginContextImpl pantheonPluginContext,
final Map<String, String> environment,
final StorageServiceImpl storageService) {
this.logger = logger;
this.rlpBlockImporter = rlpBlockImporter;
this.rlpBlockExporterFactory = rlpBlockExporterFactory;
@ -690,10 +726,9 @@ public class PantheonCommand implements DefaultCommandValues, Runnable {
this.controllerBuilderFactory = controllerBuilderFactory;
this.pantheonPluginContext = pantheonPluginContext;
this.environment = environment;
this.storageService = storageService;
}
private StandaloneCommand standaloneCommands;
public void parse(
final AbstractParseResultHandler<List<Object>> resultHandler,
final PantheonExceptionHandler exceptionHandler,
@ -712,6 +747,7 @@ public class PantheonCommand implements DefaultCommandValues, Runnable {
public void run() {
try {
prepareLogging();
addConfigurationService();
logger.info("Starting Pantheon version: {}", PantheonInfo.version());
checkOptions().configure().controller().startPlugins().startSynchronization();
} catch (final Exception e) {
@ -719,6 +755,16 @@ public class PantheonCommand implements DefaultCommandValues, Runnable {
}
}
// Registers the common plugin configuration (pointing at the node's database directory)
// so plugins resolve their storage path through the PantheonConfiguration service.
private void addConfigurationService() {
pluginCommonConfiguration = new PantheonConfigurationImpl(dataDir().resolve(DATABASE_PATH));
pantheonPluginContext.addService(PantheonConfiguration.class, pluginCommonConfiguration);
}
// Test hook: lets tests substitute the plugin configuration without running the full
// startup sequence that normally populates it in addConfigurationService().
@VisibleForTesting
void setPantheonConfiguration(final PantheonConfiguration pluginCommonConfiguration) {
this.pluginCommonConfiguration = pluginCommonConfiguration;
}
private PantheonCommand handleStandaloneCommand() {
standaloneCommands = new StandaloneCommand();
if (isFullInstantiation()) {
@ -773,7 +819,6 @@ public class PantheonCommand implements DefaultCommandValues, Runnable {
.put("Ethereum Wire Protocol", ethProtocolOptions)
.put("Metrics", metricsCLIOptions)
.put("P2P Network", networkingOptions)
.put("RocksDB", rocksDBOptions)
.put("Synchronizer", synchronizerOptions)
.put("TransactionPool", transactionPoolOptions)
.build();
@ -784,6 +829,7 @@ public class PantheonCommand implements DefaultCommandValues, Runnable {
private PantheonCommand preparePlugins() {
pantheonPluginContext.addService(PicoCLIOptions.class, new PicoCLIOptionsImpl(commandLine));
pantheonPluginContext.addService(StorageService.class, storageService);
pantheonPluginContext.registerPlugins(pluginsDir());
return this;
}
@ -825,6 +871,7 @@ public class PantheonCommand implements DefaultCommandValues, Runnable {
pantheonController.getProtocolManager().getBlockBroadcaster(),
pantheonController.getTransactionPool(),
pantheonController.getSyncState()));
pantheonPluginContext.addService(MetricsSystem.class, getMetricsSystem());
pantheonPluginContext.startPlugins();
return this;
}
@ -933,8 +980,6 @@ public class PantheonCommand implements DefaultCommandValues, Runnable {
public PantheonController<?> buildController() {
try {
return getControllerBuilder().build();
} catch (final IOException e) {
throw new ExecutionException(this.commandLine, "Invalid path", e);
} catch (final Exception e) {
throw new ExecutionException(this.commandLine, e.getMessage(), e);
}
@ -943,10 +988,9 @@ public class PantheonCommand implements DefaultCommandValues, Runnable {
public PantheonControllerBuilder<?> getControllerBuilder() {
try {
return controllerBuilderFactory
.fromEthNetworkConfig(updateNetworkConfig(getNetwork()), genesisConfigOverrides)
.fromEthNetworkConfig(updateNetworkConfig(getNetwork()))
.synchronizerConfiguration(buildSyncConfig())
.ethProtocolConfiguration(ethProtocolOptions.toDomainObject())
.rocksDbConfiguration(buildRocksDbConfiguration())
.dataDirectory(dataDir())
.miningParameters(
new MiningParameters(coinbase, minTransactionGasPrice, extraData, isMiningEnabled))
@ -956,6 +1000,7 @@ public class PantheonCommand implements DefaultCommandValues, Runnable {
.privacyParameters(privacyParameters())
.clock(Clock.systemUTC())
.isRevertReasonEnabled(isRevertReasonEnabled)
.storageProvider(keyStorageProvider(keyValueStorageName))
.isPruningEnabled(isPruningEnabled)
.pruningConfiguration(buildPruningConfiguration())
.genesisConfigOverrides(genesisConfigOverrides);
@ -1201,13 +1246,22 @@ public class PantheonCommand implements DefaultCommandValues, Runnable {
commandLine, "Please specify Enclave public key file path to enable privacy");
}
privacyParametersBuilder.setPrivacyAddress(privacyPrecompiledAddress);
privacyParametersBuilder.setMetricsSystem(metricsSystem.get());
privacyParametersBuilder.setDataDir(dataDir());
privacyParametersBuilder.setPrivateKeyPath(privacyMarkerTransactionSigningKeyPath);
privacyParametersBuilder.setStorageProvider(
keyStorageProvider(keyValueStorageName + "-privacy"));
}
return privacyParametersBuilder.build();
}
/**
 * Builds a key-value storage provider using the factory registered under {@code name}.
 *
 * <p>Throws a StorageException (from StorageServiceImpl#getByName) when no factory with
 * that name has been registered — e.g. the plugin supplying it failed to load.
 *
 * @param name identity of the registered key-value storage factory (e.g. "rocksdb")
 * @return provider wired with the shared plugin configuration and the node's metrics system
 */
private KeyValueStorageProvider keyStorageProvider(final String name) {
return new KeyValueStorageProviderBuilder()
.withStorageFactory(storageService.getByName(name))
.withCommonConfiguration(pluginCommonConfiguration)
.withMetricsSystem(getMetricsSystem())
.build();
}
private SynchronizerConfiguration buildSyncConfig() {
return synchronizerOptions
.toDomainObject()
@ -1216,10 +1270,6 @@ public class PantheonCommand implements DefaultCommandValues, Runnable {
.build();
}
private RocksDbConfiguration buildRocksDbConfiguration() {
return rocksDBOptions.toDomainObject().databaseDir(dataDir().resolve(DATABASE_PATH)).build();
}
private TransactionPoolConfiguration buildTransactionPoolConfiguration() {
return transactionPoolOptions
.toDomainObject()

@ -182,8 +182,6 @@ public class BlocksSubCommand implements Runnable {
.getControllerBuilder()
.miningParameters(getMiningParameters())
.build();
} catch (final IOException e) {
throw new ExecutionException(new CommandLine(parentCommand), "Invalid path", e);
} catch (final Exception e) {
throw new ExecutionException(new CommandLine(parentCommand), e.getMessage(), e);
}

@ -12,7 +12,6 @@
*/
package tech.pegasys.pantheon.controller;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static tech.pegasys.pantheon.controller.KeyPairUtil.loadKeyPair;
@ -44,13 +43,11 @@ import tech.pegasys.pantheon.ethereum.jsonrpc.internal.methods.JsonRpcMethodFact
import tech.pegasys.pantheon.ethereum.mainnet.ProtocolSchedule;
import tech.pegasys.pantheon.ethereum.p2p.config.SubProtocolConfiguration;
import tech.pegasys.pantheon.ethereum.storage.StorageProvider;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.RocksDbStorageProvider;
import tech.pegasys.pantheon.ethereum.worldstate.MarkSweepPruner;
import tech.pegasys.pantheon.ethereum.worldstate.Pruner;
import tech.pegasys.pantheon.ethereum.worldstate.PruningConfiguration;
import tech.pegasys.pantheon.ethereum.worldstate.WorldStateArchive;
import tech.pegasys.pantheon.plugin.services.MetricsSystem;
import tech.pegasys.pantheon.services.kvstore.RocksDbConfiguration;
import tech.pegasys.pantheon.metrics.ObservableMetricsSystem;
import java.io.File;
import java.io.IOException;
@ -70,6 +67,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public abstract class PantheonControllerBuilder<C> {
private static final Logger LOG = LogManager.getLogger();
protected GenesisConfigFile genesisConfig;
@ -79,7 +77,7 @@ public abstract class PantheonControllerBuilder<C> {
protected TransactionPoolConfiguration transactionPoolConfiguration;
protected BigInteger networkId;
protected MiningParameters miningParameters;
protected MetricsSystem metricsSystem;
protected ObservableMetricsSystem metricsSystem;
protected PrivacyParameters privacyParameters;
protected Path dataDirectory;
protected Clock clock;
@ -87,17 +85,10 @@ public abstract class PantheonControllerBuilder<C> {
protected boolean isRevertReasonEnabled;
private StorageProvider storageProvider;
private final List<Runnable> shutdownActions = new ArrayList<>();
private RocksDbConfiguration rocksDbConfiguration;
private boolean isPruningEnabled;
private PruningConfiguration pruningConfiguration;
Map<String, String> genesisConfigOverrides;
public PantheonControllerBuilder<C> rocksDbConfiguration(
final RocksDbConfiguration rocksDbConfiguration) {
this.rocksDbConfiguration = rocksDbConfiguration;
return this;
}
public PantheonControllerBuilder<C> storageProvider(final StorageProvider storageProvider) {
this.storageProvider = storageProvider;
return this;
@ -141,7 +132,7 @@ public abstract class PantheonControllerBuilder<C> {
return this;
}
public PantheonControllerBuilder<C> metricsSystem(final MetricsSystem metricsSystem) {
public PantheonControllerBuilder<C> metricsSystem(final ObservableMetricsSystem metricsSystem) {
this.metricsSystem = metricsSystem;
return this;
}
@ -189,7 +180,7 @@ public abstract class PantheonControllerBuilder<C> {
return this;
}
public PantheonController<C> build() throws IOException {
public PantheonController<C> build() {
checkNotNull(genesisConfig, "Missing genesis config");
checkNotNull(syncConfig, "Missing sync config");
checkNotNull(ethereumWireProtocolConfiguration, "Missing ethereum protocol configuration");
@ -201,16 +192,7 @@ public abstract class PantheonControllerBuilder<C> {
checkNotNull(clock, "Mising clock");
checkNotNull(transactionPoolConfiguration, "Missing transaction pool configuration");
checkNotNull(nodeKeys, "Missing node keys");
checkArgument(
storageProvider != null || rocksDbConfiguration != null,
"Must supply either a storage provider or RocksDB configuration");
checkArgument(
storageProvider == null || rocksDbConfiguration == null,
"Must supply either storage provider or RocksDB confguration, but not both");
if (storageProvider == null && rocksDbConfiguration != null) {
storageProvider = RocksDbStorageProvider.create(rocksDbConfiguration, metricsSystem);
}
checkNotNull(storageProvider, "Must supply a storage provider");
prepForBuild();

@ -0,0 +1,38 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.services;
import tech.pegasys.pantheon.plugin.services.PantheonConfiguration;
import java.net.URI;
import java.nio.file.Path;
import java.util.Optional;
/**
 * Minimal {@link PantheonConfiguration} backed by a single storage path.
 *
 * <p>This implementation never supplies an enclave URL; callers needing privacy
 * configuration must obtain it elsewhere.
 */
public class PantheonConfigurationImpl implements PantheonConfiguration {

// Base directory under which plugin-managed storage (e.g. RocksDB databases) lives.
private final Path storagePath;

public PantheonConfigurationImpl(final Path storagePath) {
this.storagePath = storagePath;
}

@Override
public Path getStoragePath() {
return storagePath;
}

/** Always empty — no enclave is configured through this implementation. */
@Override
public Optional<URI> getEnclaveUrl() {
return Optional.empty();
}
}

@ -74,24 +74,14 @@ public class PantheonPluginContextImpl implements PantheonContext {
checkState(
state == Lifecycle.UNINITIALIZED,
"Pantheon plugins have already been registered. Cannot register additional plugins.");
if (pluginsDir == null) {
LOG.debug("Plugins are disabled.");
return;
}
final ClassLoader pluginLoader =
pluginDirectoryLoader(pluginsDir).orElse(this.getClass().getClassLoader());
state = Lifecycle.REGISTERING;
if (pluginsDir.toFile().isDirectory()) {
LOG.debug("Searching for plugins in {}", pluginsDir.toAbsolutePath().toString());
try (final Stream<Path> pluginFilesList = Files.list(pluginsDir)) {
final URL[] pluginJarURLs =
pluginFilesList
.filter(p -> p.getFileName().toString().endsWith(".jar"))
.map(PantheonPluginContextImpl::pathToURIOrNull)
.toArray(URL[]::new);
final ServiceLoader<PantheonPlugin> serviceLoader =
ServiceLoader.load(
PantheonPlugin.class,
new URLClassLoader(pluginJarURLs, this.getClass().getClassLoader()));
ServiceLoader.load(PantheonPlugin.class, pluginLoader);
for (final PantheonPlugin plugin : serviceLoader) {
try {
@ -107,15 +97,8 @@ public class PantheonPluginContextImpl implements PantheonContext {
plugins.add(plugin);
}
} catch (final MalformedURLException e) {
LOG.error("Error converting files to URLs, could not load plugins", e);
} catch (final IOException e) {
LOG.error("Error enumerating plugins, could not load plugins", e);
}
LOG.debug("Plugin registration complete.");
} else {
LOG.debug("Plugin directory does not exist, skipping registation. - {}", pluginsDir);
}
state = Lifecycle.REGISTERED;
}
@ -180,4 +163,27 @@ public class PantheonPluginContextImpl implements PantheonContext {
List<PantheonPlugin> getPlugins() {
return Collections.unmodifiableList(plugins);
}
/**
 * Builds a class loader over every {@code .jar} file in the plugins directory.
 *
 * @param pluginsDir directory to scan; may be null or missing
 * @return a URLClassLoader over the discovered jars, or empty when the directory is absent
 *     or listing it fails (failures are logged, not propagated)
 */
private Optional<ClassLoader> pluginDirectoryLoader(final Path pluginsDir) {
  // Guard clause: no directory means no plugin class loader.
  if (pluginsDir == null || !pluginsDir.toFile().isDirectory()) {
    LOG.debug("Plugin directory does not exist, skipping registration. - {}", pluginsDir);
    return Optional.empty();
  }
  LOG.debug("Searching for plugins in {}", pluginsDir.toAbsolutePath().toString());
  try (final Stream<Path> jarCandidates = Files.list(pluginsDir)) {
    final URL[] jarUrls =
        jarCandidates
            .filter(path -> path.getFileName().toString().endsWith(".jar"))
            .map(PantheonPluginContextImpl::pathToURIOrNull)
            .toArray(URL[]::new);
    // Parent is this class's loader so plugins see the plugin API classes.
    return Optional.of(new URLClassLoader(jarUrls, this.getClass().getClassLoader()));
  } catch (final MalformedURLException e) {
    // Must precede IOException: MalformedURLException is an IOException subclass.
    LOG.error("Error converting files to URLs, could not load plugins", e);
  } catch (final IOException e) {
    LOG.error("Error enumerating plugins, could not load plugins", e);
  }
  return Optional.empty();
}
}

@ -0,0 +1,63 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.services;
import tech.pegasys.pantheon.plugin.services.StorageService;
import tech.pegasys.pantheon.plugin.services.exception.StorageException;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageFactory;
import tech.pegasys.pantheon.plugin.services.storage.SegmentIdentifier;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Registry of {@link KeyValueStorageFactory} implementations, keyed by factory name,
 * plus the fixed set of storage segments the node requires.
 *
 * <p>Registration is thread-safe; a later registration under the same name replaces
 * the earlier one.
 */
public class StorageServiceImpl implements StorageService {

  // Immutable list of every segment a storage factory must be able to create.
  private final List<SegmentIdentifier> segments;
  // Factories indexed by their self-reported name; concurrent for plugin-time registration.
  private final Map<String, KeyValueStorageFactory> factories;

  public StorageServiceImpl() {
    segments = List.of(Segment.values());
    factories = new ConcurrentHashMap<>();
  }

  @Override
  public void registerKeyValueStorage(final KeyValueStorageFactory factory) {
    factories.put(factory.getName(), factory);
  }

  @Override
  public List<SegmentIdentifier> getAllSegmentIdentifiers() {
    return segments;
  }

  /**
   * Looks up a previously registered factory.
   *
   * @param name factory identity as returned by {@link KeyValueStorageFactory#getName()}
   * @return the matching factory
   * @throws StorageException when no factory was registered under {@code name}
   */
  public KeyValueStorageFactory getByName(final String name) {
    final KeyValueStorageFactory factory = factories.get(name);
    if (factory == null) {
      throw new StorageException("No KeyValueStorageFactory found for key: " + name);
    }
    return factory;
  }

  // Storage areas the node persists; getName() deliberately reuses the enum constant name.
  private enum Segment implements SegmentIdentifier {
    BLOCKCHAIN,
    WORLD_STATE,
    PRIVATE_TRANSACTIONS,
    PRIVATE_STATE,
    PRUNING_STATE;

    @Override
    public String getName() {
      return name();
    }
  }
}

@ -26,12 +26,19 @@ import tech.pegasys.pantheon.ethereum.eth.EthProtocolConfiguration;
import tech.pegasys.pantheon.ethereum.eth.sync.SynchronizerConfiguration;
import tech.pegasys.pantheon.ethereum.eth.transactions.TransactionPoolConfiguration;
import tech.pegasys.pantheon.ethereum.mainnet.PrecompiledContract;
import tech.pegasys.pantheon.ethereum.storage.StorageProvider;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueSegmentIdentifier;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueStorageProviderBuilder;
import tech.pegasys.pantheon.metrics.noop.NoOpMetricsSystem;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.RocksDBKeyValuePrivacyStorageFactory;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBFactoryConfiguration;
import tech.pegasys.pantheon.services.PantheonConfigurationImpl;
import tech.pegasys.pantheon.testutil.TestClock;
import java.io.IOException;
import java.math.BigInteger;
import java.nio.file.Path;
import java.util.Arrays;
import org.junit.Rule;
import org.junit.Test;
@ -39,6 +46,11 @@ import org.junit.rules.TemporaryFolder;
public class PrivacyTest {
private static final int MAX_OPEN_FILES = 1024;
private static final long CACHE_CAPACITY = 8388608;
private static final int MAX_BACKGROUND_COMPACTIONS = 4;
private static final int BACKGROUND_THREAD_COUNT = 4;
private static final Integer ADDRESS = 9;
@Rule public final TemporaryFolder folder = new TemporaryFolder();
@ -49,9 +61,8 @@ public class PrivacyTest {
new PrivacyParameters.Builder()
.setPrivacyAddress(ADDRESS)
.setEnabled(true)
.setDataDir(dataDir)
.setStorageProvider(createKeyValueStorageProvider(dataDir))
.build();
final PantheonController<?> pantheonController =
new PantheonController.Builder()
.fromGenesisConfig(GenesisConfigFile.mainnet())
@ -75,6 +86,23 @@ public class PrivacyTest {
.getByBlockNumber(1)
.getPrecompileContractRegistry()
.get(privacyContractAddress, Account.DEFAULT_VERSION);
assertThat(precompiledContract.getName()).isEqualTo("Privacy");
}
/**
 * Builds a privacy-enabled, RocksDB-backed {@link StorageProvider} rooted at {@code dbAhead},
 * using fixed test tuning constants and a no-op metrics system. The privacy factory variant
 * stores its data under a subdirectory of the common storage path.
 */
private StorageProvider createKeyValueStorageProvider(final Path dbAhead) {
return new KeyValueStorageProviderBuilder()
.withStorageFactory(
new RocksDBKeyValuePrivacyStorageFactory(
// Supplier is resolved lazily by the factory on first create().
() ->
new RocksDBFactoryConfiguration(
MAX_OPEN_FILES,
MAX_BACKGROUND_COMPACTIONS,
BACKGROUND_THREAD_COUNT,
CACHE_CAPACITY),
Arrays.asList(KeyValueSegmentIdentifier.values())))
.withCommonConfiguration(new PantheonConfigurationImpl(dbAhead))
.withMetricsSystem(new NoOpMetricsSystem())
.build();
}
}

@ -42,19 +42,22 @@ import tech.pegasys.pantheon.ethereum.mainnet.ProtocolSchedule;
import tech.pegasys.pantheon.ethereum.mainnet.ProtocolSpec;
import tech.pegasys.pantheon.ethereum.p2p.peers.EnodeURL;
import tech.pegasys.pantheon.ethereum.storage.StorageProvider;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.RocksDbStorageProvider;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueSegmentIdentifier;
import tech.pegasys.pantheon.ethereum.storage.keyvalue.KeyValueStorageProviderBuilder;
import tech.pegasys.pantheon.metrics.ObservableMetricsSystem;
import tech.pegasys.pantheon.metrics.noop.NoOpMetricsSystem;
import tech.pegasys.pantheon.metrics.prometheus.MetricsConfiguration;
import tech.pegasys.pantheon.services.kvstore.RocksDbConfiguration;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.RocksDBKeyValueStorageFactory;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBFactoryConfiguration;
import tech.pegasys.pantheon.services.PantheonConfigurationImpl;
import tech.pegasys.pantheon.testutil.TestClock;
import tech.pegasys.pantheon.util.uint.UInt256;
import java.io.IOException;
import java.math.BigInteger;
import java.net.InetAddress;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@ -80,6 +83,11 @@ import org.junit.rules.TemporaryFolder;
/** Tests for {@link Runner}. */
public final class RunnerTest {
private static final int MAX_OPEN_FILES = 1024;
private static final long CACHE_CAPACITY = 8388608;
private static final int MAX_BACKGROUND_COMPACTIONS = 4;
private static final int BACKGROUND_THREAD_COUNT = 4;
@Rule public final TemporaryFolder temp = new TemporaryFolder();
@Test
@ -329,9 +337,20 @@ public final class RunnerTest {
}
}
private StorageProvider createKeyValueStorageProvider(final Path dbAhead) throws IOException {
return RocksDbStorageProvider.create(
RocksDbConfiguration.builder().databaseDir(dbAhead).build(), new NoOpMetricsSystem());
/**
 * Builds a RocksDB-backed {@link StorageProvider} rooted at {@code dbAhead} for the runner
 * under test, using fixed tuning constants and a no-op metrics system.
 */
private StorageProvider createKeyValueStorageProvider(final Path dbAhead) {
return new KeyValueStorageProviderBuilder()
.withStorageFactory(
new RocksDBKeyValueStorageFactory(
// Supplier is resolved lazily by the factory on first create().
() ->
new RocksDBFactoryConfiguration(
MAX_OPEN_FILES,
MAX_BACKGROUND_COMPACTIONS,
BACKGROUND_THREAD_COUNT,
CACHE_CAPACITY),
Arrays.asList(KeyValueSegmentIdentifier.values())))
.withCommonConfiguration(new PantheonConfigurationImpl(dbAhead))
.withMetricsSystem(new NoOpMetricsSystem())
.build();
}
private JsonRpcConfiguration jsonRpcConfiguration() {

@ -30,7 +30,6 @@ import tech.pegasys.pantheon.cli.config.EthNetworkConfig;
import tech.pegasys.pantheon.cli.options.EthProtocolOptions;
import tech.pegasys.pantheon.cli.options.MetricsCLIOptions;
import tech.pegasys.pantheon.cli.options.NetworkingOptions;
import tech.pegasys.pantheon.cli.options.RocksDBOptions;
import tech.pegasys.pantheon.cli.options.SynchronizerOptions;
import tech.pegasys.pantheon.cli.options.TransactionPoolOptions;
import tech.pegasys.pantheon.cli.subcommands.PublicKeySubCommand.KeyLoader;
@ -49,8 +48,12 @@ import tech.pegasys.pantheon.ethereum.jsonrpc.JsonRpcConfiguration;
import tech.pegasys.pantheon.ethereum.jsonrpc.websocket.WebSocketConfiguration;
import tech.pegasys.pantheon.ethereum.mainnet.ProtocolSchedule;
import tech.pegasys.pantheon.ethereum.permissioning.PermissioningConfiguration;
import tech.pegasys.pantheon.ethereum.storage.StorageProvider;
import tech.pegasys.pantheon.metrics.prometheus.MetricsConfiguration;
import tech.pegasys.pantheon.plugin.services.PantheonConfiguration;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageFactory;
import tech.pegasys.pantheon.services.PantheonPluginContextImpl;
import tech.pegasys.pantheon.services.StorageServiceImpl;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import java.io.ByteArrayOutputStream;
@ -105,6 +108,10 @@ public abstract class CommandTestAbstract {
@Mock protected RlpBlockExporter rlpBlockExporter;
@Mock protected JsonBlockImporter<?> jsonBlockImporter;
@Mock protected RlpBlockImporter rlpBlockImporter;
@Mock protected StorageServiceImpl storageService;
@Mock protected PantheonConfiguration commonPluginConfiguration;
@Mock protected KeyValueStorageFactory rocksDBStorageFactory;
@Mock protected KeyValueStorageFactory rocksDBSPrivacyStorageFactory;
@Mock protected Logger mockLogger;
@Mock protected PantheonPluginContextImpl mockPantheonPluginContext;
@ -121,6 +128,7 @@ public abstract class CommandTestAbstract {
@Captor protected ArgumentCaptor<GraphQLConfiguration> graphQLConfigArgumentCaptor;
@Captor protected ArgumentCaptor<WebSocketConfiguration> wsRpcConfigArgumentCaptor;
@Captor protected ArgumentCaptor<MetricsConfiguration> metricsConfigArgumentCaptor;
@Captor protected ArgumentCaptor<StorageProvider> storageProviderArgumentCaptor;
@Captor
protected ArgumentCaptor<PermissioningConfiguration> permissioningConfigurationArgumentCaptor;
@ -139,12 +147,10 @@ public abstract class CommandTestAbstract {
public void initMocks() throws Exception {
// doReturn used because of generic PantheonController
doReturn(mockControllerBuilder)
.when(mockControllerBuilderFactory)
.fromEthNetworkConfig(any(), any());
doReturn(mockControllerBuilder).when(mockControllerBuilderFactory).fromEthNetworkConfig(any());
when(mockControllerBuilder.synchronizerConfiguration(any())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.ethProtocolConfiguration(any())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.rocksDbConfiguration(any())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.transactionPoolConfiguration(any()))
.thenReturn(mockControllerBuilder);
when(mockControllerBuilder.dataDirectory(any())).thenReturn(mockControllerBuilder);
@ -154,6 +160,7 @@ public abstract class CommandTestAbstract {
when(mockControllerBuilder.privacyParameters(any())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.clock(any())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.isRevertReasonEnabled(false)).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.storageProvider(any())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.isPruningEnabled(anyBoolean())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.pruningConfiguration(any())).thenReturn(mockControllerBuilder);
when(mockControllerBuilder.genesisConfigOverrides(any())).thenReturn(mockControllerBuilder);
@ -189,6 +196,8 @@ public abstract class CommandTestAbstract {
when(mockRunnerBuilder.metricsConfiguration(any())).thenReturn(mockRunnerBuilder);
when(mockRunnerBuilder.staticNodes(any())).thenReturn(mockRunnerBuilder);
when(mockRunnerBuilder.build()).thenReturn(mockRunner);
when(storageService.getByName("rocksdb")).thenReturn(rocksDBStorageFactory);
}
// Display outputs for debug purpose
@ -241,7 +250,10 @@ public abstract class CommandTestAbstract {
mockControllerBuilderFactory,
keyLoader,
mockPantheonPluginContext,
environment);
environment,
storageService);
pantheonCommand.setPantheonConfiguration(commonPluginConfiguration);
// parse using Ansi.OFF to be able to assert on non formatted output results
pantheonCommand.parse(
@ -254,6 +266,7 @@ public abstract class CommandTestAbstract {
@CommandLine.Command
public static class TestPantheonCommand extends PantheonCommand {
@CommandLine.Spec CommandLine.Model.CommandSpec spec;
private final KeyLoader keyLoader;
@ -271,7 +284,8 @@ public abstract class CommandTestAbstract {
final PantheonController.Builder controllerBuilderFactory,
final KeyLoader keyLoader,
final PantheonPluginContextImpl pantheonPluginContext,
final Map<String, String> environment) {
final Map<String, String> environment,
final StorageServiceImpl storageService) {
super(
mockLogger,
mockBlockImporter,
@ -280,7 +294,8 @@ public abstract class CommandTestAbstract {
mockRunnerBuilder,
controllerBuilderFactory,
pantheonPluginContext,
environment);
environment,
storageService);
this.keyLoader = keyLoader;
}
@ -288,10 +303,6 @@ public abstract class CommandTestAbstract {
return spec;
}
public RocksDBOptions getRocksDBOptions() {
return rocksDBOptions;
}
public NetworkingOptions getNetworkingOptions() {
return networkingOptions;
}

@ -17,12 +17,12 @@ import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isNotNull;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
import static tech.pegasys.pantheon.cli.config.NetworkName.DEV;
import static tech.pegasys.pantheon.cli.config.NetworkName.GOERLI;
import static tech.pegasys.pantheon.cli.config.NetworkName.MAINNET;
@ -71,7 +71,6 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@ -90,16 +89,15 @@ import picocli.CommandLine;
public class PantheonCommandTest extends CommandTestAbstract {
private final String ENCLAVE_URI = "http://1.2.3.4:5555";
private final String ENCLAVE_PUBLIC_KEY = "A1aVtMxLCUHmBVHXoZzzBgPbW/wj5axDpW9X8l91SGo=";
private final String VALID_NODE_ID =
private static final String ENCLAVE_URI = "http://1.2.3.4:5555";
private static final String ENCLAVE_PUBLIC_KEY = "A1aVtMxLCUHmBVHXoZzzBgPbW/wj5axDpW9X8l91SGo=";
private static final String VALID_NODE_ID =
"6f8a80d14311c39f35f516fa664deaaaa13e85b2f7493f37f6144d86991ec012937307647bd3b9a82abe2974e1407241d54947bbb39763a4cac9f77166ad92a0";
static final String PERMISSIONING_CONFIG_TOML = "/permissioning_config.toml";
private static final JsonRpcConfiguration defaultJsonRpcConfiguration;
private static final String PERMISSIONING_CONFIG_TOML = "/permissioning_config.toml";
private static final JsonRpcConfiguration DEFAULT_JSON_RPC_CONFIGURATION;
private static final GraphQLConfiguration DEFAULT_GRAPH_QL_CONFIGURATION;
private static final WebSocketConfiguration defaultWebSocketConfiguration;
private static final MetricsConfiguration defaultMetricsConfiguration;
private static final WebSocketConfiguration DEFAULT_WEB_SOCKET_CONFIGURATION;
private static final MetricsConfiguration DEFAULT_METRICS_CONFIGURATION;
private static final int GENESIS_CONFIG_TEST_CHAINID = 3141592;
private static final JsonObject GENESIS_VALID_JSON =
(new JsonObject())
@ -114,13 +112,10 @@ public class PantheonCommandTest extends CommandTestAbstract {
};
static {
defaultJsonRpcConfiguration = JsonRpcConfiguration.createDefault();
DEFAULT_JSON_RPC_CONFIGURATION = JsonRpcConfiguration.createDefault();
DEFAULT_GRAPH_QL_CONFIGURATION = GraphQLConfiguration.createDefault();
defaultWebSocketConfiguration = WebSocketConfiguration.createDefault();
defaultMetricsConfiguration = MetricsConfiguration.builder().build();
DEFAULT_WEB_SOCKET_CONFIGURATION = WebSocketConfiguration.createDefault();
DEFAULT_METRICS_CONFIGURATION = MetricsConfiguration.builder().build();
}
@Test
@ -163,22 +158,24 @@ public class PantheonCommandTest extends CommandTestAbstract {
verify(mockRunnerBuilder).p2pListenPort(eq(30303));
verify(mockRunnerBuilder).maxPeers(eq(25));
verify(mockRunnerBuilder).fractionRemoteConnectionsAllowed(eq(0.6f));
verify(mockRunnerBuilder).jsonRpcConfiguration(eq(defaultJsonRpcConfiguration));
verify(mockRunnerBuilder).jsonRpcConfiguration(eq(DEFAULT_JSON_RPC_CONFIGURATION));
verify(mockRunnerBuilder).graphQLConfiguration(eq(DEFAULT_GRAPH_QL_CONFIGURATION));
verify(mockRunnerBuilder).webSocketConfiguration(eq(defaultWebSocketConfiguration));
verify(mockRunnerBuilder).metricsConfiguration(eq(defaultMetricsConfiguration));
verify(mockRunnerBuilder).webSocketConfiguration(eq(DEFAULT_WEB_SOCKET_CONFIGURATION));
verify(mockRunnerBuilder).metricsConfiguration(eq(DEFAULT_METRICS_CONFIGURATION));
verify(mockRunnerBuilder).ethNetworkConfig(ethNetworkArg.capture());
verify(mockRunnerBuilder).build();
verify(mockControllerBuilderFactory).fromEthNetworkConfig(ethNetworkArg.capture(), any());
verify(mockControllerBuilderFactory).fromEthNetworkConfig(ethNetworkArg.capture());
final ArgumentCaptor<MiningParameters> miningArg =
ArgumentCaptor.forClass(MiningParameters.class);
verify(mockControllerBuilder).synchronizerConfiguration(syncConfigurationCaptor.capture());
verify(mockControllerBuilder).dataDirectory(isNotNull());
verify(mockControllerBuilder).miningParameters(miningArg.capture());
verify(mockControllerBuilder).nodePrivateKeyFile(isNotNull());
verify(mockControllerBuilder).storageProvider(storageProviderArgumentCaptor.capture());
verify(mockControllerBuilder).build();
assertThat(storageProviderArgumentCaptor.getValue()).isNotNull();
assertThat(syncConfigurationCaptor.getValue().getSyncMode()).isEqualTo(SyncMode.FULL);
assertThat(commandErrorOutput.toString()).isEmpty();
assertThat(miningArg.getValue().getCoinbase()).isEqualTo(Optional.empty());
@ -335,7 +332,7 @@ public class PantheonCommandTest extends CommandTestAbstract {
.setBootNodes(nodes)
.build();
verify(mockControllerBuilder).dataDirectory(eq(Paths.get("/opt/pantheon").toAbsolutePath()));
verify(mockControllerBuilderFactory).fromEthNetworkConfig(eq(networkConfig), any());
verify(mockControllerBuilderFactory).fromEthNetworkConfig(eq(networkConfig));
verify(mockControllerBuilder).synchronizerConfiguration(syncConfigurationCaptor.capture());
assertThat(syncConfigurationCaptor.getValue().getSyncMode()).isEqualTo(SyncMode.FAST);
@ -893,7 +890,7 @@ public class PantheonCommandTest extends CommandTestAbstract {
parseCommand("--genesis-file", genesisFile.toString());
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture(), any());
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture());
verify(mockControllerBuilder).build();
assertThat(networkArg.getValue().getGenesisConfig())
@ -929,7 +926,7 @@ public class PantheonCommandTest extends CommandTestAbstract {
final ArgumentCaptor<EthNetworkConfig> networkArg =
ArgumentCaptor.forClass(EthNetworkConfig.class);
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture(), any());
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture());
verify(mockControllerBuilder).build();
assertThat(networkArg.getValue().getGenesisConfig())
@ -952,7 +949,7 @@ public class PantheonCommandTest extends CommandTestAbstract {
final ArgumentCaptor<EthNetworkConfig> networkArg =
ArgumentCaptor.forClass(EthNetworkConfig.class);
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture(), any());
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture());
verify(mockControllerBuilder).build();
assertThat(networkArg.getValue().getGenesisConfig())
@ -965,22 +962,6 @@ public class PantheonCommandTest extends CommandTestAbstract {
assertThat(commandErrorOutput.toString()).isEmpty();
}
@Test
@SuppressWarnings("unchecked")
public void overrideGenesisConfigFileChange() throws Exception {
final ArgumentCaptor<Map<String, String>> overrides = ArgumentCaptor.forClass(Map.class);
parseCommand("--network=dev", "--override-genesis-config=chainId=8675309");
verify(mockControllerBuilderFactory).fromEthNetworkConfig(any(), overrides.capture());
verify(mockControllerBuilder).build();
assertThat(overrides.getValue()).containsOnlyKeys("chainId");
assertThat(overrides.getValue()).containsEntry("chainId", "8675309");
assertThat(commandOutput.toString()).isEmpty();
assertThat(commandErrorOutput.toString()).isEmpty();
}
@Test
public void predefinedNetworkIdsMustBeEqualToChainIds() {
// check the network id against the one in mainnet genesis config
@ -2362,7 +2343,7 @@ public class PantheonCommandTest extends CommandTestAbstract {
final ArgumentCaptor<EthNetworkConfig> networkArg =
ArgumentCaptor.forClass(EthNetworkConfig.class);
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture(), any());
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture());
verify(mockControllerBuilder).build();
assertThat(networkArg.getValue()).isEqualTo(EthNetworkConfig.getNetworkConfig(DEV));
@ -2378,7 +2359,7 @@ public class PantheonCommandTest extends CommandTestAbstract {
final ArgumentCaptor<EthNetworkConfig> networkArg =
ArgumentCaptor.forClass(EthNetworkConfig.class);
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture(), any());
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture());
verify(mockControllerBuilder).build();
assertThat(networkArg.getValue()).isEqualTo(EthNetworkConfig.getNetworkConfig(RINKEBY));
@ -2394,7 +2375,7 @@ public class PantheonCommandTest extends CommandTestAbstract {
final ArgumentCaptor<EthNetworkConfig> networkArg =
ArgumentCaptor.forClass(EthNetworkConfig.class);
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture(), any());
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture());
verify(mockControllerBuilder).build();
assertThat(networkArg.getValue()).isEqualTo(EthNetworkConfig.getNetworkConfig(ROPSTEN));
@ -2410,7 +2391,7 @@ public class PantheonCommandTest extends CommandTestAbstract {
final ArgumentCaptor<EthNetworkConfig> networkArg =
ArgumentCaptor.forClass(EthNetworkConfig.class);
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture(), any());
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture());
verify(mockControllerBuilder).build();
assertThat(networkArg.getValue()).isEqualTo(EthNetworkConfig.getNetworkConfig(GOERLI));
@ -2451,7 +2432,7 @@ public class PantheonCommandTest extends CommandTestAbstract {
final ArgumentCaptor<EthNetworkConfig> networkArg =
ArgumentCaptor.forClass(EthNetworkConfig.class);
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture(), any());
verify(mockControllerBuilderFactory).fromEthNetworkConfig(networkArg.capture());
verify(mockControllerBuilder).build();
assertThat(networkArg.getValue().getBootNodes())
@ -2490,7 +2471,8 @@ public class PantheonCommandTest extends CommandTestAbstract {
}
@Test
public void mustUseEnclaveUriAndOptions() throws IOException {
public void mustUseEnclaveUriAndOptions() {
when(storageService.getByName("rocksdb-privacy")).thenReturn(rocksDBSPrivacyStorageFactory);
final URL configFile = this.getClass().getResource("/orion_publickey.pub");
parseCommand(
@ -2539,7 +2521,7 @@ public class PantheonCommandTest extends CommandTestAbstract {
}
@Test
public void mustVerifyPrivacyIsDisabled() throws IOException {
public void mustVerifyPrivacyIsDisabled() {
parseCommand();
final ArgumentCaptor<PrivacyParameters> enclaveArg =

@ -1,51 +0,0 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.cli.options;
import tech.pegasys.pantheon.services.kvstore.RocksDbConfiguration;
import java.util.Arrays;
import java.util.List;
/**
 * CLI round-trip test for {@code RocksDBOptions}: verifies that a {@code RocksDbConfiguration}
 * built from defaults or customized values survives conversion to CLI options and back.
 */
public class RocksDBOptionsTest
extends AbstractCLIOptionsTest<RocksDbConfiguration.Builder, RocksDBOptions> {
// Baseline: a builder carrying all default RocksDB tuning values.
@Override
RocksDbConfiguration.Builder createDefaultDomainObject() {
return RocksDbConfiguration.builder();
}
// Every tunable bumped by one so any field dropped in conversion is detected.
@Override
RocksDbConfiguration.Builder createCustomizedDomainObject() {
return RocksDbConfiguration.builder()
.maxOpenFiles(RocksDbConfiguration.DEFAULT_MAX_OPEN_FILES + 1)
.cacheCapacity(RocksDbConfiguration.DEFAULT_CACHE_CAPACITY + 1)
.maxBackgroundCompactions(RocksDbConfiguration.DEFAULT_MAX_BACKGROUND_COMPACTIONS + 1)
.backgroundThreadCount(RocksDbConfiguration.DEFAULT_BACKGROUND_THREAD_COUNT + 1);
}
@Override
RocksDBOptions optionsFromDomainObject(final RocksDbConfiguration.Builder domainObject) {
return RocksDBOptions.fromConfig(domainObject.build());
}
@Override
RocksDBOptions getOptionsFromPantheonCommand(final TestPantheonCommand command) {
return command.getRocksDBOptions();
}
// These fields have no corresponding CLI flag, so the round-trip check skips them.
@Override
protected List<String> getFieldsToIgnore() {
return Arrays.asList("databaseDir", "useColumns");
}
}

@ -105,3 +105,6 @@ Xincoming-tx-messages-keep-alive-seconds=60
# Revert Reason
revert-reason-enabled=false
# Storage plugin to use
key-value-storage="rocksdb"

@ -56,7 +56,7 @@ Calculated : ${currentHash}
tasks.register('checkAPIChanges', FileStateChecker) {
description = "Checks that the API for the Plugin-API project does not change without deliberate thought"
files = sourceSets.main.allJava.files
knownHash = 'j39vjVpNEK0kTpk/MLK8BHnqkFoRO9BWajrm9WoejWM='
knownHash = '1VkUKHRqmT1ONtvrqRz0VNCA1uqSopK0RAnNdmCM6vc='
}
check.dependsOn('checkAPIChanges')

@ -13,6 +13,8 @@
package tech.pegasys.pantheon.plugin.services.storage;
import tech.pegasys.pantheon.plugin.Unstable;
import tech.pegasys.pantheon.plugin.services.MetricsSystem;
import tech.pegasys.pantheon.plugin.services.PantheonConfiguration;
import tech.pegasys.pantheon.plugin.services.exception.StorageException;
/** Factory for creating key-value storage instances. */
@ -38,10 +40,14 @@ public interface KeyValueStorageFactory {
*
* @param segment identity of the isolation segment, an identifier for the data set the storage
* will contain.
* @param configuration common configuration available to plugins, in a populated state.
* @param metricsSystem metrics component for recording key-value storage events.
* @return the storage instance reserved for the given segment.
* @exception StorageException problem encountered when creating storage for the segment.
*/
KeyValueStorage create(SegmentIdentifier segment) throws StorageException;
KeyValueStorage create(
SegmentIdentifier segment, PantheonConfiguration configuration, MetricsSystem metricsSystem)
throws StorageException;
/**
* Whether storage segment isolation is supported by the factory created instances.

@ -0,0 +1,14 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
// Disable the jar task: this project is not meant to produce an archive of its own.
jar { enabled = false }

@ -1,5 +1,5 @@
/*
* Copyright 2018 ConsenSys AG.
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
@ -14,7 +14,7 @@
apply plugin: 'java-library'
jar {
baseName 'pantheon-services-util'
baseName 'pantheon-plugin-rocksdb'
manifest {
attributes(
'Specification-Title': baseName,
@ -25,22 +25,28 @@ jar {
}
}
publishing {
publications {
mavenJava(MavenPublication) { artifactId 'services-util' }
}
}
dependencies {
api project(':util')
api project(':plugin-api')
implementation project(':metrics:core')
implementation project(':metrics:rocksdb')
implementation project(':services:kvstore')
implementation 'org.apache.logging.log4j:log4j-api'
implementation 'com.fasterxml.jackson.core:jackson-databind'
implementation 'com.google.auto.service:auto-service'
implementation 'com.google.guava:guava'
implementation 'info.picocli:picocli'
implementation 'io.prometheus:simpleclient'
implementation 'org.apache.logging.log4j:log4j-api'
implementation 'org.rocksdb:rocksdbjni'
annotationProcessor 'com.google.auto.service:auto-service'
runtime 'org.apache.logging.log4j:log4j-core'
testImplementation project(':testutil')
testImplementation 'junit:junit'
testImplementation 'org.assertj:assertj-core'
testImplementation 'org.mockito:mockito-core'
}

@ -0,0 +1,44 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.plugin.services.storage.rocksdb;
import tech.pegasys.pantheon.plugin.services.PantheonConfiguration;
import tech.pegasys.pantheon.plugin.services.storage.SegmentIdentifier;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBFactoryConfiguration;
import java.nio.file.Path;
import java.util.List;
import com.google.common.base.Supplier;
/**
 * RocksDB storage factory for privacy data. Behaves exactly like
 * {@link RocksDBKeyValueStorageFactory} but registers under the name "rocksdb-privacy" and
 * stores its database under a {@code private} subdirectory of the common storage path.
 *
 * @deprecated NOTE(review): marked deprecated with no replacement documented here — confirm
 *     the intended successor and reference it in this Javadoc.
 */
@Deprecated
public class RocksDBKeyValuePrivacyStorageFactory extends RocksDBKeyValueStorageFactory {
// Subdirectory (relative to the common storage path) holding the privacy database.
private static final String PRIVATE_DATABASE_PATH = "private";
public RocksDBKeyValuePrivacyStorageFactory(
final Supplier<RocksDBFactoryConfiguration> configuration,
final List<SegmentIdentifier> segments) {
super(configuration, segments);
}
/** Name used for plugin registration; distinct from the public-state "rocksdb" factory. */
@Override
public String getName() {
return "rocksdb-privacy";
}
/** Keeps privacy data isolated by appending the private subdirectory to the base path. */
@Override
protected Path storagePath(final PantheonConfiguration commonConfiguration) {
return super.storagePath(commonConfiguration).resolve(PRIVATE_DATABASE_PATH);
}
}

@ -0,0 +1,151 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.plugin.services.storage.rocksdb;
import tech.pegasys.pantheon.plugin.services.MetricsSystem;
import tech.pegasys.pantheon.plugin.services.PantheonConfiguration;
import tech.pegasys.pantheon.plugin.services.exception.StorageException;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageFactory;
import tech.pegasys.pantheon.plugin.services.storage.SegmentIdentifier;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.DatabaseMetadata;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBConfiguration;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBConfigurationBuilder;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBFactoryConfiguration;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.segmented.RocksDBColumnarKeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.unsegmented.RocksDBKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.SegmentedKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.SegmentedKeyValueStorageAdapter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Set;
import com.google.common.base.Supplier;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class RocksDBKeyValueStorageFactory implements KeyValueStorageFactory {
private static final Logger LOG = LogManager.getLogger();
// Metadata version at which segment isolation (columnar storage) is used.
private static final int DEFAULT_VERSION = 1;
// Database metadata versions this factory can open. NOTE(review): not referenced in the
// visible code — presumably validated in databaseVersion(...); confirm.
private static final Set<Integer> SUPPORTED_VERSION = Set.of(0, 1);
// Name under which this factory is registered with the StorageService.
private static final String NAME = "rocksdb";
// Determined lazily from the on-disk database version on the first create(...) call.
private boolean isSegmentIsolationSupported;
// Exactly one of these two is non-null once init(...) has run; the other is explicitly null.
private SegmentedKeyValueStorage<?> segmentedStorage;
private KeyValueStorage unsegmentedStorage;
// Resolved via get() only when init(...) runs, deferring configuration until first use.
private final Supplier<RocksDBFactoryConfiguration> configuration;
private final List<SegmentIdentifier> segments;
/**
 * @param configuration supplier of RocksDB tuning options, resolved on first {@code create}
 * @param segments segments a segmented (columnar) database must provide
 */
public RocksDBKeyValueStorageFactory(
final Supplier<RocksDBFactoryConfiguration> configuration,
final List<SegmentIdentifier> segments) {
this.configuration = configuration;
this.segments = segments;
}
/** Factory name ("rocksdb") used for plugin registration and storage selection. */
@Override
public String getName() {
return NAME;
}
/**
 * Returns key-value storage for {@code segment}, lazily opening the underlying RocksDB
 * database on the first call. When the database version supports segment isolation each
 * segment gets an adapter over its own slice of the segmented store; otherwise every segment
 * shares the single unsegmented store.
 *
 * <p>NOTE(review): the lazy initialisation is not synchronized — confirm {@code create} is
 * only invoked from a single thread during startup.
 *
 * @throws StorageException if the database metadata cannot be read or the database fails to
 *     open
 */
@Override
public KeyValueStorage create(
final SegmentIdentifier segment,
final PantheonConfiguration commonConfiguration,
final MetricsSystem metricsSystem)
throws StorageException {
if (requiresInit()) {
init(commonConfiguration, metricsSystem);
}
return isSegmentIsolationSupported
? new SegmentedKeyValueStorageAdapter<>(segment, segmentedStorage)
: unsegmentedStorage;
}
@Override
public boolean isSegmentIsolationSupported() {
return isSegmentIsolationSupported;
}
public void close() throws IOException {
if (segmentedStorage != null) {
segmentedStorage.close();
}
if (unsegmentedStorage != null) {
unsegmentedStorage.close();
}
}
protected Path storagePath(final PantheonConfiguration commonConfiguration) {
return commonConfiguration.getStoragePath();
}
private boolean requiresInit() {
return segmentedStorage == null && unsegmentedStorage == null;
}
private void init(
final PantheonConfiguration commonConfiguration, final MetricsSystem metricsSystem) {
try {
this.isSegmentIsolationSupported = databaseVersion(commonConfiguration) == DEFAULT_VERSION;
} catch (final IOException e) {
LOG.error("Failed to retrieve the RocksDB database meta version: {}", e.getMessage());
throw new StorageException(e.getMessage(), e);
}
final RocksDBConfiguration rocksDBConfiguration =
RocksDBConfigurationBuilder.from(configuration.get())
.databaseDir(storagePath(commonConfiguration))
.build();
if (isSegmentIsolationSupported) {
this.unsegmentedStorage = null;
this.segmentedStorage =
new RocksDBColumnarKeyValueStorage(rocksDBConfiguration, segments, metricsSystem);
} else {
this.unsegmentedStorage = new RocksDBKeyValueStorage(rocksDBConfiguration, metricsSystem);
this.segmentedStorage = null;
}
}
private int databaseVersion(final PantheonConfiguration commonConfiguration) throws IOException {
final Path databaseDir = storagePath(commonConfiguration);
final boolean databaseExists = databaseDir.resolve("IDENTITY").toFile().exists();
final int databaseVersion;
if (databaseExists) {
databaseVersion = DatabaseMetadata.fromDirectory(databaseDir).getVersion();
LOG.info("Existing database detected at {}. Version {}", databaseDir, databaseVersion);
} else {
databaseVersion = DEFAULT_VERSION;
LOG.info(
"No existing database detected at {}. Using version {}", databaseDir, databaseVersion);
Files.createDirectories(databaseDir);
new DatabaseMetadata(databaseVersion).writeToDirectory(databaseDir);
}
if (!SUPPORTED_VERSION.contains(databaseVersion)) {
final String message = "Unsupported RocksDB Metadata version of: " + databaseVersion;
LOG.error(message);
throw new StorageException(message);
}
return databaseVersion;
}
}

@ -10,7 +10,7 @@
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.services.kvstore;
package tech.pegasys.pantheon.plugin.services.storage.rocksdb;
import tech.pegasys.pantheon.metrics.PantheonMetricCategory;
import tech.pegasys.pantheon.metrics.prometheus.PrometheusMetricsSystem;
@ -18,6 +18,7 @@ import tech.pegasys.pantheon.metrics.rocksdb.RocksDBStats;
import tech.pegasys.pantheon.plugin.services.MetricsSystem;
import tech.pegasys.pantheon.plugin.services.metrics.Counter;
import tech.pegasys.pantheon.plugin.services.metrics.OperationTimer;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBConfiguration;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@ -25,7 +26,8 @@ import org.rocksdb.RocksDBException;
import org.rocksdb.Statistics;
import org.rocksdb.TransactionDB;
public class RocksDBMetricsHelper {
public class RocksDBMetrics {
private static final Logger LOG = LogManager.getLogger();
private final OperationTimer readLatency;
@ -34,7 +36,7 @@ public class RocksDBMetricsHelper {
private final OperationTimer commitLatency;
private final Counter rollbackCount;
private RocksDBMetricsHelper(
private RocksDBMetrics(
final OperationTimer readLatency,
final OperationTimer removeLatency,
final OperationTimer writeLatency,
@ -47,9 +49,9 @@ public class RocksDBMetricsHelper {
this.rollbackCount = rollbackCount;
}
public static RocksDBMetricsHelper of(
public static RocksDBMetrics of(
final MetricsSystem metricsSystem,
final RocksDbConfiguration rocksDbConfiguration,
final RocksDBConfiguration rocksDbConfiguration,
final TransactionDB db,
final Statistics stats) {
final OperationTimer readLatency =
@ -124,7 +126,7 @@ public class RocksDBMetricsHelper {
"database")
.labels(rocksDbConfiguration.getLabel());
return new RocksDBMetricsHelper(
return new RocksDBMetrics(
readLatency, removeLatency, writeLatency, commitLatency, rollbackCount);
}

@ -0,0 +1,117 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.plugin.services.storage.rocksdb;
import tech.pegasys.pantheon.plugin.PantheonContext;
import tech.pegasys.pantheon.plugin.PantheonPlugin;
import tech.pegasys.pantheon.plugin.services.PicoCLIOptions;
import tech.pegasys.pantheon.plugin.services.StorageService;
import tech.pegasys.pantheon.plugin.services.storage.SegmentIdentifier;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBCLIOptions;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBFactoryConfiguration;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
import com.google.auto.service.AutoService;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@AutoService(PantheonPlugin.class)
public class RocksDBPlugin implements PantheonPlugin {

  private static final Logger LOG = LogManager.getLogger();
  private static final String NAME = "rocksdb";

  private final RocksDBCLIOptions options;
  private PantheonContext context;
  private RocksDBKeyValueStorageFactory factory;
  private RocksDBKeyValuePrivacyStorageFactory privacyFactory;

  public RocksDBPlugin() {
    this.options = RocksDBCLIOptions.create();
  }

  /**
   * Registers the RocksDB CLI options and, when a {@link StorageService} is available, the
   * key-value storage factories.
   *
   * @throws IllegalStateException if no {@link PicoCLIOptions} service is available
   */
  @Override
  public void register(final PantheonContext context) {
    LOG.info("Registering plugin");
    this.context = context;

    final Optional<PicoCLIOptions> cmdlineOptions = context.getService(PicoCLIOptions.class);

    if (cmdlineOptions.isEmpty()) {
      throw new IllegalStateException(
          "Expecting PicoCLI options to register CLI options with, but none found.");
    }

    cmdlineOptions.get().addPicoCLIOptions(NAME, options);
    createFactoriesAndRegisterWithStorageService();

    LOG.info("Plugin registered.");
  }

  @Override
  public void start() {
    LOG.info("Starting plugin.");
    // Registration normally happens during register(); retry here in case the
    // StorageService was not yet available at that point.
    if (factory == null) {
      LOG.debug("Applied configuration: {}", options.toString());
      createFactoriesAndRegisterWithStorageService();
    }
  }

  @Override
  public void stop() {
    LOG.info("Stopping plugin.");

    // Close each factory independently so a failure closing one does not
    // prevent the other from being closed.
    try {
      if (factory != null) {
        factory.close();
        factory = null;
      }
    } catch (final IOException e) {
      LOG.error("Failed to stop plugin: {}", e.getMessage(), e);
    }

    try {
      if (privacyFactory != null) {
        privacyFactory.close();
        privacyFactory = null;
      }
    } catch (final IOException e) {
      LOG.error("Failed to stop plugin: {}", e.getMessage(), e);
    }
  }

  /** Creates both storage factories and registers them with the given service. */
  private void createAndRegister(final StorageService service) {
    final List<SegmentIdentifier> segments = service.getAllSegmentIdentifiers();

    // Memoized so the CLI options are converted to a domain object at most once
    // and the result is shared by both factories.
    final Supplier<RocksDBFactoryConfiguration> configuration =
        Suppliers.memoize(options::toDomainObject);

    factory = new RocksDBKeyValueStorageFactory(configuration, segments);
    privacyFactory = new RocksDBKeyValuePrivacyStorageFactory(configuration, segments);

    service.registerKeyValueStorage(factory);
    service.registerKeyValueStorage(privacyFactory);
  }

  private void createFactoriesAndRegisterWithStorageService() {
    context
        .getService(StorageService.class)
        .ifPresentOrElse(
            this::createAndRegister,
            () -> LOG.error("Failed to register KeyValueFactory due to missing StorageService."));
  }
}

@ -10,7 +10,7 @@
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.services.util;
package tech.pegasys.pantheon.plugin.services.storage.rocksdb;
import tech.pegasys.pantheon.util.InvalidConfigurationException;

@ -10,7 +10,7 @@
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.ethereum.storage.keyvalue;
package tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration;
import java.io.File;
import java.io.FileNotFoundException;
@ -23,12 +23,12 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
public class DatabaseMetadata {
static final String METADATA_FILENAME = "DATABASE_METADATA.json";
private static final String METADATA_FILENAME = "DATABASE_METADATA.json";
private static ObjectMapper MAPPER = new ObjectMapper();
private final int version;
@JsonCreator
DatabaseMetadata(@JsonProperty("version") final int version) {
public DatabaseMetadata(@JsonProperty("version") final int version) {
this.version = version;
}
@ -36,7 +36,7 @@ public class DatabaseMetadata {
return version;
}
static DatabaseMetadata fromDirectory(final Path databaseDir) throws IOException {
public static DatabaseMetadata fromDirectory(final Path databaseDir) throws IOException {
final File metadataFile = getDefaultMetadataFile(databaseDir);
try {
return MAPPER.readValue(metadataFile, DatabaseMetadata.class);
@ -48,7 +48,7 @@ public class DatabaseMetadata {
}
}
void writeToDirectory(final Path databaseDir) throws IOException {
public void writeToDirectory(final Path databaseDir) throws IOException {
MAPPER.writeValue(getDefaultMetadataFile(databaseDir), this);
}

@ -10,16 +10,18 @@
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.cli.options;
package tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration;
import tech.pegasys.pantheon.services.kvstore.RocksDbConfiguration;
import com.google.common.base.MoreObjects;
import picocli.CommandLine;
import java.util.Arrays;
import java.util.List;
public class RocksDBCLIOptions {
import picocli.CommandLine;
public static final int DEFAULT_MAX_OPEN_FILES = 1024;
public static final long DEFAULT_CACHE_CAPACITY = 8388608;
public static final int DEFAULT_MAX_BACKGROUND_COMPACTIONS = 4;
public static final int DEFAULT_BACKGROUND_THREAD_COUNT = 4;
public class RocksDBOptions implements CLIOptions<RocksDbConfiguration.Builder> {
private static final String MAX_OPEN_FILES_FLAG = "--Xrocksdb-max-open-files";
private static final String CACHE_CAPACITY_FLAG = "--Xrocksdb-cache-capacity";
private static final String MAX_BACKGROUND_COMPACTIONS_FLAG =
@ -58,14 +60,14 @@ public class RocksDBOptions implements CLIOptions<RocksDbConfiguration.Builder>
description = "Number of RocksDB background threads (default: ${DEFAULT-VALUE})")
int backgroundThreadCount;
private RocksDBOptions() {}
private RocksDBCLIOptions() {}
public static RocksDBOptions create() {
return new RocksDBOptions();
public static RocksDBCLIOptions create() {
return new RocksDBCLIOptions();
}
public static RocksDBOptions fromConfig(final RocksDbConfiguration config) {
final RocksDBOptions options = create();
public static RocksDBCLIOptions fromConfig(final RocksDBConfiguration config) {
final RocksDBCLIOptions options = create();
options.maxOpenFiles = config.getMaxOpenFiles();
options.cacheCapacity = config.getCacheCapacity();
options.maxBackgroundCompactions = config.getMaxBackgroundCompactions();
@ -73,25 +75,18 @@ public class RocksDBOptions implements CLIOptions<RocksDbConfiguration.Builder>
return options;
}
@Override
public RocksDbConfiguration.Builder toDomainObject() {
return RocksDbConfiguration.builder()
.maxOpenFiles(maxOpenFiles)
.cacheCapacity(cacheCapacity)
.maxBackgroundCompactions(maxBackgroundCompactions)
.backgroundThreadCount(backgroundThreadCount);
public RocksDBFactoryConfiguration toDomainObject() {
return new RocksDBFactoryConfiguration(
maxOpenFiles, maxBackgroundCompactions, backgroundThreadCount, cacheCapacity);
}
@Override
public List<String> getCLIOptions() {
return Arrays.asList(
MAX_OPEN_FILES_FLAG,
OptionParser.format(maxOpenFiles),
CACHE_CAPACITY_FLAG,
OptionParser.format(cacheCapacity),
MAX_BACKGROUND_COMPACTIONS_FLAG,
OptionParser.format(maxBackgroundCompactions),
BACKGROUND_THREAD_COUNT_FLAG,
OptionParser.format(backgroundThreadCount));
public String toString() {
return MoreObjects.toStringHelper(this)
.add("maxOpenFiles", maxOpenFiles)
.add("cacheCapacity", cacheCapacity)
.add("maxBackgroundCompactions", maxBackgroundCompactions)
.add("backgroundThreadCount", backgroundThreadCount)
.toString();
}
}

@ -0,0 +1,64 @@
/*
* Copyright 2018 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration;
import java.nio.file.Path;
/** Immutable settings used to open a single RocksDB database instance. */
public class RocksDBConfiguration {

  private final Path databaseDir;
  private final int maxOpenFiles;
  private final String label;
  private final int maxBackgroundCompactions;
  private final int backgroundThreadCount;
  private final long cacheCapacity;

  /**
   * @param databaseDir directory holding the RocksDB files
   * @param maxOpenFiles maximum number of files RocksDB may keep open
   * @param maxBackgroundCompactions maximum number of concurrent background compactions
   * @param backgroundThreadCount number of RocksDB background threads
   * @param cacheCapacity capacity of the RocksDB block cache
   * @param label name used to tag metrics for this database
   */
  public RocksDBConfiguration(
      final Path databaseDir,
      final int maxOpenFiles,
      final int maxBackgroundCompactions,
      final int backgroundThreadCount,
      final long cacheCapacity,
      final String label) {
    this.databaseDir = databaseDir;
    this.maxOpenFiles = maxOpenFiles;
    this.maxBackgroundCompactions = maxBackgroundCompactions;
    this.backgroundThreadCount = backgroundThreadCount;
    this.cacheCapacity = cacheCapacity;
    this.label = label;
  }

  /** Directory holding the RocksDB files. */
  public Path getDatabaseDir() {
    return databaseDir;
  }

  /** Maximum number of files RocksDB may keep open. */
  public int getMaxOpenFiles() {
    return maxOpenFiles;
  }

  /** Maximum number of concurrent background compactions. */
  public int getMaxBackgroundCompactions() {
    return maxBackgroundCompactions;
  }

  /** Number of RocksDB background threads. */
  public int getBackgroundThreadCount() {
    return backgroundThreadCount;
  }

  /** Capacity of the RocksDB block cache. */
  public long getCacheCapacity() {
    return cacheCapacity;
  }

  /** Name used to tag metrics for this database. */
  public String getLabel() {
    return label;
  }
}

@ -0,0 +1,78 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration;
import static tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBCLIOptions.DEFAULT_BACKGROUND_THREAD_COUNT;
import static tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBCLIOptions.DEFAULT_CACHE_CAPACITY;
import static tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBCLIOptions.DEFAULT_MAX_BACKGROUND_COMPACTIONS;
import static tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBCLIOptions.DEFAULT_MAX_OPEN_FILES;
import java.nio.file.Path;
/** Fluent builder for {@link RocksDBConfiguration}; unset values fall back to the CLI defaults. */
public class RocksDBConfigurationBuilder {

  private Path databaseDir;
  private String label = "blockchain";
  private int maxOpenFiles = DEFAULT_MAX_OPEN_FILES;
  private long cacheCapacity = DEFAULT_CACHE_CAPACITY;
  private int maxBackgroundCompactions = DEFAULT_MAX_BACKGROUND_COMPACTIONS;
  private int backgroundThreadCount = DEFAULT_BACKGROUND_THREAD_COUNT;

  /**
   * Seeds a builder from factory-wide settings. {@code databaseDir} and {@code label} are not part
   * of {@link RocksDBFactoryConfiguration} and must still be set explicitly.
   */
  public static RocksDBConfigurationBuilder from(final RocksDBFactoryConfiguration configuration) {
    final RocksDBConfigurationBuilder builder = new RocksDBConfigurationBuilder();
    builder.maxOpenFiles = configuration.getMaxOpenFiles();
    builder.maxBackgroundCompactions = configuration.getMaxBackgroundCompactions();
    builder.backgroundThreadCount = configuration.getBackgroundThreadCount();
    builder.cacheCapacity = configuration.getCacheCapacity();
    return builder;
  }

  public RocksDBConfigurationBuilder databaseDir(final Path databaseDir) {
    this.databaseDir = databaseDir;
    return this;
  }

  public RocksDBConfigurationBuilder label(final String label) {
    this.label = label;
    return this;
  }

  public RocksDBConfigurationBuilder maxOpenFiles(final int maxOpenFiles) {
    this.maxOpenFiles = maxOpenFiles;
    return this;
  }

  public RocksDBConfigurationBuilder cacheCapacity(final long cacheCapacity) {
    this.cacheCapacity = cacheCapacity;
    return this;
  }

  public RocksDBConfigurationBuilder maxBackgroundCompactions(final int maxBackgroundCompactions) {
    this.maxBackgroundCompactions = maxBackgroundCompactions;
    return this;
  }

  public RocksDBConfigurationBuilder backgroundThreadCount(final int backgroundThreadCount) {
    this.backgroundThreadCount = backgroundThreadCount;
    return this;
  }

  /** Builds the immutable configuration from the current builder state. */
  public RocksDBConfiguration build() {
    return new RocksDBConfiguration(
        databaseDir,
        maxOpenFiles,
        maxBackgroundCompactions,
        backgroundThreadCount,
        cacheCapacity,
        label);
  }
}

@ -0,0 +1,48 @@
/*
* Copyright 2018 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration;
/** Immutable factory-wide RocksDB settings shared by every database the factory opens. */
public class RocksDBFactoryConfiguration {

  private final int maxOpenFiles;
  private final int maxBackgroundCompactions;
  private final int backgroundThreadCount;
  private final long cacheCapacity;

  /**
   * @param maxOpenFiles maximum number of files RocksDB may keep open
   * @param maxBackgroundCompactions maximum number of concurrent background compactions
   * @param backgroundThreadCount number of RocksDB background threads
   * @param cacheCapacity capacity of the RocksDB block cache
   */
  public RocksDBFactoryConfiguration(
      final int maxOpenFiles,
      final int maxBackgroundCompactions,
      final int backgroundThreadCount,
      final long cacheCapacity) {
    this.maxOpenFiles = maxOpenFiles;
    this.maxBackgroundCompactions = maxBackgroundCompactions;
    this.backgroundThreadCount = backgroundThreadCount;
    this.cacheCapacity = cacheCapacity;
  }

  /** Maximum number of files RocksDB may keep open. */
  public int getMaxOpenFiles() {
    return maxOpenFiles;
  }

  /** Maximum number of concurrent background compactions. */
  public int getMaxBackgroundCompactions() {
    return maxBackgroundCompactions;
  }

  /** Number of RocksDB background threads. */
  public int getBackgroundThreadCount() {
    return backgroundThreadCount;
  }

  /** Capacity of the RocksDB block cache. */
  public long getCacheCapacity() {
    return cacheCapacity;
  }
}

@ -10,13 +10,19 @@
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.services.kvstore;
package tech.pegasys.pantheon.plugin.services.storage.rocksdb.segmented;
import static java.util.Objects.requireNonNullElse;
import tech.pegasys.pantheon.plugin.services.MetricsSystem;
import tech.pegasys.pantheon.plugin.services.exception.StorageException;
import tech.pegasys.pantheon.plugin.services.metrics.OperationTimer;
import tech.pegasys.pantheon.services.util.RocksDbUtil;
import tech.pegasys.pantheon.plugin.services.storage.SegmentIdentifier;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.RocksDBMetrics;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.RocksDbUtil;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBConfiguration;
import tech.pegasys.pantheon.services.kvstore.SegmentedKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.SegmentedKeyValueStorageTransactionTransitionValidatorDecorator;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import java.io.Closeable;
@ -46,9 +52,13 @@ import org.rocksdb.TransactionDB;
import org.rocksdb.TransactionDBOptions;
import org.rocksdb.WriteOptions;
public class ColumnarRocksDbKeyValueStorage
public class RocksDBColumnarKeyValueStorage
implements SegmentedKeyValueStorage<ColumnFamilyHandle>, Closeable {
static {
RocksDbUtil.loadNativeLibrary();
}
private static final Logger LOG = LogManager.getLogger();
private static final String DEFAULT_COLUMN = "default";
@ -57,43 +67,35 @@ public class ColumnarRocksDbKeyValueStorage
private final TransactionDB db;
private final AtomicBoolean closed = new AtomicBoolean(false);
private final Map<String, ColumnFamilyHandle> columnHandlesByName;
private final RocksDBMetricsHelper rocksDBMetricsHelper;
private final RocksDBMetrics metrics;
public static ColumnarRocksDbKeyValueStorage create(
final RocksDbConfiguration rocksDbConfiguration,
final List<Segment> segments,
public RocksDBColumnarKeyValueStorage(
final RocksDBConfiguration configuration,
final List<SegmentIdentifier> segments,
final MetricsSystem metricsSystem)
throws StorageException {
return new ColumnarRocksDbKeyValueStorage(rocksDbConfiguration, segments, metricsSystem);
}
private ColumnarRocksDbKeyValueStorage(
final RocksDbConfiguration rocksDbConfiguration,
final List<Segment> segments,
final MetricsSystem metricsSystem) {
RocksDbUtil.loadNativeLibrary();
try {
final List<ColumnFamilyDescriptor> columnDescriptors =
segments.stream()
.map(segment -> new ColumnFamilyDescriptor(segment.getId()))
.map(segment -> new ColumnFamilyDescriptor(getId(segment)))
.collect(Collectors.toList());
columnDescriptors.add(
new ColumnFamilyDescriptor(
DEFAULT_COLUMN.getBytes(StandardCharsets.UTF_8),
new ColumnFamilyOptions()
.setTableFormatConfig(createBlockBasedTableConfig(rocksDbConfiguration))));
.setTableFormatConfig(createBlockBasedTableConfig(configuration))));
final Statistics stats = new Statistics();
options =
new DBOptions()
.setCreateIfMissing(true)
.setMaxOpenFiles(rocksDbConfiguration.getMaxOpenFiles())
.setMaxBackgroundCompactions(rocksDbConfiguration.getMaxBackgroundCompactions())
.setMaxOpenFiles(configuration.getMaxOpenFiles())
.setMaxBackgroundCompactions(configuration.getMaxBackgroundCompactions())
.setStatistics(stats)
.setCreateMissingColumnFamilies(true)
.setEnv(
Env.getDefault()
.setBackgroundThreads(rocksDbConfiguration.getBackgroundThreadCount()));
Env.getDefault().setBackgroundThreads(configuration.getBackgroundThreadCount()));
txOptions = new TransactionDBOptions();
final List<ColumnFamilyHandle> columnHandles = new ArrayList<>(columnDescriptors.size());
@ -101,15 +103,15 @@ public class ColumnarRocksDbKeyValueStorage
TransactionDB.open(
options,
txOptions,
rocksDbConfiguration.getDatabaseDir().toString(),
configuration.getDatabaseDir().toString(),
columnDescriptors,
columnHandles);
rocksDBMetricsHelper =
RocksDBMetricsHelper.of(metricsSystem, rocksDbConfiguration, db, stats);
metrics = RocksDBMetrics.of(metricsSystem, configuration, db, stats);
final Map<BytesValue, String> segmentsById =
segments.stream()
.collect(
Collectors.toMap(segment -> BytesValue.wrap(segment.getId()), Segment::getName));
Collectors.toMap(
segment -> BytesValue.wrap(getId(segment)), SegmentIdentifier::getName));
final ImmutableMap.Builder<String, ColumnFamilyHandle> builder = ImmutableMap.builder();
@ -126,24 +128,23 @@ public class ColumnarRocksDbKeyValueStorage
}
}
private BlockBasedTableConfig createBlockBasedTableConfig(final RocksDbConfiguration config) {
private BlockBasedTableConfig createBlockBasedTableConfig(final RocksDBConfiguration config) {
final LRUCache cache = new LRUCache(config.getCacheCapacity());
return new BlockBasedTableConfig().setBlockCache(cache);
}
@Override
public ColumnFamilyHandle getSegmentIdentifierByName(final Segment segment) {
public ColumnFamilyHandle getSegmentIdentifierByName(final SegmentIdentifier segment) {
return columnHandlesByName.get(segment.getName());
}
@Override
public Optional<BytesValue> get(final ColumnFamilyHandle segment, final BytesValue key)
public Optional<byte[]> get(final ColumnFamilyHandle segment, final byte[] key)
throws StorageException {
throwIfClosed();
try (final OperationTimer.TimingContext ignored =
rocksDBMetricsHelper.getReadLatency().startTimer()) {
return Optional.ofNullable(db.get(segment, key.getArrayUnsafe())).map(BytesValue::wrap);
try (final OperationTimer.TimingContext ignored = metrics.getReadLatency().startTimer()) {
return Optional.ofNullable(db.get(segment, key));
} catch (final RocksDBException e) {
throw new StorageException(e);
}
@ -153,18 +154,19 @@ public class ColumnarRocksDbKeyValueStorage
public Transaction<ColumnFamilyHandle> startTransaction() throws StorageException {
throwIfClosed();
final WriteOptions options = new WriteOptions();
return new RocksDbTransaction(db.beginTransaction(options), options);
return new SegmentedKeyValueStorageTransactionTransitionValidatorDecorator<>(
new RocksDbTransaction(db.beginTransaction(options), options));
}
@Override
public long removeUnless(
final ColumnFamilyHandle segmentHandle, final Predicate<BytesValue> inUseCheck) {
final ColumnFamilyHandle segmentHandle, final Predicate<byte[]> inUseCheck) {
long removedNodeCounter = 0;
try (final RocksIterator rocksIterator = db.newIterator(segmentHandle)) {
rocksIterator.seekToFirst();
while (rocksIterator.isValid()) {
final byte[] key = rocksIterator.key();
if (!inUseCheck.test(BytesValue.wrap(key))) {
if (!inUseCheck.test(key)) {
removedNodeCounter++;
db.delete(segmentHandle, key);
}
@ -211,7 +213,12 @@ public class ColumnarRocksDbKeyValueStorage
}
}
private class RocksDbTransaction extends AbstractTransaction<ColumnFamilyHandle> {
private byte[] getId(final SegmentIdentifier name) {
return name.getName().getBytes(StandardCharsets.UTF_8);
}
private class RocksDbTransaction implements Transaction<ColumnFamilyHandle> {
private final org.rocksdb.Transaction innerTx;
private final WriteOptions options;
@ -221,30 +228,26 @@ public class ColumnarRocksDbKeyValueStorage
}
@Override
protected void doPut(
final ColumnFamilyHandle segment, final BytesValue key, final BytesValue value) {
try (final OperationTimer.TimingContext ignored =
rocksDBMetricsHelper.getWriteLatency().startTimer()) {
innerTx.put(segment, key.getArrayUnsafe(), value.getArrayUnsafe());
public void put(final ColumnFamilyHandle segment, final byte[] key, final byte[] value) {
try (final OperationTimer.TimingContext ignored = metrics.getWriteLatency().startTimer()) {
innerTx.put(segment, key, value);
} catch (final RocksDBException e) {
throw new StorageException(e);
}
}
@Override
protected void doRemove(final ColumnFamilyHandle segment, final BytesValue key) {
try (final OperationTimer.TimingContext ignored =
rocksDBMetricsHelper.getRemoveLatency().startTimer()) {
innerTx.delete(segment, key.getArrayUnsafe());
public void remove(final ColumnFamilyHandle segment, final byte[] key) {
try (final OperationTimer.TimingContext ignored = metrics.getRemoveLatency().startTimer()) {
innerTx.delete(segment, key);
} catch (final RocksDBException e) {
throw new StorageException(e);
}
}
@Override
protected void doCommit() throws StorageException {
try (final OperationTimer.TimingContext ignored =
rocksDBMetricsHelper.getCommitLatency().startTimer()) {
public void commit() throws StorageException {
try (final OperationTimer.TimingContext ignored = metrics.getCommitLatency().startTimer()) {
innerTx.commit();
} catch (final RocksDBException e) {
throw new StorageException(e);
@ -254,10 +257,10 @@ public class ColumnarRocksDbKeyValueStorage
}
@Override
protected void doRollback() {
public void rollback() {
try {
innerTx.rollback();
rocksDBMetricsHelper.getRollbackCount().inc();
metrics.getRollbackCount().inc();
} catch (final RocksDBException e) {
throw new StorageException(e);
} finally {

@ -1,5 +1,5 @@
/*
* Copyright 2018 ConsenSys AG.
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
@ -10,15 +10,18 @@
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.services.kvstore;
package tech.pegasys.pantheon.plugin.services.storage.rocksdb.unsegmented;
import tech.pegasys.pantheon.plugin.services.MetricsSystem;
import tech.pegasys.pantheon.plugin.services.exception.StorageException;
import tech.pegasys.pantheon.plugin.services.metrics.OperationTimer;
import tech.pegasys.pantheon.services.kvstore.SegmentedKeyValueStorage.StorageException;
import tech.pegasys.pantheon.services.util.RocksDbUtil;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.RocksDBMetrics;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.RocksDbUtil;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBConfiguration;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorageTransactionTransitionValidatorDecorator;
import java.io.Closeable;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Predicate;
@ -35,7 +38,11 @@ import org.rocksdb.TransactionDB;
import org.rocksdb.TransactionDBOptions;
import org.rocksdb.WriteOptions;
public class RocksDbKeyValueStorage implements KeyValueStorage, Closeable {
public class RocksDBKeyValueStorage implements KeyValueStorage {
static {
RocksDbUtil.loadNativeLibrary();
}
private static final Logger LOG = LogManager.getLogger();
@ -43,39 +50,32 @@ public class RocksDbKeyValueStorage implements KeyValueStorage, Closeable {
private final TransactionDBOptions txOptions;
private final TransactionDB db;
private final AtomicBoolean closed = new AtomicBoolean(false);
private final RocksDBMetricsHelper rocksDBMetricsHelper;
private final RocksDBMetrics rocksDBMetrics;
public static KeyValueStorage create(
final RocksDbConfiguration rocksDbConfiguration, final MetricsSystem metricsSystem)
throws StorageException {
return new RocksDbKeyValueStorage(rocksDbConfiguration, metricsSystem);
}
public RocksDBKeyValueStorage(
final RocksDBConfiguration configuration, final MetricsSystem metricsSystem) {
private RocksDbKeyValueStorage(
final RocksDbConfiguration rocksDbConfiguration, final MetricsSystem metricsSystem) {
RocksDbUtil.loadNativeLibrary();
try {
final Statistics stats = new Statistics();
options =
new Options()
.setCreateIfMissing(true)
.setMaxOpenFiles(rocksDbConfiguration.getMaxOpenFiles())
.setTableFormatConfig(createBlockBasedTableConfig(rocksDbConfiguration))
.setMaxBackgroundCompactions(rocksDbConfiguration.getMaxBackgroundCompactions())
.setMaxOpenFiles(configuration.getMaxOpenFiles())
.setTableFormatConfig(createBlockBasedTableConfig(configuration))
.setMaxBackgroundCompactions(configuration.getMaxBackgroundCompactions())
.setStatistics(stats);
options.getEnv().setBackgroundThreads(rocksDbConfiguration.getBackgroundThreadCount());
options.getEnv().setBackgroundThreads(configuration.getBackgroundThreadCount());
txOptions = new TransactionDBOptions();
db = TransactionDB.open(options, txOptions, rocksDbConfiguration.getDatabaseDir().toString());
rocksDBMetricsHelper =
RocksDBMetricsHelper.of(metricsSystem, rocksDbConfiguration, db, stats);
db = TransactionDB.open(options, txOptions, configuration.getDatabaseDir().toString());
rocksDBMetrics = RocksDBMetrics.of(metricsSystem, configuration, db, stats);
} catch (final RocksDBException e) {
throw new StorageException(e);
}
}
@Override
public void clear() {
public void clear() throws StorageException {
try (final RocksIterator rocksIterator = db.newIterator()) {
rocksIterator.seekToFirst();
if (rocksIterator.isValid()) {
@ -93,34 +93,30 @@ public class RocksDbKeyValueStorage implements KeyValueStorage, Closeable {
}
@Override
public void close() {
if (closed.compareAndSet(false, true)) {
txOptions.close();
options.close();
db.close();
}
public boolean containsKey(final byte[] key) throws StorageException {
return get(key).isPresent();
}
@Override
public Optional<BytesValue> get(final BytesValue key) throws StorageException {
public Optional<byte[]> get(final byte[] key) throws StorageException {
throwIfClosed();
try (final OperationTimer.TimingContext ignored =
rocksDBMetricsHelper.getReadLatency().startTimer()) {
return Optional.ofNullable(db.get(key.getArrayUnsafe())).map(BytesValue::wrap);
rocksDBMetrics.getReadLatency().startTimer()) {
return Optional.ofNullable(db.get(key));
} catch (final RocksDBException e) {
throw new StorageException(e);
}
}
@Override
public long removeUnless(final Predicate<BytesValue> inUseCheck) throws StorageException {
public long removeAllKeysUnless(final Predicate<byte[]> retainCondition) throws StorageException {
long removedNodeCounter = 0;
try (final RocksIterator rocksIterator = db.newIterator()) {
rocksIterator.seekToFirst();
while (rocksIterator.isValid()) {
final byte[] key = rocksIterator.key();
if (!inUseCheck.test(BytesValue.wrap(key))) {
if (!retainCondition.test(key)) {
removedNodeCounter++;
db.delete(key);
}
@ -133,81 +129,31 @@ public class RocksDbKeyValueStorage implements KeyValueStorage, Closeable {
}
@Override
public Transaction startTransaction() throws StorageException {
public KeyValueStorageTransaction startTransaction() throws StorageException {
throwIfClosed();
final WriteOptions options = new WriteOptions();
return new RocksDbTransaction(db.beginTransaction(options), options);
return new KeyValueStorageTransactionTransitionValidatorDecorator(
new RocksDBTransaction(db.beginTransaction(options), options, rocksDBMetrics));
}
@Override
public void close() {
if (closed.compareAndSet(false, true)) {
txOptions.close();
options.close();
db.close();
}
}
private BlockBasedTableConfig createBlockBasedTableConfig(final RocksDbConfiguration config) {
private BlockBasedTableConfig createBlockBasedTableConfig(final RocksDBConfiguration config) {
final LRUCache cache = new LRUCache(config.getCacheCapacity());
return new BlockBasedTableConfig().setBlockCache(cache);
}
private void throwIfClosed() {
if (closed.get()) {
LOG.error("Attempting to use a closed RocksDbKeyValueStorage");
LOG.error("Attempting to use a closed RocksDBKeyValueStorage");
throw new IllegalStateException("Storage has been closed");
}
}
private class RocksDbTransaction extends AbstractTransaction {
private final org.rocksdb.Transaction innerTx;
private final WriteOptions options;
RocksDbTransaction(final org.rocksdb.Transaction innerTx, final WriteOptions options) {
this.innerTx = innerTx;
this.options = options;
}
@Override
protected void doPut(final BytesValue key, final BytesValue value) {
try (final OperationTimer.TimingContext ignored =
rocksDBMetricsHelper.getWriteLatency().startTimer()) {
innerTx.put(key.getArrayUnsafe(), value.getArrayUnsafe());
} catch (final RocksDBException e) {
throw new StorageException(e);
}
}
@Override
protected void doRemove(final BytesValue key) {
try (final OperationTimer.TimingContext ignored =
rocksDBMetricsHelper.getRemoveLatency().startTimer()) {
innerTx.delete(key.getArrayUnsafe());
} catch (final RocksDBException e) {
throw new StorageException(e);
}
}
@Override
protected void doCommit() throws StorageException {
try (final OperationTimer.TimingContext ignored =
rocksDBMetricsHelper.getCommitLatency().startTimer()) {
innerTx.commit();
} catch (final RocksDBException e) {
throw new StorageException(e);
} finally {
close();
}
}
@Override
protected void doRollback() {
try {
innerTx.rollback();
rocksDBMetricsHelper.getRollbackCount().inc();
} catch (final RocksDBException e) {
throw new StorageException(e);
} finally {
close();
}
}
private void close() {
innerTx.close();
options.close();
}
}
}

@ -0,0 +1,82 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.plugin.services.storage.rocksdb.unsegmented;
import tech.pegasys.pantheon.plugin.services.exception.StorageException;
import tech.pegasys.pantheon.plugin.services.metrics.OperationTimer;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.RocksDBMetrics;
import org.rocksdb.RocksDBException;
import org.rocksdb.Transaction;
import org.rocksdb.WriteOptions;
/**
 * A {@link KeyValueStorageTransaction} implemented on top of a native RocksDB transaction.
 *
 * <p>Each write, remove and commit is timed through {@link RocksDBMetrics}; rollbacks are
 * counted. Both {@link #commit()} and {@link #rollback()} release the underlying native
 * handles in their {@code finally} blocks, so an instance must not be reused afterwards.
 */
public class RocksDBTransaction implements KeyValueStorageTransaction {

  private final Transaction innerTx;
  private final WriteOptions options;
  private final RocksDBMetrics metrics;

  RocksDBTransaction(
      final Transaction innerTx, final WriteOptions options, final RocksDBMetrics metrics) {
    this.innerTx = innerTx;
    this.options = options;
    this.metrics = metrics;
  }

  @Override
  public void put(final byte[] key, final byte[] value) {
    try (final OperationTimer.TimingContext timing = metrics.getWriteLatency().startTimer()) {
      innerTx.put(key, value);
    } catch (final RocksDBException e) {
      throw new StorageException(e);
    }
  }

  @Override
  public void remove(final byte[] key) {
    try (final OperationTimer.TimingContext timing = metrics.getRemoveLatency().startTimer()) {
      innerTx.delete(key);
    } catch (final RocksDBException e) {
      throw new StorageException(e);
    }
  }

  @Override
  public void commit() throws StorageException {
    try (final OperationTimer.TimingContext timing = metrics.getCommitLatency().startTimer()) {
      innerTx.commit();
    } catch (final RocksDBException e) {
      throw new StorageException(e);
    } finally {
      // Native handles must be freed whether or not the commit succeeded.
      releaseResources();
    }
  }

  @Override
  public void rollback() {
    try {
      innerTx.rollback();
      metrics.getRollbackCount().inc();
    } catch (final RocksDBException e) {
      throw new StorageException(e);
    } finally {
      // Native handles must be freed whether or not the rollback succeeded.
      releaseResources();
    }
  }

  /** Frees the native transaction handle and its associated write options. */
  private void releaseResources() {
    innerTx.close();
    options.close();
  }
}

@ -0,0 +1,115 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.plugin.services.storage.rocksdb;
import static org.assertj.core.api.Assertions.assertThat;
import static tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBCLIOptions.DEFAULT_BACKGROUND_THREAD_COUNT;
import static tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBCLIOptions.DEFAULT_CACHE_CAPACITY;
import static tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBCLIOptions.DEFAULT_MAX_BACKGROUND_COMPACTIONS;
import static tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBCLIOptions.DEFAULT_MAX_OPEN_FILES;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBCLIOptions;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBFactoryConfiguration;
import org.junit.Test;
import picocli.CommandLine;
/**
 * Tests for the experimental {@code --Xrocksdb-*} CLI flags: each flag must be parsed into the
 * corresponding field of {@link RocksDBFactoryConfiguration}, while every unset flag falls back
 * to its default value.
 */
public class RocksDBCLIOptionsTest {

  private static final String MAX_OPEN_FILES_FLAG = "--Xrocksdb-max-open-files";
  private static final String CACHE_CAPACITY_FLAG = "--Xrocksdb-cache-capacity";
  private static final String MAX_BACKGROUND_COMPACTIONS_FLAG =
      "--Xrocksdb-max-background-compactions";
  private static final String BACKGROUND_THREAD_COUNT_FLAG = "--Xrocksdb-background-thread-count";

  @Test
  public void defaultValues() {
    assertConfiguration(
        parse(),
        DEFAULT_BACKGROUND_THREAD_COUNT,
        DEFAULT_CACHE_CAPACITY,
        DEFAULT_MAX_BACKGROUND_COMPACTIONS,
        DEFAULT_MAX_OPEN_FILES);
  }

  @Test
  public void customBackgroundThreadCount() {
    final int expectedBackgroundThreadCount = 99;

    assertConfiguration(
        parse(BACKGROUND_THREAD_COUNT_FLAG, String.valueOf(expectedBackgroundThreadCount)),
        expectedBackgroundThreadCount,
        DEFAULT_CACHE_CAPACITY,
        DEFAULT_MAX_BACKGROUND_COMPACTIONS,
        DEFAULT_MAX_OPEN_FILES);
  }

  @Test
  public void customCacheCapacity() {
    final long expectedCacheCapacity = 400050006000L;

    assertConfiguration(
        parse(CACHE_CAPACITY_FLAG, String.valueOf(expectedCacheCapacity)),
        DEFAULT_BACKGROUND_THREAD_COUNT,
        expectedCacheCapacity,
        DEFAULT_MAX_BACKGROUND_COMPACTIONS,
        DEFAULT_MAX_OPEN_FILES);
  }

  @Test
  public void customMaxBackgroundCompactions() {
    final int expectedMaxBackgroundCompactions = 223344;

    assertConfiguration(
        parse(MAX_BACKGROUND_COMPACTIONS_FLAG, String.valueOf(expectedMaxBackgroundCompactions)),
        DEFAULT_BACKGROUND_THREAD_COUNT,
        DEFAULT_CACHE_CAPACITY,
        expectedMaxBackgroundCompactions,
        DEFAULT_MAX_OPEN_FILES);
  }

  @Test
  public void customMaxOpenFiles() {
    final int expectedMaxOpenFiles = 65;

    assertConfiguration(
        parse(MAX_OPEN_FILES_FLAG, String.valueOf(expectedMaxOpenFiles)),
        DEFAULT_BACKGROUND_THREAD_COUNT,
        DEFAULT_CACHE_CAPACITY,
        DEFAULT_MAX_BACKGROUND_COMPACTIONS,
        expectedMaxOpenFiles);
  }

  /** Parses the given command-line arguments and returns the resulting domain object. */
  private RocksDBFactoryConfiguration parse(final String... args) {
    final RocksDBCLIOptions options = RocksDBCLIOptions.create();
    new CommandLine(options).parse(args);
    return options.toDomainObject();
  }

  /** Asserts all four RocksDB tunables of a parsed configuration in one place. */
  private static void assertConfiguration(
      final RocksDBFactoryConfiguration configuration,
      final int expectedBackgroundThreadCount,
      final long expectedCacheCapacity,
      final int expectedMaxBackgroundCompactions,
      final int expectedMaxOpenFiles) {
    assertThat(configuration).isNotNull();
    assertThat(configuration.getBackgroundThreadCount()).isEqualTo(expectedBackgroundThreadCount);
    assertThat(configuration.getCacheCapacity()).isEqualTo(expectedCacheCapacity);
    assertThat(configuration.getMaxBackgroundCompactions())
        .isEqualTo(expectedMaxBackgroundCompactions);
    assertThat(configuration.getMaxOpenFiles()).isEqualTo(expectedMaxOpenFiles);
  }
}

@ -0,0 +1,141 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.plugin.services.storage.rocksdb;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;
import tech.pegasys.pantheon.metrics.ObservableMetricsSystem;
import tech.pegasys.pantheon.metrics.noop.NoOpMetricsSystem;
import tech.pegasys.pantheon.plugin.services.PantheonConfiguration;
import tech.pegasys.pantheon.plugin.services.exception.StorageException;
import tech.pegasys.pantheon.plugin.services.storage.SegmentIdentifier;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.DatabaseMetadata;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBFactoryConfiguration;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class RocksDBKeyValueStorageFactoryTest {

  // Must match the metadata filename the factory reads/writes next to the database directory.
  private static final String METADATA_FILENAME = "DATABASE_METADATA.json";
  // The database-format version the factory is expected to stamp on freshly created databases.
  private static final int DEFAULT_VERSION = 1;

  @Mock private RocksDBFactoryConfiguration rocksDbConfiguration;
  @Mock private PantheonConfiguration commonConfiguration;
  @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();
  private final ObservableMetricsSystem metricsSystem = new NoOpMetricsSystem();
  // Empty segment list: these tests only exercise metadata/version handling, not segmentation.
  private final List<SegmentIdentifier> segments = List.of();
  @Mock private SegmentIdentifier segment;

  /** Creating storage in a brand-new directory must write a metadata file with the latest version. */
  @Test
  public void shouldCreateCorrectMetadataFileForLatestVersion() throws Exception {
    final Path tempDatabaseDir = temporaryFolder.newFolder().toPath().resolve("db");
    when(commonConfiguration.getStoragePath()).thenReturn(tempDatabaseDir);
    final RocksDBKeyValueStorageFactory storageFactory =
        new RocksDBKeyValueStorageFactory(() -> rocksDbConfiguration, segments);
    // Side effect is creation of the Metadata version file
    storageFactory.create(() -> "block-chain", commonConfiguration, metricsSystem);
    assertEquals(
        DEFAULT_VERSION,
        DatabaseMetadata.fromDirectory(commonConfiguration.getStoragePath()).getVersion());
  }

  /**
   * A pre-existing database directory (marked by an IDENTITY file — presumably the RocksDB
   * marker file; TODO confirm) with no metadata file must be detected as version 0.
   */
  @Test
  public void shouldDetectVersion0DatabaseIfNoMetadataFileFound() throws Exception {
    final Path tempDatabaseDir = temporaryFolder.newFolder().toPath().resolve("db");
    Files.createDirectories(tempDatabaseDir);
    tempDatabaseDir.resolve("IDENTITY").toFile().createNewFile();
    when(commonConfiguration.getStoragePath()).thenReturn(tempDatabaseDir);
    final RocksDBKeyValueStorageFactory storageFactory =
        new RocksDBKeyValueStorageFactory(() -> rocksDbConfiguration, segments);
    storageFactory.create(segment, commonConfiguration, metricsSystem);
    assertEquals(0, DatabaseMetadata.fromDirectory(tempDatabaseDir).getVersion());
  }

  /** An existing, valid metadata file must be honoured rather than overwritten. */
  @Test
  public void shouldDetectCorrectVersionIfMetadataFileExists() throws Exception {
    final Path tempDatabaseDir = temporaryFolder.newFolder().toPath().resolve("db");
    Files.createDirectories(tempDatabaseDir);
    tempDatabaseDir.resolve("IDENTITY").toFile().createNewFile();
    new DatabaseMetadata(DEFAULT_VERSION).writeToDirectory(tempDatabaseDir);
    when(commonConfiguration.getStoragePath()).thenReturn(tempDatabaseDir);
    final RocksDBKeyValueStorageFactory storageFactory =
        new RocksDBKeyValueStorageFactory(() -> rocksDbConfiguration, segments);
    storageFactory.create(() -> "block-chain", commonConfiguration, metricsSystem);
    assertEquals(DEFAULT_VERSION, DatabaseMetadata.fromDirectory(tempDatabaseDir).getVersion());
    assertTrue(storageFactory.isSegmentIsolationSupported());
  }

  /** An unsupported version number (-1) in the metadata file must abort storage creation. */
  @Test
  public void shouldThrowExceptionWhenVersionNumberIsInvalid() throws Exception {
    final Path tempDatabaseDir = temporaryFolder.newFolder().toPath().resolve("db");
    Files.createDirectories(tempDatabaseDir);
    tempDatabaseDir.resolve("IDENTITY").toFile().createNewFile();
    new DatabaseMetadata(-1).writeToDirectory(tempDatabaseDir);
    when(commonConfiguration.getStoragePath()).thenReturn(tempDatabaseDir);
    assertThatThrownBy(
            () ->
                new RocksDBKeyValueStorageFactory(() -> rocksDbConfiguration, segments)
                    .create(() -> "segment-does-not-matter", commonConfiguration, metricsSystem))
        .isInstanceOf(StorageException.class);
  }

  /**
   * Malformed metadata JSON — wrong key name, or a non-numeric version value — must surface as
   * an IllegalStateException rather than being silently treated as a new database.
   */
  @Test
  public void shouldThrowExceptionWhenMetaDataFileIsCorrupted() throws Exception {
    final Path tempDatabaseDir = temporaryFolder.newFolder().toPath().resolve("db");
    Files.createDirectories(tempDatabaseDir);
    when(commonConfiguration.getStoragePath()).thenReturn(tempDatabaseDir);
    tempDatabaseDir.resolve("IDENTITY").toFile().createNewFile();
    // Valid JSON, but the expected "version" key is absent.
    final String badVersion = "{\"🦄\":1}";
    Files.write(
        tempDatabaseDir.resolve(METADATA_FILENAME), badVersion.getBytes(Charset.defaultCharset()));
    assertThatThrownBy(
            () ->
                new RocksDBKeyValueStorageFactory(() -> rocksDbConfiguration, segments)
                    .create(() -> "bad-version", commonConfiguration, metricsSystem))
        .isInstanceOf(IllegalStateException.class);
    // "version" key present, but the value is not a number.
    final String badValue = "{\"version\":\"iomedae\"}";
    Files.write(
        tempDatabaseDir.resolve(METADATA_FILENAME), badValue.getBytes(Charset.defaultCharset()));
    assertThatThrownBy(
            () ->
                new RocksDBKeyValueStorageFactory(() -> rocksDbConfiguration, segments)
                    .create(() -> "bad-value", commonConfiguration, metricsSystem))
        .isInstanceOf(IllegalStateException.class);
  }
}

@ -1,5 +1,5 @@
/*
* Copyright 2018 ConsenSys AG.
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
@ -10,7 +10,7 @@
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.services.kvstore;
package tech.pegasys.pantheon.plugin.services.storage.rocksdb;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
@ -20,12 +20,13 @@ import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import tech.pegasys.pantheon.metrics.ObservableMetricsSystem;
import tech.pegasys.pantheon.metrics.PantheonMetricCategory;
import tech.pegasys.pantheon.metrics.noop.NoOpMetricsSystem;
import tech.pegasys.pantheon.plugin.services.MetricsSystem;
import tech.pegasys.pantheon.plugin.services.metrics.Counter;
import tech.pegasys.pantheon.plugin.services.metrics.LabelledMetric;
import tech.pegasys.pantheon.plugin.services.metrics.OperationTimer;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBConfiguration;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBConfigurationBuilder;
import java.util.function.LongSupplier;
@ -36,20 +37,20 @@ import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.rocksdb.Statistics;
import org.rocksdb.TransactionDB;
@RunWith(MockitoJUnitRunner.class)
public class RocksDbKeyValueStorageTest extends AbstractKeyValueStorageTest {
public class RocksDBMetricsTest {
@Mock private MetricsSystem metricsSystemMock;
@Mock private ObservableMetricsSystem metricsSystemMock;
@Mock private LabelledMetric<OperationTimer> labelledMetricOperationTimerMock;
@Mock private LabelledMetric<Counter> labelledMetricCounterMock;
@Mock private OperationTimer operationTimerMock;
@Rule public final TemporaryFolder folder = new TemporaryFolder();
@Mock private TransactionDB db;
@Mock private Statistics stats;
@Override
protected KeyValueStorage createStore() throws Exception {
return RocksDbKeyValueStorage.create(config(), new NoOpMetricsSystem());
}
@Rule public final TemporaryFolder folder = new TemporaryFolder();
@Test
public void createStoreMustCreateMetrics() throws Exception {
@ -71,12 +72,8 @@ public class RocksDbKeyValueStorageTest extends AbstractKeyValueStorageTest {
final ArgumentCaptor<String> longGaugesMetricsNameArgs = ArgumentCaptor.forClass(String.class);
final ArgumentCaptor<String> longGaugesHelpArgs = ArgumentCaptor.forClass(String.class);
// Actual call
final KeyValueStorage keyValueStorage =
RocksDbKeyValueStorage.create(config(), metricsSystemMock);
RocksDBMetrics.of(metricsSystemMock, config(), db, stats);
// Assertions
assertThat(keyValueStorage).isNotNull();
verify(metricsSystemMock, times(4))
.createLabelledTimer(
eq(PantheonMetricCategory.KVSTORE_ROCKSDB),
@ -120,7 +117,7 @@ public class RocksDbKeyValueStorageTest extends AbstractKeyValueStorageTest {
.isEqualTo("Number of RocksDB transactions rolled back.");
}
private RocksDbConfiguration config() throws Exception {
return RocksDbConfiguration.builder().databaseDir(folder.newFolder().toPath()).build();
private RocksDBConfiguration config() throws Exception {
return new RocksDBConfigurationBuilder().databaseDir(folder.newFolder().toPath()).build();
}
}

@ -0,0 +1,130 @@
/*
* Copyright 2018 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.plugin.services.storage.rocksdb.segmented;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import tech.pegasys.pantheon.kvstore.AbstractKeyValueStorageTest;
import tech.pegasys.pantheon.metrics.ObservableMetricsSystem;
import tech.pegasys.pantheon.metrics.PantheonMetricCategory;
import tech.pegasys.pantheon.metrics.noop.NoOpMetricsSystem;
import tech.pegasys.pantheon.plugin.services.metrics.Counter;
import tech.pegasys.pantheon.plugin.services.metrics.LabelledMetric;
import tech.pegasys.pantheon.plugin.services.metrics.OperationTimer;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBConfiguration;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBConfigurationBuilder;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.unsegmented.RocksDBKeyValueStorage;
import java.util.function.LongSupplier;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class RocksDBKeyValueStorageTest extends AbstractKeyValueStorageTest {

  @Mock private ObservableMetricsSystem metricsSystemMock;
  @Mock private LabelledMetric<OperationTimer> labelledMetricOperationTimerMock;
  @Mock private LabelledMetric<Counter> labelledMetricCounterMock;
  @Mock private OperationTimer operationTimerMock;
  @Rule public final TemporaryFolder folder = new TemporaryFolder();

  // Supplies the store under test to the inherited AbstractKeyValueStorageTest contract tests.
  @Override
  protected KeyValueStorage createStore() throws Exception {
    return new RocksDBKeyValueStorage(config(), new NoOpMetricsSystem());
  }

  /**
   * Constructing the storage must register every expected metric under the
   * KVSTORE_ROCKSDB category: four latency timers, two long gauges and one rollback counter,
   * with the exact names and help strings asserted below.
   */
  @Test
  public void createStoreMustCreateMetrics() throws Exception {
    // Prepare mocks
    when(labelledMetricOperationTimerMock.labels(any())).thenReturn(operationTimerMock);
    when(metricsSystemMock.createLabelledTimer(
            eq(PantheonMetricCategory.KVSTORE_ROCKSDB), anyString(), anyString(), any()))
        .thenReturn(labelledMetricOperationTimerMock);
    when(metricsSystemMock.createLabelledCounter(
            eq(PantheonMetricCategory.KVSTORE_ROCKSDB), anyString(), anyString(), any()))
        .thenReturn(labelledMetricCounterMock);
    // Prepare argument captors
    final ArgumentCaptor<String> labelledTimersMetricsNameArgs =
        ArgumentCaptor.forClass(String.class);
    final ArgumentCaptor<String> labelledTimersHelpArgs = ArgumentCaptor.forClass(String.class);
    final ArgumentCaptor<String> labelledCountersMetricsNameArgs =
        ArgumentCaptor.forClass(String.class);
    final ArgumentCaptor<String> labelledCountersHelpArgs = ArgumentCaptor.forClass(String.class);
    final ArgumentCaptor<String> longGaugesMetricsNameArgs = ArgumentCaptor.forClass(String.class);
    final ArgumentCaptor<String> longGaugesHelpArgs = ArgumentCaptor.forClass(String.class);
    // Actual call — metric registration happens as a construction side effect.
    final KeyValueStorage keyValueStorage = new RocksDBKeyValueStorage(config(), metricsSystemMock);
    // Assertions
    assertThat(keyValueStorage).isNotNull();
    verify(metricsSystemMock, times(4))
        .createLabelledTimer(
            eq(PantheonMetricCategory.KVSTORE_ROCKSDB),
            labelledTimersMetricsNameArgs.capture(),
            labelledTimersHelpArgs.capture(),
            any());
    // containsExactly: registration order of the four timers matters here.
    assertThat(labelledTimersMetricsNameArgs.getAllValues())
        .containsExactly(
            "read_latency_seconds",
            "remove_latency_seconds",
            "write_latency_seconds",
            "commit_latency_seconds");
    assertThat(labelledTimersHelpArgs.getAllValues())
        .containsExactly(
            "Latency for read from RocksDB.",
            "Latency of remove requests from RocksDB.",
            "Latency for write to RocksDB.",
            "Latency for commits to RocksDB.");
    verify(metricsSystemMock, times(2))
        .createLongGauge(
            eq(PantheonMetricCategory.KVSTORE_ROCKSDB),
            longGaugesMetricsNameArgs.capture(),
            longGaugesHelpArgs.capture(),
            any(LongSupplier.class));
    assertThat(longGaugesMetricsNameArgs.getAllValues())
        .containsExactly("rocks_db_table_readers_memory_bytes", "rocks_db_files_size_bytes");
    assertThat(longGaugesHelpArgs.getAllValues())
        .containsExactly(
            "Estimated memory used for RocksDB index and filter blocks in bytes",
            "Estimated database size in bytes");
    verify(metricsSystemMock)
        .createLabelledCounter(
            eq(PantheonMetricCategory.KVSTORE_ROCKSDB),
            labelledCountersMetricsNameArgs.capture(),
            labelledCountersHelpArgs.capture(),
            any());
    assertThat(labelledCountersMetricsNameArgs.getValue()).isEqualTo("rollback_count");
    assertThat(labelledCountersHelpArgs.getValue())
        .isEqualTo("Number of RocksDB transactions rolled back.");
  }

  // Builds a minimal configuration pointing at a fresh temporary database directory.
  private RocksDBConfiguration config() throws Exception {
    return new RocksDBConfigurationBuilder().databaseDir(folder.newFolder().toPath()).build();
  }
}

@ -10,15 +10,22 @@
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.services.kvstore;
package tech.pegasys.pantheon.plugin.services.storage.rocksdb.unsegmented;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import tech.pegasys.pantheon.kvstore.AbstractKeyValueStorageTest;
import tech.pegasys.pantheon.metrics.noop.NoOpMetricsSystem;
import tech.pegasys.pantheon.services.kvstore.SegmentedKeyValueStorage.Segment;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.SegmentIdentifier;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.configuration.RocksDBConfigurationBuilder;
import tech.pegasys.pantheon.plugin.services.storage.rocksdb.segmented.RocksDBColumnarKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.SegmentedKeyValueStorage;
import tech.pegasys.pantheon.services.kvstore.SegmentedKeyValueStorage.Transaction;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import tech.pegasys.pantheon.services.kvstore.SegmentedKeyValueStorageAdapter;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Optional;
@ -27,7 +34,8 @@ import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.rocksdb.ColumnFamilyHandle;
public class ColumnarRocksDbKeyValueStorageTest extends AbstractKeyValueStorageTest {
public class RocksDBColumnarKeyValueStorageTest extends AbstractKeyValueStorageTest {
@Rule public final TemporaryFolder folder = new TemporaryFolder();
@Test
@ -37,12 +45,13 @@ public class ColumnarRocksDbKeyValueStorageTest extends AbstractKeyValueStorageT
Transaction<ColumnFamilyHandle> tx = store.startTransaction();
tx.put(
store.getSegmentIdentifierByName(TestSegment.BAR),
BytesValue.fromHexString("0001"),
BytesValue.fromHexString("0FFF"));
bytesFromHexString("0001"),
bytesFromHexString("0FFF"));
tx.commit();
final Optional<BytesValue> result =
store.get(
store.getSegmentIdentifierByName(TestSegment.FOO), BytesValue.fromHexString("0001"));
final Optional<byte[]> result =
store.get(store.getSegmentIdentifierByName(TestSegment.FOO), bytesFromHexString("0001"));
assertEquals(Optional.empty(), result);
}
@ -53,53 +62,48 @@ public class ColumnarRocksDbKeyValueStorageTest extends AbstractKeyValueStorageT
final ColumnFamilyHandle barSegment = store.getSegmentIdentifierByName(TestSegment.BAR);
Transaction<ColumnFamilyHandle> tx = store.startTransaction();
tx.put(fooSegment, BytesValue.of(1), BytesValue.of(1));
tx.put(fooSegment, BytesValue.of(2), BytesValue.of(2));
tx.put(fooSegment, BytesValue.of(3), BytesValue.of(3));
tx.put(barSegment, BytesValue.of(4), BytesValue.of(4));
tx.put(barSegment, BytesValue.of(5), BytesValue.of(5));
tx.put(barSegment, BytesValue.of(6), BytesValue.of(6));
tx.put(fooSegment, bytesOf(1), bytesOf(1));
tx.put(fooSegment, bytesOf(2), bytesOf(2));
tx.put(fooSegment, bytesOf(3), bytesOf(3));
tx.put(barSegment, bytesOf(4), bytesOf(4));
tx.put(barSegment, bytesOf(5), bytesOf(5));
tx.put(barSegment, bytesOf(6), bytesOf(6));
tx.commit();
final long removedFromFoo = store.removeUnless(fooSegment, x -> x.equals(BytesValue.of(3)));
final long removedFromBar = store.removeUnless(barSegment, x -> x.equals(BytesValue.of(4)));
final long removedFromFoo = store.removeUnless(fooSegment, x -> Arrays.equals(x, bytesOf(3)));
final long removedFromBar = store.removeUnless(barSegment, x -> Arrays.equals(x, bytesOf(4)));
assertEquals(2, removedFromFoo);
assertEquals(2, removedFromBar);
assertEquals(Optional.empty(), store.get(fooSegment, BytesValue.of(1)));
assertEquals(Optional.empty(), store.get(fooSegment, BytesValue.of(2)));
assertEquals(Optional.of(BytesValue.of(3)), store.get(fooSegment, BytesValue.of(3)));
assertEquals(Optional.empty(), store.get(fooSegment, bytesOf(1)));
assertEquals(Optional.empty(), store.get(fooSegment, bytesOf(2)));
assertArrayEquals(bytesOf(3), store.get(fooSegment, bytesOf(3)).get());
assertEquals(Optional.of(BytesValue.of(4)), store.get(barSegment, BytesValue.of(4)));
assertEquals(Optional.empty(), store.get(barSegment, BytesValue.of(5)));
assertEquals(Optional.empty(), store.get(barSegment, BytesValue.of(6)));
assertArrayEquals(bytesOf(4), store.get(barSegment, bytesOf(4)).get());
assertEquals(Optional.empty(), store.get(barSegment, bytesOf(5)));
assertEquals(Optional.empty(), store.get(barSegment, bytesOf(6)));
}
public enum TestSegment implements Segment {
public enum TestSegment implements SegmentIdentifier {
FOO(new byte[] {1}),
BAR(new byte[] {2});
private final byte[] id;
private final String nameAsUtf8;
TestSegment(final byte[] id) {
this.id = id;
this.nameAsUtf8 = new String(id, StandardCharsets.UTF_8);
}
@Override
public String getName() {
return name();
}
@Override
public byte[] getId() {
return id;
return nameAsUtf8;
}
}
private SegmentedKeyValueStorage<ColumnFamilyHandle> createSegmentedStore() throws Exception {
return ColumnarRocksDbKeyValueStorage.create(
RocksDbConfiguration.builder().databaseDir(folder.newFolder().toPath()).build(),
return new RocksDBColumnarKeyValueStorage(
new RocksDBConfigurationBuilder().databaseDir(folder.newFolder().toPath()).build(),
Arrays.asList(TestSegment.FOO, TestSegment.BAR),
new NoOpMetricsSystem());
}

@ -31,7 +31,6 @@ dependencies {
implementation project(':metrics:core')
implementation project(':metrics:rocksdb')
implementation project(':services:util')
implementation 'com.google.guava:guava'
implementation 'io.prometheus:simpleclient'
@ -40,6 +39,8 @@ dependencies {
runtime 'org.apache.logging.log4j:log4j-core'
testImplementation project(':testutil')
testImplementation 'junit:junit'
testImplementation 'org.mockito:mockito-core'
testImplementation 'org.assertj:assertj-core'

@ -12,6 +12,9 @@
*/
package tech.pegasys.pantheon.services.kvstore;
import tech.pegasys.pantheon.plugin.services.exception.StorageException;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import java.util.HashMap;
@ -26,14 +29,14 @@ import java.util.function.Predicate;
public class InMemoryKeyValueStorage implements KeyValueStorage {
private final Map<BytesValue, BytesValue> hashValueStore;
private final Map<BytesValue, byte[]> hashValueStore;
private final ReadWriteLock rwLock = new ReentrantReadWriteLock();
public InMemoryKeyValueStorage() {
this(new HashMap<>());
}
protected InMemoryKeyValueStorage(final Map<BytesValue, BytesValue> hashValueStore) {
protected InMemoryKeyValueStorage(final Map<BytesValue, byte[]> hashValueStore) {
this.hashValueStore = hashValueStore;
}
@ -49,71 +52,65 @@ public class InMemoryKeyValueStorage implements KeyValueStorage {
}
@Override
public void close() {}
@Override
public boolean containsKey(final BytesValue key) throws StorageException {
public boolean containsKey(final byte[] key) throws StorageException {
final Lock lock = rwLock.readLock();
lock.lock();
try {
return hashValueStore.containsKey(key);
return hashValueStore.containsKey(BytesValue.wrap(key));
} finally {
lock.unlock();
}
}
@Override
public Optional<BytesValue> get(final BytesValue key) {
public Optional<byte[]> get(final byte[] key) throws StorageException {
final Lock lock = rwLock.readLock();
lock.lock();
try {
return Optional.ofNullable(hashValueStore.get(key));
return Optional.ofNullable(hashValueStore.get(BytesValue.wrap(key)));
} finally {
lock.unlock();
}
}
@Override
public long removeUnless(final Predicate<BytesValue> inUseCheck) {
final Lock lock = rwLock.writeLock();
lock.lock();
try {
public long removeAllKeysUnless(final Predicate<byte[]> retainCondition) throws StorageException {
long initialSize = hashValueStore.keySet().size();
hashValueStore.keySet().removeIf(key -> !inUseCheck.test(key));
hashValueStore.keySet().removeIf(key -> !retainCondition.test(key.getArrayUnsafe()));
return initialSize - hashValueStore.keySet().size();
} finally {
lock.unlock();
}
}
@Override
public Transaction startTransaction() {
return new InMemoryTransaction();
public void close() {}
@Override
public KeyValueStorageTransaction startTransaction() {
return new KeyValueStorageTransactionTransitionValidatorDecorator(new InMemoryTransaction());
}
public Set<BytesValue> keySet() {
return Set.copyOf(hashValueStore.keySet());
}
private class InMemoryTransaction extends AbstractTransaction {
private class InMemoryTransaction implements KeyValueStorageTransaction {
private Map<BytesValue, BytesValue> updatedValues = new HashMap<>();
private Map<BytesValue, byte[]> updatedValues = new HashMap<>();
private Set<BytesValue> removedKeys = new HashSet<>();
@Override
protected void doPut(final BytesValue key, final BytesValue value) {
updatedValues.put(key, value);
removedKeys.remove(key);
public void put(final byte[] key, final byte[] value) {
updatedValues.put(BytesValue.wrap(key), value);
removedKeys.remove(BytesValue.wrap(key));
}
@Override
protected void doRemove(final BytesValue key) {
removedKeys.add(key);
updatedValues.remove(key);
public void remove(final byte[] key) {
removedKeys.add(BytesValue.wrap(key));
updatedValues.remove(BytesValue.wrap(key));
}
@Override
protected void doCommit() {
public void commit() throws StorageException {
final Lock lock = rwLock.writeLock();
lock.lock();
try {
@ -127,7 +124,7 @@ public class InMemoryKeyValueStorage implements KeyValueStorage {
}
@Override
protected void doRollback() {
public void rollback() {
updatedValues = null;
removedKeys = null;
}

@ -1,126 +0,0 @@
/*
* Copyright 2018 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.services.kvstore;
import static com.google.common.base.Preconditions.checkState;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import java.io.Closeable;
import java.util.Optional;
import java.util.function.Predicate;
/** Service provided by pantheon to facilitate persistent data storage. */
public interface KeyValueStorage extends Closeable {
/** Removes every key/value pair from the store. */
void clear();
/**
 * @param key Index into persistent data repository.
 * @return {@code true} when a value is persisted at the key index.
 * @throws StorageException if the underlying store cannot be read.
 */
default boolean containsKey(final BytesValue key) throws StorageException {
return get(key).isPresent();
}
/**
 * @param key Index into persistent data repository.
 * @return The value persisted at the key index.
 * @throws StorageException if the underlying store cannot be read.
 */
Optional<BytesValue> get(BytesValue key) throws StorageException;
/**
 * Removes every key that fails the supplied check.
 *
 * @param inUseCheck predicate returning {@code true} for keys that must be retained
 * @return the number of key/value pairs removed
 */
long removeUnless(Predicate<BytesValue> inUseCheck);
/**
 * Begins a transaction. Returns a transaction object that can be updated and committed.
 *
 * @return An object representing the transaction.
 * @throws StorageException if a transaction cannot be started.
 */
Transaction startTransaction() throws StorageException;
/** Unchecked wrapper for any failure raised by the underlying storage engine. */
class StorageException extends RuntimeException {
public StorageException(final Throwable t) {
super(t);
}
}
/**
 * Represents a set of changes to be committed atomically. A single transaction is not
 * thread-safe, but multiple transactions can execute concurrently.
 */
interface Transaction {
/**
 * Add the given key-value pair to the set of updates to be committed.
 *
 * @param key The key to set / modify.
 * @param value The value to be set.
 */
void put(BytesValue key, BytesValue value);
/**
 * Schedules the given key to be deleted from storage.
 *
 * @param key The key to delete
 */
void remove(BytesValue key);
/**
 * Atomically commit the set of changes contained in this transaction to the underlying
 * key-value storage from which this transaction was started. After committing, the transaction
 * is no longer usable and will throw exceptions if modifications are attempted.
 *
 * @throws StorageException if the changes cannot be committed.
 */
void commit() throws StorageException;
/**
 * Cancel this transaction. After rolling back, the transaction is no longer usable and will
 * throw exceptions if modifications are attempted.
 */
void rollback();
}
/**
 * Skeletal {@link Transaction} enforcing the lifecycle contract: once committed or rolled
 * back, every further call fails with {@link IllegalStateException} (via checkState).
 * Subclasses supply the actual storage operations through the do* hooks.
 */
abstract class AbstractTransaction implements Transaction {
// true until the transaction is committed or rolled back; never becomes true again.
private boolean active = true;
@Override
public final void put(final BytesValue key, final BytesValue value) {
checkState(active, "Cannot invoke put() on a completed transaction.");
doPut(key, value);
}
@Override
public final void remove(final BytesValue key) {
checkState(active, "Cannot invoke remove() on a completed transaction.");
doRemove(key);
}
@Override
public final void commit() throws StorageException {
checkState(active, "Cannot commit a completed transaction.");
active = false;
doCommit();
}
@Override
public final void rollback() {
checkState(active, "Cannot rollback a completed transaction.");
active = false;
doRollback();
}
// Hooks performing the real work; the lifecycle check has already passed when these run.
protected abstract void doPut(BytesValue key, BytesValue value);
protected abstract void doRemove(BytesValue key);
protected abstract void doCommit() throws StorageException;
protected abstract void doRollback();
}
}

@ -0,0 +1,56 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.services.kvstore;
import static com.google.common.base.Preconditions.checkState;
import tech.pegasys.pantheon.plugin.services.exception.StorageException;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
/**
 * Guards a {@link KeyValueStorageTransaction} so that no operation can be invoked once the
 * transaction has completed via {@link #commit()} or {@link #rollback()}; late calls fail with
 * {@link IllegalStateException}.
 */
public class KeyValueStorageTransactionTransitionValidatorDecorator
    implements KeyValueStorageTransaction {

  // Underlying transaction that receives every call once the state check passes.
  private final KeyValueStorageTransaction delegate;

  // Flips to false permanently on commit() or rollback().
  private boolean active = true;

  public KeyValueStorageTransactionTransitionValidatorDecorator(
      final KeyValueStorageTransaction toDecorate) {
    this.delegate = toDecorate;
  }

  @Override
  public void put(final byte[] key, final byte[] value) {
    failIfCompleted("Cannot invoke put() on a completed transaction.");
    delegate.put(key, value);
  }

  @Override
  public void remove(final byte[] key) {
    failIfCompleted("Cannot invoke remove() on a completed transaction.");
    delegate.remove(key);
  }

  @Override
  public final void commit() throws StorageException {
    failIfCompleted("Cannot commit a completed transaction.");
    active = false;
    delegate.commit();
  }

  @Override
  public final void rollback() {
    failIfCompleted("Cannot rollback a completed transaction.");
    active = false;
    delegate.rollback();
  }

  // Single place for the transition check; the message names the attempted operation.
  private void failIfCompleted(final String message) {
    checkState(active, message);
  }
}

@ -12,6 +12,9 @@
*/
package tech.pegasys.pantheon.services.kvstore;
import tech.pegasys.pantheon.plugin.services.exception.StorageException;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import java.util.HashMap;
@ -33,7 +36,7 @@ import com.google.common.cache.CacheBuilder;
*/
public class LimitedInMemoryKeyValueStorage implements KeyValueStorage {
private final Cache<BytesValue, BytesValue> storage;
private final Cache<BytesValue, byte[]> storage;
private final ReadWriteLock rwLock = new ReentrantReadWriteLock();
public LimitedInMemoryKeyValueStorage(final long maxSize) {
@ -52,67 +55,61 @@ public class LimitedInMemoryKeyValueStorage implements KeyValueStorage {
}
@Override
public void close() {}
@Override
public boolean containsKey(final BytesValue key) throws StorageException {
public boolean containsKey(final byte[] key) throws StorageException {
final Lock lock = rwLock.readLock();
lock.lock();
try {
return storage.getIfPresent(key) != null;
return storage.getIfPresent(BytesValue.wrap(key)) != null;
} finally {
lock.unlock();
}
}
@Override
public Optional<BytesValue> get(final BytesValue key) {
public void close() {}
@Override
public Optional<byte[]> get(final byte[] key) {
final Lock lock = rwLock.readLock();
lock.lock();
try {
return Optional.ofNullable(storage.getIfPresent(key));
return Optional.ofNullable(storage.getIfPresent(BytesValue.wrap(key)));
} finally {
lock.unlock();
}
}
@Override
public long removeUnless(final Predicate<BytesValue> inUseCheck) {
final Lock lock = rwLock.writeLock();
lock.lock();
try {
public long removeAllKeysUnless(final Predicate<byte[]> retainCondition) throws StorageException {
final long initialSize = storage.size();
storage.asMap().keySet().removeIf(key -> !inUseCheck.test(key));
storage.asMap().keySet().removeIf(key -> !retainCondition.test(key.getArrayUnsafe()));
return initialSize - storage.size();
} finally {
lock.unlock();
}
}
@Override
public Transaction startTransaction() {
return new InMemoryTransaction();
public KeyValueStorageTransaction startTransaction() throws StorageException {
return new KeyValueStorageTransactionTransitionValidatorDecorator(new MemoryTransaction());
}
private class InMemoryTransaction extends AbstractTransaction {
private class MemoryTransaction implements KeyValueStorageTransaction {
private Map<BytesValue, BytesValue> updatedValues = new HashMap<>();
private Map<BytesValue, byte[]> updatedValues = new HashMap<>();
private Set<BytesValue> removedKeys = new HashSet<>();
@Override
protected void doPut(final BytesValue key, final BytesValue value) {
updatedValues.put(key, value);
removedKeys.remove(key);
public void put(final byte[] key, final byte[] value) {
updatedValues.put(BytesValue.wrap(key), value);
removedKeys.remove(BytesValue.wrap(key));
}
@Override
protected void doRemove(final BytesValue key) {
removedKeys.add(key);
updatedValues.remove(key);
public void remove(final byte[] key) {
removedKeys.add(BytesValue.wrap(key));
updatedValues.remove(BytesValue.wrap(key));
}
@Override
protected void doCommit() {
public void commit() throws StorageException {
final Lock lock = rwLock.writeLock();
lock.lock();
try {
@ -126,7 +123,7 @@ public class LimitedInMemoryKeyValueStorage implements KeyValueStorage {
}
@Override
protected void doRollback() {
public void rollback() {
updatedValues = null;
removedKeys = null;
}

@ -1,128 +0,0 @@
/*
* Copyright 2018 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.services.kvstore;
import tech.pegasys.pantheon.services.util.RocksDbUtil;
import java.nio.file.Path;
/**
 * Immutable set of tuning options for a RocksDB instance, created via {@link #builder()}.
 * Constructing a configuration also triggers loading of the RocksDB native library.
 */
public class RocksDbConfiguration {
// Defaults applied by the Builder when a value is not supplied explicitly.
public static final int DEFAULT_MAX_OPEN_FILES = 1024;
// 8388608 bytes = 8 MiB block-cache capacity.
public static final long DEFAULT_CACHE_CAPACITY = 8388608;
public static final int DEFAULT_MAX_BACKGROUND_COMPACTIONS = 4;
public static final int DEFAULT_BACKGROUND_THREAD_COUNT = 4;
private final Path databaseDir;
private final int maxOpenFiles;
// NOTE(review): presumably used to label metrics/log output for this database
// (defaults to "blockchain") — confirm against the consuming storage class.
private final String label;
private final int maxBackgroundCompactions;
private final int backgroundThreadCount;
private final long cacheCapacity;
private RocksDbConfiguration(
final Path databaseDir,
final int maxOpenFiles,
final int maxBackgroundCompactions,
final int backgroundThreadCount,
final long cacheCapacity,
final String label) {
this.maxBackgroundCompactions = maxBackgroundCompactions;
this.backgroundThreadCount = backgroundThreadCount;
// Load the RocksDB JNI library eagerly so failures surface at configuration time.
RocksDbUtil.loadNativeLibrary();
this.databaseDir = databaseDir;
this.maxOpenFiles = maxOpenFiles;
this.cacheCapacity = cacheCapacity;
this.label = label;
}
/** @return a new builder pre-populated with the DEFAULT_* values above. */
public static Builder builder() {
return new Builder();
}
public Path getDatabaseDir() {
return databaseDir;
}
public int getMaxOpenFiles() {
return maxOpenFiles;
}
public int getMaxBackgroundCompactions() {
return maxBackgroundCompactions;
}
public int getBackgroundThreadCount() {
return backgroundThreadCount;
}
public long getCacheCapacity() {
return cacheCapacity;
}
public String getLabel() {
return label;
}
/** Mutable builder; every setter returns {@code this} for chaining. */
public static class Builder {
// databaseDir has no default and must be provided by the caller.
Path databaseDir;
String label = "blockchain";
int maxOpenFiles = DEFAULT_MAX_OPEN_FILES;
long cacheCapacity = DEFAULT_CACHE_CAPACITY;
int maxBackgroundCompactions = DEFAULT_MAX_BACKGROUND_COMPACTIONS;
int backgroundThreadCount = DEFAULT_BACKGROUND_THREAD_COUNT;
// Instances are obtained only through RocksDbConfiguration.builder().
private Builder() {}
public Builder databaseDir(final Path databaseDir) {
this.databaseDir = databaseDir;
return this;
}
public Builder maxOpenFiles(final int maxOpenFiles) {
this.maxOpenFiles = maxOpenFiles;
return this;
}
public Builder label(final String label) {
this.label = label;
return this;
}
public Builder cacheCapacity(final long cacheCapacity) {
this.cacheCapacity = cacheCapacity;
return this;
}
public Builder maxBackgroundCompactions(final int maxBackgroundCompactions) {
this.maxBackgroundCompactions = maxBackgroundCompactions;
return this;
}
public Builder backgroundThreadCount(final int backgroundThreadCount) {
this.backgroundThreadCount = backgroundThreadCount;
return this;
}
public RocksDbConfiguration build() {
return new RocksDbConfiguration(
databaseDir,
maxOpenFiles,
maxBackgroundCompactions,
backgroundThreadCount,
cacheCapacity,
label);
}
}
}

@ -12,9 +12,8 @@
*/
package tech.pegasys.pantheon.services.kvstore;
import static com.google.common.base.Preconditions.checkState;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import tech.pegasys.pantheon.plugin.services.exception.StorageException;
import tech.pegasys.pantheon.plugin.services.storage.SegmentIdentifier;
import java.io.Closeable;
import java.util.Optional;
@ -27,16 +26,16 @@ import java.util.function.Predicate;
*/
public interface SegmentedKeyValueStorage<S> extends Closeable {
S getSegmentIdentifierByName(Segment segment);
S getSegmentIdentifierByName(SegmentIdentifier segment);
/**
* @param segment the segment
* @param key Index into persistent data repository.
* @return The value persisted at the key index.
*/
Optional<BytesValue> get(S segment, BytesValue key) throws StorageException;
Optional<byte[]> get(S segment, byte[] key) throws StorageException;
default boolean containsKey(final S segment, final BytesValue key) throws StorageException {
default boolean containsKey(final S segment, final byte[] key) throws StorageException {
return get(segment, key).isPresent();
}
@ -47,16 +46,10 @@ public interface SegmentedKeyValueStorage<S> extends Closeable {
*/
Transaction<S> startTransaction() throws StorageException;
long removeUnless(S segmentHandle, Predicate<BytesValue> inUseCheck);
long removeUnless(S segmentHandle, Predicate<byte[]> inUseCheck);
void clear(S segmentHandle);
class StorageException extends RuntimeException {
public StorageException(final Throwable t) {
super(t);
}
}
/**
* Represents a set of changes to be committed atomically. A single transaction is not
* thread-safe, but multiple transactions can execute concurrently.
@ -72,7 +65,7 @@ public interface SegmentedKeyValueStorage<S> extends Closeable {
* @param key The key to set / modify.
* @param value The value to be set.
*/
void put(S segment, BytesValue key, BytesValue value);
void put(S segment, byte[] key, byte[] value);
/**
* Schedules the given key to be deleted from storage.
@ -80,7 +73,7 @@ public interface SegmentedKeyValueStorage<S> extends Closeable {
* @param segment the database segment
* @param key The key to delete
*/
void remove(S segment, BytesValue key);
void remove(S segment, byte[] key);
/**
* Atomically commit the set of changes contained in this transaction to the underlying
@ -95,49 +88,4 @@ public interface SegmentedKeyValueStorage<S> extends Closeable {
*/
void rollback();
}
interface Segment {
String getName();
byte[] getId();
}
abstract class AbstractTransaction<S> implements Transaction<S> {
private boolean active = true;
@Override
public final void put(final S segment, final BytesValue key, final BytesValue value) {
checkState(active, "Cannot invoke put() on a completed transaction.");
doPut(segment, key, value);
}
@Override
public final void remove(final S segment, final BytesValue key) {
checkState(active, "Cannot invoke remove() on a completed transaction.");
doRemove(segment, key);
}
@Override
public final void commit() throws StorageException {
checkState(active, "Cannot commit a completed transaction.");
active = false;
doCommit();
}
@Override
public final void rollback() {
checkState(active, "Cannot rollback a completed transaction.");
active = false;
doRollback();
}
protected abstract void doPut(S segment, BytesValue key, BytesValue value);
protected abstract void doRemove(S segment, BytesValue key);
protected abstract void doCommit() throws StorageException;
protected abstract void doRollback();
}
}

@ -12,8 +12,10 @@
*/
package tech.pegasys.pantheon.services.kvstore;
import tech.pegasys.pantheon.services.kvstore.SegmentedKeyValueStorage.Segment;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import tech.pegasys.pantheon.plugin.services.exception.StorageException;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
import tech.pegasys.pantheon.plugin.services.storage.SegmentIdentifier;
import java.io.IOException;
import java.util.Optional;
@ -25,7 +27,7 @@ public class SegmentedKeyValueStorageAdapter<S> implements KeyValueStorage {
private final SegmentedKeyValueStorage<S> storage;
public SegmentedKeyValueStorageAdapter(
final Segment segment, final SegmentedKeyValueStorage<S> storage) {
final SegmentIdentifier segment, final SegmentedKeyValueStorage<S> storage) {
this.segmentHandle = storage.getSegmentIdentifierByName(segment);
this.storage = storage;
}
@ -36,36 +38,37 @@ public class SegmentedKeyValueStorageAdapter<S> implements KeyValueStorage {
}
@Override
public void close() throws IOException {
storage.close();
public boolean containsKey(final byte[] key) throws StorageException {
return storage.containsKey(segmentHandle, key);
}
@Override
public boolean containsKey(final BytesValue key) throws StorageException {
return storage.containsKey(segmentHandle, key);
public Optional<byte[]> get(final byte[] key) throws StorageException {
return storage.get(segmentHandle, key);
}
@Override
public Optional<BytesValue> get(final BytesValue key) throws StorageException {
return storage.get(segmentHandle, key);
public long removeAllKeysUnless(final Predicate<byte[]> retainCondition) throws StorageException {
return storage.removeUnless(segmentHandle, retainCondition);
}
@Override
public long removeUnless(final Predicate<BytesValue> inUseCheck) {
return storage.removeUnless(segmentHandle, inUseCheck);
public void close() throws IOException {
storage.close();
}
@Override
public Transaction startTransaction() throws StorageException {
public KeyValueStorageTransaction startTransaction() throws StorageException {
final SegmentedKeyValueStorage.Transaction<S> transaction = storage.startTransaction();
return new Transaction() {
return new KeyValueStorageTransaction() {
@Override
public void put(final BytesValue key, final BytesValue value) {
public void put(final byte[] key, final byte[] value) {
transaction.put(segmentHandle, key, value);
}
@Override
public void remove(final BytesValue key) {
public void remove(final byte[] key) {
transaction.remove(segmentHandle, key);
}

@ -0,0 +1,56 @@
/*
* Copyright 2019 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.services.kvstore;
import static com.google.common.base.Preconditions.checkState;
import tech.pegasys.pantheon.plugin.services.exception.StorageException;
import tech.pegasys.pantheon.services.kvstore.SegmentedKeyValueStorage.Transaction;
/**
 * Wraps a segmented {@link Transaction} and validates state transitions: once the transaction
 * has completed (commit or rollback) every further call fails with
 * {@link IllegalStateException}.
 *
 * @param <S> the segment handle type used by the wrapped transaction
 */
public class SegmentedKeyValueStorageTransactionTransitionValidatorDecorator<S>
    implements Transaction<S> {

  // Receives all calls that pass the lifecycle check.
  private final Transaction<S> wrapped;

  // Set to false permanently by commit() or rollback().
  private boolean active = true;

  public SegmentedKeyValueStorageTransactionTransitionValidatorDecorator(
      final Transaction<S> toDecorate) {
    this.wrapped = toDecorate;
  }

  @Override
  public final void put(final S segment, final byte[] key, final byte[] value) {
    checkState(active, "Cannot invoke put() on a completed transaction.");
    wrapped.put(segment, key, value);
  }

  @Override
  public final void remove(final S segment, final byte[] key) {
    checkState(active, "Cannot invoke remove() on a completed transaction.");
    wrapped.remove(segment, key);
  }

  @Override
  public final void commit() throws StorageException {
    checkState(active, "Cannot commit a completed transaction.");
    active = false;
    wrapped.commit();
  }

  @Override
  public final void rollback() {
    checkState(active, "Cannot rollback a completed transaction.");
    active = false;
    wrapped.rollback();
  }
}

@ -1,384 +0,0 @@
/*
* Copyright 2018 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.services.kvstore;
import static org.assertj.core.api.Assertions.assertThat;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage.Transaction;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import tech.pegasys.pantheon.util.bytes.BytesValues;
import java.util.Arrays;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import java.util.function.Function;
import org.junit.Ignore;
import org.junit.Test;
@Ignore
public abstract class AbstractKeyValueStorageTest {
protected abstract KeyValueStorage createStore() throws Exception;
@Test
public void twoStoresAreIndependent() throws Exception {
  // Writing through one store must never be observable through another.
  final KeyValueStorage first = createStore();
  final KeyValueStorage second = createStore();

  final Transaction update = first.startTransaction();
  update.put(BytesValue.fromHexString("0001"), BytesValue.fromHexString("0FFF"));
  update.commit();

  assertThat(second.get(BytesValue.fromHexString("0001"))).isEmpty();
}
@Test
public void put() throws Exception {
  // A second committed put on the same key must overwrite the first value.
  final KeyValueStorage store = createStore();

  Transaction transaction = store.startTransaction();
  transaction.put(BytesValue.fromHexString("0F"), BytesValue.fromHexString("0ABC"));
  transaction.commit();
  assertThat(store.get(BytesValue.fromHexString("0F")))
      .contains(BytesValue.fromHexString("0ABC"));

  transaction = store.startTransaction();
  transaction.put(BytesValue.fromHexString("0F"), BytesValue.fromHexString("0DEF"));
  transaction.commit();
  assertThat(store.get(BytesValue.fromHexString("0F")))
      .contains(BytesValue.fromHexString("0DEF"));
}
@Test
public void removeUnless() throws Exception {
  // Seed four keys, then retain only those whose string form contains "1".
  final KeyValueStorage store = createStore();
  final Transaction seed = store.startTransaction();
  seed.put(BytesValue.fromHexString("0F"), BytesValue.fromHexString("0ABC"));
  seed.put(BytesValue.fromHexString("10"), BytesValue.fromHexString("0ABC"));
  seed.put(BytesValue.fromHexString("11"), BytesValue.fromHexString("0ABC"));
  seed.put(BytesValue.fromHexString("12"), BytesValue.fromHexString("0ABC"));
  seed.commit();

  store.removeUnless(key -> key.toString().contains("1"));

  assertThat(store.containsKey(BytesValue.fromHexString("0F"))).isFalse();
  assertThat(store.containsKey(BytesValue.fromHexString("10"))).isTrue();
  assertThat(store.containsKey(BytesValue.fromHexString("11"))).isTrue();
  assertThat(store.containsKey(BytesValue.fromHexString("12"))).isTrue();
}
@Test
public void clearRemovesAll() throws Exception {
  // clear() must drop every previously committed entry.
  final KeyValueStorage store = createStore();
  final Transaction seed = store.startTransaction();
  seed.put(BytesValue.fromHexString("0F"), BytesValue.fromHexString("0ABC"));
  seed.put(BytesValue.fromHexString("10"), BytesValue.fromHexString("0ABC"));
  seed.put(BytesValue.fromHexString("11"), BytesValue.fromHexString("0ABC"));
  seed.put(BytesValue.fromHexString("12"), BytesValue.fromHexString("0ABC"));
  seed.commit();

  store.clear();

  assertThat(store.containsKey(BytesValue.fromHexString("0F"))).isFalse();
  assertThat(store.containsKey(BytesValue.fromHexString("10"))).isFalse();
  assertThat(store.containsKey(BytesValue.fromHexString("11"))).isFalse();
  assertThat(store.containsKey(BytesValue.fromHexString("12"))).isFalse();
}
@Test
public void containsKey() throws Exception {
  final KeyValueStorage store = createStore();
  final BytesValue key = BytesValue.fromHexString("ABCD");

  // Absent before any write...
  assertThat(store.containsKey(key)).isFalse();

  final Transaction transaction = store.startTransaction();
  transaction.put(key, BytesValue.fromHexString("DEFF"));
  transaction.commit();

  // ...and present once a value has been committed.
  assertThat(store.containsKey(key)).isTrue();
}
@Test
public void removeExisting() throws Exception {
  // A committed value disappears once a later transaction removes its key.
  final KeyValueStorage store = createStore();

  final Transaction write = store.startTransaction();
  write.put(BytesValue.fromHexString("0F"), BytesValue.fromHexString("0ABC"));
  write.commit();

  final Transaction delete = store.startTransaction();
  delete.remove(BytesValue.fromHexString("0F"));
  delete.commit();

  assertThat(store.get(BytesValue.fromHexString("0F"))).isEmpty();
}
@Test
public void removeExistingSameTransaction() throws Exception {
  // put followed by remove of the same key within one transaction leaves no value behind.
  final KeyValueStorage store = createStore();

  final Transaction transaction = store.startTransaction();
  transaction.put(BytesValue.fromHexString("0F"), BytesValue.fromHexString("0ABC"));
  transaction.remove(BytesValue.fromHexString("0F"));
  transaction.commit();

  assertThat(store.get(BytesValue.fromHexString("0F"))).isEmpty();
}
@Test
public void removeNonExistent() throws Exception {
  // Removing an absent key is a silent no-op rather than an error.
  final KeyValueStorage store = createStore();

  final Transaction transaction = store.startTransaction();
  transaction.remove(BytesValue.fromHexString("0F"));
  transaction.commit();

  assertThat(store.get(BytesValue.fromHexString("0F"))).isEmpty();
}
@Test
public void concurrentUpdate() throws Exception {
// Two writer threads race over the same 1000 keys; afterwards every key must hold a value
// written by one of them, i.e. per-key commits are atomic and never interleave/corrupt.
final int keyCount = 1000;
final KeyValueStorage store = createStore();
final CountDownLatch finishedLatch = new CountDownLatch(2);
// Builds a thread that writes `value` to keys 0..keyCount-1, one transaction per key.
final Function<BytesValue, Thread> updater =
(value) ->
new Thread(
() -> {
try {
for (int i = 0; i < keyCount; i++) {
Transaction tx = store.startTransaction();
tx.put(BytesValues.toMinimalBytes(i), value);
tx.commit();
}
} finally {
// Always count down, even if a commit throws, so await() cannot hang.
finishedLatch.countDown();
}
});
// Run 2 concurrent transactions that write a bunch of values to the same keys
final BytesValue a = BytesValue.of(10);
final BytesValue b = BytesValue.of(20);
updater.apply(a).start();
updater.apply(b).start();
finishedLatch.await();
for (int i = 0; i < keyCount; i++) {
final BytesValue key = BytesValues.toMinimalBytes(i);
final BytesValue actual = store.get(key).get();
// The winner for each key is unspecified, but must be exactly one of the two values.
assertThat(actual.equals(a) || actual.equals(b)).isTrue();
}
store.close();
}
@Test
public void transactionCommit() throws Exception {
  // Verifies that staged changes are invisible until commit, then all applied atomically.
  final KeyValueStorage store = createStore();

  // Add some values
  Transaction tx = store.startTransaction();
  tx.put(BytesValue.of(1), BytesValue.of(1));
  tx.put(BytesValue.of(2), BytesValue.of(2));
  tx.put(BytesValue.of(3), BytesValue.of(3));
  tx.commit();

  // Start transaction that adds, modifies, and removes some values
  tx = store.startTransaction();
  tx.put(BytesValue.of(2), BytesValue.of(3));
  tx.put(BytesValue.of(2), BytesValue.of(4));
  tx.remove(BytesValue.of(3));
  tx.put(BytesValue.of(4), BytesValue.of(8));

  // Check values before committing have not changed
  // (the second, identical assertion block that was here was redundant and is removed)
  assertThat(store.get(BytesValue.of(1))).contains(BytesValue.of(1));
  assertThat(store.get(BytesValue.of(2))).contains(BytesValue.of(2));
  assertThat(store.get(BytesValue.of(3))).contains(BytesValue.of(3));
  assertThat(store.get(BytesValue.of(4))).isEmpty();

  tx.commit();

  // Check that values have been updated after commit
  assertThat(store.get(BytesValue.of(1))).contains(BytesValue.of(1));
  assertThat(store.get(BytesValue.of(2))).contains(BytesValue.of(4));
  assertThat(store.get(BytesValue.of(3))).isEmpty();
  assertThat(store.get(BytesValue.of(4))).contains(BytesValue.of(8));
}
@Test
public void transactionRollback() throws Exception {
final KeyValueStorage store = createStore();
// Add some values
Transaction tx = store.startTransaction();
tx.put(BytesValue.of(1), BytesValue.of(1));
tx.put(BytesValue.of(2), BytesValue.of(2));
tx.put(BytesValue.of(3), BytesValue.of(3));
tx.commit();
// Start transaction that adds, modifies, and removes some values
tx = store.startTransaction();
tx.put(BytesValue.of(2), BytesValue.of(3));
tx.put(BytesValue.of(2), BytesValue.of(4));
tx.remove(BytesValue.of(3));
tx.put(BytesValue.of(4), BytesValue.of(8));
// Check values before committing have not changed
assertThat(store.get(BytesValue.of(1))).contains(BytesValue.of(1));
assertThat(store.get(BytesValue.of(2))).contains(BytesValue.of(2));
assertThat(store.get(BytesValue.of(3))).contains(BytesValue.of(3));
assertThat(store.get(BytesValue.of(4))).isEmpty();
tx.rollback();
// Check that values have not changed after rollback
assertThat(store.get(BytesValue.of(1))).contains(BytesValue.of(1));
assertThat(store.get(BytesValue.of(2))).contains(BytesValue.of(2));
assertThat(store.get(BytesValue.of(3))).contains(BytesValue.of(3));
assertThat(store.get(BytesValue.of(4))).isEmpty();
}
@Test
public void transactionCommitEmpty() throws Exception {
final KeyValueStorage store = createStore();
final Transaction tx = store.startTransaction();
tx.commit();
}
@Test
public void transactionRollbackEmpty() throws Exception {
final KeyValueStorage store = createStore();
final Transaction tx = store.startTransaction();
tx.rollback();
}
@Test(expected = IllegalStateException.class)
public void transactionPutAfterCommit() throws Exception {
final KeyValueStorage store = createStore();
final Transaction tx = store.startTransaction();
tx.commit();
tx.put(BytesValue.of(1), BytesValue.of(1));
}
@Test(expected = IllegalStateException.class)
public void transactionRemoveAfterCommit() throws Exception {
final KeyValueStorage store = createStore();
final Transaction tx = store.startTransaction();
tx.commit();
tx.remove(BytesValue.of(1));
}
@Test(expected = IllegalStateException.class)
public void transactionPutAfterRollback() throws Exception {
final KeyValueStorage store = createStore();
final Transaction tx = store.startTransaction();
tx.rollback();
tx.put(BytesValue.of(1), BytesValue.of(1));
}
@Test(expected = IllegalStateException.class)
public void transactionRemoveAfterRollback() throws Exception {
final KeyValueStorage store = createStore();
final Transaction tx = store.startTransaction();
tx.rollback();
tx.remove(BytesValue.of(1));
}
@Test(expected = IllegalStateException.class)
public void transactionCommitAfterRollback() throws Exception {
final KeyValueStorage store = createStore();
final Transaction tx = store.startTransaction();
tx.rollback();
tx.commit();
}
@Test(expected = IllegalStateException.class)
public void transactionCommitTwice() throws Exception {
final KeyValueStorage store = createStore();
final Transaction tx = store.startTransaction();
tx.commit();
tx.commit();
}
@Test(expected = IllegalStateException.class)
public void transactionRollbackAfterCommit() throws Exception {
final KeyValueStorage store = createStore();
final Transaction tx = store.startTransaction();
tx.commit();
tx.rollback();
}
@Test(expected = IllegalStateException.class)
public void transactionRollbackTwice() throws Exception {
final KeyValueStorage store = createStore();
final Transaction tx = store.startTransaction();
tx.rollback();
tx.rollback();
}
@Test
public void twoTransactions() throws Exception {
final KeyValueStorage store = createStore();
final Transaction tx1 = store.startTransaction();
final Transaction tx2 = store.startTransaction();
tx1.put(BytesValue.of(1), BytesValue.of(1));
tx2.put(BytesValue.of(2), BytesValue.of(2));
tx1.commit();
tx2.commit();
assertThat(store.get(BytesValue.of(1))).contains(BytesValue.of(1));
assertThat(store.get(BytesValue.of(2))).contains(BytesValue.of(2));
}
  @Test
  public void transactionIsolation() throws Exception {
    // Two threads each write every key inside a single transaction. Because a
    // transaction commits atomically, afterwards every key must show the same
    // thread's value — writes from the two transactions must not interleave.
    final int keyCount = 1000;
    final KeyValueStorage store = createStore();
    // Counted down once per thread, even on failure, so await() can't hang.
    final CountDownLatch finishedLatch = new CountDownLatch(2);
    final Function<BytesValue, Thread> txRunner =
        (value) ->
            new Thread(
                () -> {
                  final Transaction tx = store.startTransaction();
                  for (int i = 0; i < keyCount; i++) {
                    tx.put(BytesValues.toMinimalBytes(i), value);
                  }
                  try {
                    tx.commit();
                  } finally {
                    // Release the latch whether or not the commit succeeded.
                    finishedLatch.countDown();
                  }
                });
    // Run 2 concurrent transactions that write a bunch of values to the same keys
    final BytesValue a = BytesValue.of(10);
    final BytesValue b = BytesValue.of(20);
    txRunner.apply(a).start();
    txRunner.apply(b).start();
    finishedLatch.await();
    // Check that transaction results are isolated (not interleaved)
    final BytesValue[] finalValues = new BytesValue[keyCount];
    final BytesValue[] expectedValues = new BytesValue[keyCount];
    for (int i = 0; i < keyCount; i++) {
      final BytesValue key = BytesValues.toMinimalBytes(i);
      finalValues[i] = store.get(key).get();
    }
    // Whichever transaction committed last supplies the value for every key.
    Arrays.fill(expectedValues, 0, keyCount, finalValues[0]);
    assertThat(finalValues).containsExactly(expectedValues);
    assertThat(finalValues[0].equals(a) || finalValues[0].equals(b)).isTrue();
    store.close();
  }
}

@ -12,10 +12,13 @@
*/
package tech.pegasys.pantheon.services.kvstore;
import tech.pegasys.pantheon.kvstore.AbstractKeyValueStorageTest;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
public class InMemoryKeyValueStorageTest extends AbstractKeyValueStorageTest {
@Override
protected KeyValueStorage createStore() throws Exception {
protected KeyValueStorage createStore() {
return new InMemoryKeyValueStorage();
}
}

@ -13,16 +13,18 @@
package tech.pegasys.pantheon.services.kvstore;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertFalse;
import tech.pegasys.pantheon.services.kvstore.KeyValueStorage.Transaction;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import tech.pegasys.pantheon.kvstore.AbstractKeyValueStorageTest;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
import org.junit.Test;
public class LimitedInMemoryKeyValueStorageTest extends AbstractKeyValueStorageTest {
@Override
protected KeyValueStorage createStore() throws Exception {
protected KeyValueStorage createStore() {
return new LimitedInMemoryKeyValueStorage(100_000_000);
}
@ -32,20 +34,20 @@ public class LimitedInMemoryKeyValueStorageTest extends AbstractKeyValueStorageT
final LimitedInMemoryKeyValueStorage storage = new LimitedInMemoryKeyValueStorage(limit);
for (int i = 0; i < limit * 2; i++) {
final Transaction tx = storage.startTransaction();
tx.put(BytesValue.of(i), BytesValue.of(i));
final KeyValueStorageTransaction tx = storage.startTransaction();
tx.put(bytesOf(i), bytesOf(i));
tx.commit();
}
int hits = 0;
for (int i = 0; i < limit * 2; i++) {
if (storage.containsKey(BytesValue.of(i))) {
if (storage.get(bytesOf(i)).isPresent()) {
hits++;
}
}
assertThat(hits <= limit).isTrue();
// Oldest key should've been dropped first
assertThat(storage.containsKey(BytesValue.of(0))).isFalse();
assertFalse(storage.containsKey(bytesOf((0))));
}
}

@ -32,7 +32,6 @@ dependencies {
compileOnly 'org.openjdk.jmh:jmh-generator-annprocess'
implementation project(':metrics:core')
implementation project(':services:util')
implementation 'io.vertx:vertx-core'
implementation 'org.apache.logging.log4j:log4j-api'

@ -38,9 +38,9 @@ include 'metrics:rocksdb'
include 'nat'
include 'pantheon'
include 'plugin-api'
include 'plugins:rocksdb'
include 'services:kvstore'
include 'services:pipeline'
include 'services:tasks'
include 'services:util'
include 'testutil'
include 'util'

@ -26,10 +26,15 @@ jar {
}
dependencies {
implementation project(':plugin-api')
implementation project(':util')
implementation 'com.fasterxml.jackson.core:jackson-databind'
implementation 'com.google.guava:guava'
implementation 'com.squareup.okhttp3:okhttp'
implementation 'junit:junit'
implementation 'net.consensys:orion'
implementation 'org.assertj:assertj-core'
implementation 'org.mockito:mockito-core'
implementation 'org.web3j:core'
}

@ -0,0 +1,398 @@
/*
* Copyright 2018 ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package tech.pegasys.pantheon.kvstore;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertTrue;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorage;
import tech.pegasys.pantheon.plugin.services.storage.KeyValueStorageTransaction;
import tech.pegasys.pantheon.util.bytes.BytesValue;
import tech.pegasys.pantheon.util.bytes.BytesValues;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import java.util.function.Function;
import org.junit.Ignore;
import org.junit.Test;
@Ignore
public abstract class AbstractKeyValueStorageTest {

  /**
   * Shared behavioural contract tests for {@link KeyValueStorage} implementations.
   *
   * <p>Subclasses supply a fresh, empty store per call; each test exercises one aspect of the
   * put/get/remove/transaction contract. Assertions use AssertJ throughout for consistency
   * (previously a few tests mixed in JUnit's {@code assertTrue}/{@code assertArrayEquals}).
   */
  protected abstract KeyValueStorage createStore() throws Exception;

  @Test
  public void twoStoresAreIndependent() throws Exception {
    final KeyValueStorage store1 = createStore();
    final KeyValueStorage store2 = createStore();

    final KeyValueStorageTransaction tx = store1.startTransaction();
    final byte[] key = bytesFromHexString("0001");
    final byte[] value = bytesFromHexString("0FFF");

    tx.put(key, value);
    tx.commit();

    // A write made through one store must not be visible through another.
    final Optional<byte[]> result = store2.get(key);
    assertThat(result).isEmpty();
  }

  @Test
  public void put() throws Exception {
    final KeyValueStorage store = createStore();
    final byte[] key = bytesFromHexString("0F");
    final byte[] firstValue = bytesFromHexString("0ABC");
    final byte[] secondValue = bytesFromHexString("0DEF");

    KeyValueStorageTransaction tx = store.startTransaction();
    tx.put(key, firstValue);
    tx.commit();
    assertThat(store.get(key)).contains(firstValue);

    // Putting to an existing key overwrites the previous value.
    tx = store.startTransaction();
    tx.put(key, secondValue);
    tx.commit();
    assertThat(store.get(key)).contains(secondValue);
  }

  @Test
  public void removeUnless() throws Exception {
    final KeyValueStorage store = createStore();
    final KeyValueStorageTransaction tx = store.startTransaction();
    tx.put(bytesFromHexString("0F"), bytesFromHexString("0ABC"));
    tx.put(bytesFromHexString("10"), bytesFromHexString("0ABC"));
    tx.put(bytesFromHexString("11"), bytesFromHexString("0ABC"));
    tx.put(bytesFromHexString("12"), bytesFromHexString("0ABC"));
    tx.commit();
    // Retain only keys whose hex string representation contains a '1'.
    store.removeAllKeysUnless(bv -> BytesValue.wrap(bv).toString().contains("1"));
    assertThat(store.containsKey(bytesFromHexString("0F"))).isFalse();
    assertThat(store.containsKey(bytesFromHexString("10"))).isTrue();
    assertThat(store.containsKey(bytesFromHexString("11"))).isTrue();
    assertThat(store.containsKey(bytesFromHexString("12"))).isTrue();
  }

  @Test
  public void containsKey() throws Exception {
    final KeyValueStorage store = createStore();
    final byte[] key = bytesFromHexString("ABCD");
    final byte[] value = bytesFromHexString("DEFF");

    assertThat(store.containsKey(key)).isFalse();

    final KeyValueStorageTransaction transaction = store.startTransaction();
    transaction.put(key, value);
    transaction.commit();

    assertThat(store.containsKey(key)).isTrue();
  }

  @Test
  public void removeExisting() throws Exception {
    final KeyValueStorage store = createStore();
    final byte[] key = bytesFromHexString("0F");
    final byte[] value = bytesFromHexString("0ABC");
    KeyValueStorageTransaction tx = store.startTransaction();
    tx.put(key, value);
    tx.commit();
    tx = store.startTransaction();
    tx.remove(key);
    tx.commit();
    assertThat(store.get(key)).isEmpty();
  }

  @Test
  public void removeExistingSameTransaction() throws Exception {
    // A remove in the same transaction as the put wins over the put.
    final KeyValueStorage store = createStore();
    final byte[] key = bytesFromHexString("0F");
    final byte[] value = bytesFromHexString("0ABC");
    KeyValueStorageTransaction tx = store.startTransaction();
    tx.put(key, value);
    tx.remove(key);
    tx.commit();
    assertThat(store.get(key)).isEmpty();
  }

  @Test
  public void removeNonExistent() throws Exception {
    // Removing an absent key is a no-op, not an error.
    final KeyValueStorage store = createStore();
    final byte[] key = bytesFromHexString("0F");
    KeyValueStorageTransaction tx = store.startTransaction();
    tx.remove(key);
    tx.commit();
    assertThat(store.get(key)).isEmpty();
  }

  @Test
  public void concurrentUpdate() throws Exception {
    // Two writer threads race independent single-put transactions over the same
    // key range; afterwards every key must hold one writer's value intact.
    final int keyCount = 1000;
    final KeyValueStorage store = createStore();
    // Counted down once per writer thread, even on failure, so await() can't hang.
    final CountDownLatch finishedLatch = new CountDownLatch(2);
    final Function<byte[], Thread> updater =
        (value) ->
            new Thread(
                () -> {
                  try {
                    for (int i = 0; i < keyCount; i++) {
                      KeyValueStorageTransaction tx = store.startTransaction();
                      tx.put(BytesValues.toMinimalBytes(i).getArrayUnsafe(), value);
                      tx.commit();
                    }
                  } finally {
                    finishedLatch.countDown();
                  }
                });
    // Run 2 concurrent transactions that write a bunch of values to the same keys
    final byte[] a = BytesValue.of(10).getArrayUnsafe();
    final byte[] b = BytesValue.of(20).getArrayUnsafe();
    updater.apply(a).start();
    updater.apply(b).start();
    finishedLatch.await();
    for (int i = 0; i < keyCount; i++) {
      final byte[] key = BytesValues.toMinimalBytes(i).getArrayUnsafe();
      final byte[] actual = store.get(key).get();
      // Either writer may win any given key, but the value must be one of the two.
      assertThat(Arrays.equals(actual, a) || Arrays.equals(actual, b)).isTrue();
    }
    store.close();
  }

  @Test
  public void transactionCommit() throws Exception {
    final KeyValueStorage store = createStore();
    // Add some values
    KeyValueStorageTransaction tx = store.startTransaction();
    tx.put(bytesOf(1), bytesOf(1));
    tx.put(bytesOf(2), bytesOf(2));
    tx.put(bytesOf(3), bytesOf(3));
    tx.commit();

    // Start transaction that adds, modifies, and removes some values
    tx = store.startTransaction();
    tx.put(bytesOf(2), bytesOf(3));
    tx.put(bytesOf(2), bytesOf(4));
    tx.remove(bytesOf(3));
    tx.put(bytesOf(4), bytesOf(8));

    // Check values before committing have not changed
    assertThat(store.get(bytesOf(1))).contains(bytesOf(1));
    assertThat(store.get(bytesOf(2))).contains(bytesOf(2));
    assertThat(store.get(bytesOf(3))).contains(bytesOf(3));
    assertThat(store.get(bytesOf(4))).isEmpty();

    tx.commit();

    // Check that values have been updated after commit
    assertThat(store.get(bytesOf(1))).contains(bytesOf(1));
    assertThat(store.get(bytesOf(2))).contains(bytesOf(4));
    assertThat(store.get(bytesOf(3))).isEmpty();
    assertThat(store.get(bytesOf(4))).contains(bytesOf(8));
  }

  @Test
  public void transactionRollback() throws Exception {
    final KeyValueStorage store = createStore();
    // Add some values
    KeyValueStorageTransaction tx = store.startTransaction();
    tx.put(bytesOf(1), bytesOf(1));
    tx.put(bytesOf(2), bytesOf(2));
    tx.put(bytesOf(3), bytesOf(3));
    tx.commit();

    // Start transaction that adds, modifies, and removes some values
    tx = store.startTransaction();
    tx.put(bytesOf(2), bytesOf(3));
    tx.put(bytesOf(2), bytesOf(4));
    tx.remove(bytesOf(3));
    tx.put(bytesOf(4), bytesOf(8));

    // Check values before committing have not changed
    assertThat(store.get(bytesOf(1))).contains(bytesOf(1));
    assertThat(store.get(bytesOf(2))).contains(bytesOf(2));
    assertThat(store.get(bytesOf(3))).contains(bytesOf(3));
    assertThat(store.get(bytesOf(4))).isEmpty();

    tx.rollback();

    // Check that values have not changed after rollback
    assertThat(store.get(bytesOf(1))).contains(bytesOf(1));
    assertThat(store.get(bytesOf(2))).contains(bytesOf(2));
    assertThat(store.get(bytesOf(3))).contains(bytesOf(3));
    assertThat(store.get(bytesOf(4))).isEmpty();
  }

  @Test
  public void transactionCommitEmpty() throws Exception {
    // Committing an empty transaction must succeed silently.
    final KeyValueStorage store = createStore();
    final KeyValueStorageTransaction tx = store.startTransaction();
    tx.commit();
  }

  @Test
  public void transactionRollbackEmpty() throws Exception {
    // Rolling back an empty transaction must succeed silently.
    final KeyValueStorage store = createStore();
    final KeyValueStorageTransaction tx = store.startTransaction();
    tx.rollback();
  }

  @Test(expected = IllegalStateException.class)
  public void transactionPutAfterCommit() throws Exception {
    final KeyValueStorage store = createStore();
    final KeyValueStorageTransaction tx = store.startTransaction();
    tx.commit();
    tx.put(bytesOf(1), bytesOf(1));
  }

  @Test(expected = IllegalStateException.class)
  public void transactionRemoveAfterCommit() throws Exception {
    final KeyValueStorage store = createStore();
    final KeyValueStorageTransaction tx = store.startTransaction();
    tx.commit();
    tx.remove(bytesOf(1));
  }

  @Test(expected = IllegalStateException.class)
  public void transactionPutAfterRollback() throws Exception {
    final KeyValueStorage store = createStore();
    final KeyValueStorageTransaction tx = store.startTransaction();
    tx.rollback();
    tx.put(bytesOf(1), bytesOf(1));
  }

  @Test(expected = IllegalStateException.class)
  public void transactionRemoveAfterRollback() throws Exception {
    final KeyValueStorage store = createStore();
    final KeyValueStorageTransaction tx = store.startTransaction();
    tx.rollback();
    tx.remove(bytesOf(1));
  }

  @Test(expected = IllegalStateException.class)
  public void transactionCommitAfterRollback() throws Exception {
    final KeyValueStorage store = createStore();
    final KeyValueStorageTransaction tx = store.startTransaction();
    tx.rollback();
    tx.commit();
  }

  @Test(expected = IllegalStateException.class)
  public void transactionCommitTwice() throws Exception {
    final KeyValueStorage store = createStore();
    final KeyValueStorageTransaction tx = store.startTransaction();
    tx.commit();
    tx.commit();
  }

  @Test(expected = IllegalStateException.class)
  public void transactionRollbackAfterCommit() throws Exception {
    final KeyValueStorage store = createStore();
    final KeyValueStorageTransaction tx = store.startTransaction();
    tx.commit();
    tx.rollback();
  }

  @Test(expected = IllegalStateException.class)
  public void transactionRollbackTwice() throws Exception {
    final KeyValueStorage store = createStore();
    final KeyValueStorageTransaction tx = store.startTransaction();
    tx.rollback();
    tx.rollback();
  }

  @Test
  public void twoTransactions() throws Exception {
    // Two transactions opened against the same store commit independently.
    final KeyValueStorage store = createStore();

    final KeyValueStorageTransaction tx1 = store.startTransaction();
    final KeyValueStorageTransaction tx2 = store.startTransaction();

    tx1.put(bytesOf(1), bytesOf(1));
    tx2.put(bytesOf(2), bytesOf(2));

    tx1.commit();
    tx2.commit();

    assertThat(store.get(bytesOf(1))).contains(bytesOf(1));
    assertThat(store.get(bytesOf(2))).contains(bytesOf(2));
  }

  @Test
  public void transactionIsolation() throws Exception {
    // Two threads each write every key inside one transaction. Because a
    // transaction commits atomically, all keys must end up with the same
    // thread's value — writes from the two transactions must not interleave.
    final int keyCount = 1000;
    final KeyValueStorage store = createStore();

    // Counted down once per thread, even on failure, so await() can't hang.
    final CountDownLatch finishedLatch = new CountDownLatch(2);
    final Function<byte[], Thread> txRunner =
        (value) ->
            new Thread(
                () -> {
                  final KeyValueStorageTransaction tx = store.startTransaction();
                  for (int i = 0; i < keyCount; i++) {
                    tx.put(BytesValues.toMinimalBytes(i).getArrayUnsafe(), value);
                  }
                  try {
                    tx.commit();
                  } finally {
                    finishedLatch.countDown();
                  }
                });

    // Run 2 concurrent transactions that write a bunch of values to the same keys
    final byte[] a = bytesOf(10);
    final byte[] b = bytesOf(20);
    txRunner.apply(a).start();
    txRunner.apply(b).start();
    finishedLatch.await();

    // Check that transaction results are isolated (not interleaved)
    final List<byte[]> finalValues = new ArrayList<>(keyCount);
    for (int i = 0; i < keyCount; i++) {
      final byte[] key = BytesValues.toMinimalBytes(i).getArrayUnsafe();
      finalValues.add(store.get(key).get());
    }

    // Expecting the same value for all entries (AssertJ compares arrays by content)
    final byte[] expected = finalValues.get(0);
    for (final byte[] actual : finalValues) {
      assertThat(actual).isEqualTo(expected);
    }

    assertThat(Arrays.equals(expected, a) || Arrays.equals(expected, b)).isTrue();

    store.close();
  }

  /*
   * Used to mimic the wrapping with BytesValue performed in Pantheon
   */
  protected byte[] bytesFromHexString(final String hex) {
    return BytesValue.fromHexString(hex).getArrayUnsafe();
  }

  protected byte[] bytesOf(final int... bytes) {
    return BytesValue.of(bytes).getArrayUnsafe();
  }
}
Loading…
Cancel
Save