mirror of https://github.com/hyperledger/besu
Preload state trie node (#4737)
The idea behind this commit is to asynchronously preload account nodes and storage nodes from the database during transaction processing, so these nodes can be reused during the calculate-root-hash step. We've created two caches, one for account nodes and one for storage nodes. The size of these caches is 100k entries for accounts and 200k for storage. We've tested other values, but this configuration is the one that performs best. We also export cache metrics as Prometheus metrics to check cache efficiency. Signed-off-by: Karim TAAM <karim.t2am@gmail.com> Co-authored-by: Ameziane H <ameziane.hamlat@consensys.net>pull/4761/head
parent
2c5d7728ca
commit
fae615fcb8
@ -0,0 +1,151 @@ |
||||
/* |
||||
* Copyright Hyperledger Besu contributors. |
||||
* |
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on |
||||
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the |
||||
* specific language governing permissions and limitations under the License. |
||||
* |
||||
* SPDX-License-Identifier: Apache-2.0 |
||||
* |
||||
*/ |
||||
package org.hyperledger.besu.ethereum.bonsai; |
||||
|
||||
import org.hyperledger.besu.datatypes.Address; |
||||
import org.hyperledger.besu.datatypes.Hash; |
||||
import org.hyperledger.besu.ethereum.trie.MerklePatriciaTrie; |
||||
import org.hyperledger.besu.ethereum.trie.MerkleTrieException; |
||||
import org.hyperledger.besu.ethereum.trie.StoredMerklePatriciaTrie; |
||||
import org.hyperledger.besu.metrics.BesuMetricCategory; |
||||
import org.hyperledger.besu.metrics.ObservableMetricsSystem; |
||||
import org.hyperledger.besu.metrics.prometheus.PrometheusMetricsSystem; |
||||
|
||||
import java.util.Optional; |
||||
import java.util.concurrent.CompletableFuture; |
||||
import java.util.function.Function; |
||||
|
||||
import com.google.common.annotations.VisibleForTesting; |
||||
import com.google.common.cache.Cache; |
||||
import com.google.common.cache.CacheBuilder; |
||||
import io.prometheus.client.guava.cache.CacheMetricsCollector; |
||||
import org.apache.tuweni.bytes.Bytes; |
||||
import org.apache.tuweni.bytes.Bytes32; |
||||
|
||||
public class CachedMerkleTrieLoader { |
||||
|
||||
private static final int ACCOUNT_CACHE_SIZE = 100_000; |
||||
private static final int STORAGE_CACHE_SIZE = 200_000; |
||||
private final Cache<Bytes, Bytes> accountNodes = |
||||
CacheBuilder.newBuilder().recordStats().maximumSize(ACCOUNT_CACHE_SIZE).build(); |
||||
private final Cache<Bytes, Bytes> storageNodes = |
||||
CacheBuilder.newBuilder().recordStats().maximumSize(STORAGE_CACHE_SIZE).build(); |
||||
|
||||
public CachedMerkleTrieLoader(final ObservableMetricsSystem metricsSystem) { |
||||
|
||||
CacheMetricsCollector cacheMetrics = new CacheMetricsCollector(); |
||||
cacheMetrics.addCache("accountsNodes", accountNodes); |
||||
cacheMetrics.addCache("storageNodes", storageNodes); |
||||
if (metricsSystem instanceof PrometheusMetricsSystem) |
||||
((PrometheusMetricsSystem) metricsSystem) |
||||
.addCollector(BesuMetricCategory.BLOCKCHAIN, () -> cacheMetrics); |
||||
} |
||||
|
||||
public void preLoadAccount( |
||||
final BonsaiWorldStateKeyValueStorage worldStateStorage, |
||||
final Hash worldStateRootHash, |
||||
final Address account) { |
||||
CompletableFuture.runAsync( |
||||
() -> cacheAccountNodes(worldStateStorage, worldStateRootHash, account)); |
||||
} |
||||
|
||||
@VisibleForTesting |
||||
public void cacheAccountNodes( |
||||
final BonsaiWorldStateKeyValueStorage worldStateStorage, |
||||
final Hash worldStateRootHash, |
||||
final Address account) { |
||||
try { |
||||
final StoredMerklePatriciaTrie<Bytes, Bytes> accountTrie = |
||||
new StoredMerklePatriciaTrie<>( |
||||
(location, hash) -> { |
||||
Optional<Bytes> node = worldStateStorage.getAccountStateTrieNode(location, hash); |
||||
node.ifPresent(bytes -> accountNodes.put(Hash.hash(bytes), bytes)); |
||||
return node; |
||||
}, |
||||
worldStateRootHash, |
||||
Function.identity(), |
||||
Function.identity()); |
||||
accountTrie.get(Hash.hash(account)); |
||||
} catch (MerkleTrieException e) { |
||||
// ignore exception for the cache
|
||||
} |
||||
} |
||||
|
||||
public void preLoadStorageSlot( |
||||
final BonsaiWorldStateKeyValueStorage worldStateStorage, |
||||
final Address account, |
||||
final Hash slotHash) { |
||||
CompletableFuture.runAsync(() -> cacheStorageNodes(worldStateStorage, account, slotHash)); |
||||
} |
||||
|
||||
@VisibleForTesting |
||||
public void cacheStorageNodes( |
||||
final BonsaiWorldStateKeyValueStorage worldStateStorage, |
||||
final Address account, |
||||
final Hash slotHash) { |
||||
final Hash accountHash = Hash.hash(account); |
||||
worldStateStorage |
||||
.getStateTrieNode(Bytes.concatenate(accountHash, Bytes.EMPTY)) |
||||
.ifPresent( |
||||
storageRoot -> { |
||||
try { |
||||
final StoredMerklePatriciaTrie<Bytes, Bytes> storageTrie = |
||||
new StoredMerklePatriciaTrie<>( |
||||
(location, hash) -> { |
||||
Optional<Bytes> node = |
||||
worldStateStorage.getAccountStorageTrieNode( |
||||
accountHash, location, hash); |
||||
node.ifPresent(bytes -> storageNodes.put(Hash.hash(bytes), bytes)); |
||||
return node; |
||||
}, |
||||
Hash.hash(storageRoot), |
||||
Function.identity(), |
||||
Function.identity()); |
||||
storageTrie.get(slotHash); |
||||
} catch (MerkleTrieException e) { |
||||
// ignore exception for the cache
|
||||
} |
||||
}); |
||||
} |
||||
|
||||
public Optional<Bytes> getAccountStateTrieNode( |
||||
final BonsaiWorldStateKeyValueStorage worldStateKeyValueStorage, |
||||
final Bytes location, |
||||
final Bytes32 nodeHash) { |
||||
if (nodeHash.equals(MerklePatriciaTrie.EMPTY_TRIE_NODE_HASH)) { |
||||
return Optional.of(MerklePatriciaTrie.EMPTY_TRIE_NODE); |
||||
} else { |
||||
return Optional.ofNullable(accountNodes.getIfPresent(nodeHash)) |
||||
.or(() -> worldStateKeyValueStorage.getAccountStateTrieNode(location, nodeHash)); |
||||
} |
||||
} |
||||
|
||||
public Optional<Bytes> getAccountStorageTrieNode( |
||||
final BonsaiWorldStateKeyValueStorage worldStateKeyValueStorage, |
||||
final Hash accountHash, |
||||
final Bytes location, |
||||
final Bytes32 nodeHash) { |
||||
if (nodeHash.equals(MerklePatriciaTrie.EMPTY_TRIE_NODE_HASH)) { |
||||
return Optional.of(MerklePatriciaTrie.EMPTY_TRIE_NODE); |
||||
} else { |
||||
return Optional.ofNullable(storageNodes.getIfPresent(nodeHash)) |
||||
.or( |
||||
() -> |
||||
worldStateKeyValueStorage.getAccountStorageTrieNode( |
||||
accountHash, location, nodeHash)); |
||||
} |
||||
} |
||||
} |
@ -0,0 +1,171 @@ |
||||
/* |
||||
* Copyright Hyperledger Besu contributors. |
||||
* |
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on |
||||
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the |
||||
* specific language governing permissions and limitations under the License. |
||||
* |
||||
* SPDX-License-Identifier: Apache-2.0 |
||||
* |
||||
*/ |
||||
package org.hyperledger.besu.ethereum.bonsai; |
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat; |
||||
|
||||
import org.hyperledger.besu.datatypes.Address; |
||||
import org.hyperledger.besu.datatypes.Hash; |
||||
import org.hyperledger.besu.ethereum.core.InMemoryKeyValueStorageProvider; |
||||
import org.hyperledger.besu.ethereum.core.TrieGenerator; |
||||
import org.hyperledger.besu.ethereum.rlp.RLP; |
||||
import org.hyperledger.besu.ethereum.storage.StorageProvider; |
||||
import org.hyperledger.besu.ethereum.trie.MerklePatriciaTrie; |
||||
import org.hyperledger.besu.ethereum.trie.StoredMerklePatriciaTrie; |
||||
import org.hyperledger.besu.ethereum.trie.TrieIterator; |
||||
import org.hyperledger.besu.ethereum.worldstate.StateTrieAccountValue; |
||||
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.List; |
||||
import java.util.function.Function; |
||||
import java.util.stream.Collectors; |
||||
|
||||
import org.apache.tuweni.bytes.Bytes; |
||||
import org.junit.Before; |
||||
import org.junit.Test; |
||||
import org.mockito.Mockito; |
||||
|
||||
public class CachedMerkleTrieLoaderTest { |
||||
|
||||
private CachedMerkleTrieLoader merkleTrieLoader; |
||||
private final StorageProvider storageProvider = new InMemoryKeyValueStorageProvider(); |
||||
private final BonsaiWorldStateKeyValueStorage inMemoryWorldState = |
||||
Mockito.spy(new BonsaiWorldStateKeyValueStorage(storageProvider)); |
||||
|
||||
final List<Address> accounts = |
||||
List.of(Address.fromHexString("0xdeadbeef"), Address.fromHexString("0xdeadbeee")); |
||||
|
||||
private MerklePatriciaTrie<Bytes, Bytes> trie; |
||||
|
||||
@Before |
||||
public void setup() { |
||||
trie = |
||||
TrieGenerator.generateTrie( |
||||
inMemoryWorldState, accounts.stream().map(Hash::hash).collect(Collectors.toList())); |
||||
merkleTrieLoader = new CachedMerkleTrieLoader(new NoOpMetricsSystem()); |
||||
} |
||||
|
||||
@Test |
||||
public void shouldAddAccountNodesInCacheDuringPreload() { |
||||
merkleTrieLoader.cacheAccountNodes( |
||||
inMemoryWorldState, Hash.wrap(trie.getRootHash()), accounts.get(0)); |
||||
|
||||
final BonsaiWorldStateKeyValueStorage emptyStorage = |
||||
new BonsaiWorldStateKeyValueStorage(new InMemoryKeyValueStorageProvider()); |
||||
StoredMerklePatriciaTrie<Bytes, Bytes> cachedTrie = |
||||
new StoredMerklePatriciaTrie<>( |
||||
(location, hash) -> |
||||
merkleTrieLoader.getAccountStateTrieNode(emptyStorage, location, hash), |
||||
trie.getRootHash(), |
||||
Function.identity(), |
||||
Function.identity()); |
||||
|
||||
final Hash hashAccountZero = Hash.hash(accounts.get(0)); |
||||
assertThat(cachedTrie.get(hashAccountZero)).isEqualTo(trie.get(hashAccountZero)); |
||||
} |
||||
|
||||
@Test |
||||
public void shouldAddStorageNodesInCacheDuringPreload() { |
||||
final Hash hashAccountZero = Hash.hash(accounts.get(0)); |
||||
final StateTrieAccountValue stateTrieAccountValue = |
||||
StateTrieAccountValue.readFrom(RLP.input(trie.get(hashAccountZero).orElseThrow())); |
||||
final StoredMerklePatriciaTrie<Bytes, Bytes> storageTrie = |
||||
new StoredMerklePatriciaTrie<>( |
||||
(location, hash) -> |
||||
inMemoryWorldState.getAccountStorageTrieNode(hashAccountZero, location, hash), |
||||
stateTrieAccountValue.getStorageRoot(), |
||||
Function.identity(), |
||||
Function.identity()); |
||||
final List<Bytes> originalSlots = new ArrayList<>(); |
||||
storageTrie.visitLeafs( |
||||
(keyHash, node) -> { |
||||
merkleTrieLoader.cacheStorageNodes( |
||||
inMemoryWorldState, accounts.get(0), Hash.wrap(keyHash)); |
||||
originalSlots.add(node.getRlp()); |
||||
return TrieIterator.State.CONTINUE; |
||||
}); |
||||
|
||||
final List<Bytes> cachedSlots = new ArrayList<>(); |
||||
final BonsaiWorldStateKeyValueStorage emptyStorage = |
||||
new BonsaiWorldStateKeyValueStorage(new InMemoryKeyValueStorageProvider()); |
||||
final StoredMerklePatriciaTrie<Bytes, Bytes> cachedTrie = |
||||
new StoredMerklePatriciaTrie<>( |
||||
(location, hash) -> |
||||
merkleTrieLoader.getAccountStorageTrieNode( |
||||
emptyStorage, hashAccountZero, location, hash), |
||||
stateTrieAccountValue.getStorageRoot(), |
||||
Function.identity(), |
||||
Function.identity()); |
||||
cachedTrie.visitLeafs( |
||||
(keyHash, node) -> { |
||||
cachedSlots.add(node.getRlp()); |
||||
return TrieIterator.State.CONTINUE; |
||||
}); |
||||
assertThat(originalSlots).isNotEmpty(); |
||||
assertThat(originalSlots).isEqualTo(cachedSlots); |
||||
} |
||||
|
||||
@Test |
||||
public void shouldFallbackWhenAccountNodesIsNotInCache() { |
||||
final StoredMerklePatriciaTrie<Bytes, Bytes> cachedTrie = |
||||
new StoredMerklePatriciaTrie<>( |
||||
(location, hash) -> |
||||
merkleTrieLoader.getAccountStateTrieNode(inMemoryWorldState, location, hash), |
||||
trie.getRootHash(), |
||||
Function.identity(), |
||||
Function.identity()); |
||||
final Hash hashAccountZero = Hash.hash(accounts.get(0)); |
||||
assertThat(cachedTrie.get(hashAccountZero)).isEqualTo(trie.get(hashAccountZero)); |
||||
} |
||||
|
||||
@Test |
||||
public void shouldFallbackWhenStorageNodesIsNotInCache() { |
||||
final Hash hashAccountZero = Hash.hash(accounts.get(0)); |
||||
final StateTrieAccountValue stateTrieAccountValue = |
||||
StateTrieAccountValue.readFrom(RLP.input(trie.get(hashAccountZero).orElseThrow())); |
||||
final StoredMerklePatriciaTrie<Bytes, Bytes> storageTrie = |
||||
new StoredMerklePatriciaTrie<>( |
||||
(location, hash) -> |
||||
inMemoryWorldState.getAccountStorageTrieNode(hashAccountZero, location, hash), |
||||
stateTrieAccountValue.getStorageRoot(), |
||||
Function.identity(), |
||||
Function.identity()); |
||||
final List<Bytes> originalSlots = new ArrayList<>(); |
||||
storageTrie.visitLeafs( |
||||
(keyHash, node) -> { |
||||
originalSlots.add(node.getRlp()); |
||||
return TrieIterator.State.CONTINUE; |
||||
}); |
||||
|
||||
final List<Bytes> cachedSlots = new ArrayList<>(); |
||||
final StoredMerklePatriciaTrie<Bytes, Bytes> cachedTrie = |
||||
new StoredMerklePatriciaTrie<>( |
||||
(location, hash) -> |
||||
merkleTrieLoader.getAccountStorageTrieNode( |
||||
inMemoryWorldState, hashAccountZero, location, hash), |
||||
stateTrieAccountValue.getStorageRoot(), |
||||
Function.identity(), |
||||
Function.identity()); |
||||
cachedTrie.visitLeafs( |
||||
(keyHash, node) -> { |
||||
cachedSlots.add(node.getRlp()); |
||||
return TrieIterator.State.CONTINUE; |
||||
}); |
||||
assertThat(originalSlots).isNotEmpty(); |
||||
assertThat(originalSlots).isEqualTo(cachedSlots); |
||||
} |
||||
} |
Loading…
Reference in new issue