mirror of https://github.com/hyperledger/besu
parent
06cda00141
commit
9967e186d0
@ -0,0 +1,179 @@ |
|||||||
|
/* |
||||||
|
* Copyright Hyperledger Besu Contributors. |
||||||
|
* |
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on |
||||||
|
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the |
||||||
|
* specific language governing permissions and limitations under the License. |
||||||
|
* |
||||||
|
* SPDX-License-Identifier: Apache-2.0 |
||||||
|
* |
||||||
|
*/ |
||||||
|
package org.hyperledger.besu.ethereum.trie.diffbased.verkle; |
||||||
|
|
||||||
|
import org.hyperledger.besu.datatypes.AccountValue; |
||||||
|
import org.hyperledger.besu.datatypes.Address; |
||||||
|
import org.hyperledger.besu.datatypes.Hash; |
||||||
|
import org.hyperledger.besu.datatypes.Wei; |
||||||
|
import org.hyperledger.besu.ethereum.rlp.RLP; |
||||||
|
import org.hyperledger.besu.ethereum.rlp.RLPException; |
||||||
|
import org.hyperledger.besu.ethereum.rlp.RLPInput; |
||||||
|
import org.hyperledger.besu.ethereum.rlp.RLPOutput; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.bonsai.storage.BonsaiWorldStateKeyValueStorage; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.DiffBasedAccount; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.worldview.DiffBasedWorldView; |
||||||
|
import org.hyperledger.besu.evm.ModificationNotAllowedException; |
||||||
|
import org.hyperledger.besu.evm.account.AccountStorageEntry; |
||||||
|
import org.hyperledger.besu.evm.worldstate.UpdateTrackingAccount; |
||||||
|
|
||||||
|
import java.util.NavigableMap; |
||||||
|
import java.util.Objects; |
||||||
|
|
||||||
|
import org.apache.tuweni.bytes.Bytes; |
||||||
|
import org.apache.tuweni.bytes.Bytes32; |
||||||
|
|
||||||
|
/**
 * An account in a verkle-trie-backed world state.
 *
 * <p>Extends {@link DiffBasedAccount} with a storage root field kept for RLP
 * round-tripping; the original author flagged that field for removal (see TODO).
 */
public class VerkleAccount extends DiffBasedAccount {
  // Storage trie root hash, serialized/deserialized via RLP below.
  private Hash storageRoot; // TODO REMOVE AS USELESS

  /**
   * Creates an account from its individual field values.
   *
   * @param context the world view this account belongs to
   * @param address the account address
   * @param addressHash the hash of the account address
   * @param nonce the account nonce
   * @param balance the account balance
   * @param storageRoot the root hash of the account's storage trie
   * @param codeHash the hash of the account's code
   * @param mutable whether the resulting account may be modified
   */
  public VerkleAccount(
      final DiffBasedWorldView context,
      final Address address,
      final Hash addressHash,
      final long nonce,
      final Wei balance,
      final Hash storageRoot,
      final Hash codeHash,
      final boolean mutable) {
    // The superclass stores an *immutability* flag (see `immutable` in setStorageRoot),
    // hence the inversion of `mutable` here.
    super(context, address, addressHash, nonce, balance, codeHash, !mutable);
    this.storageRoot = storageRoot;
  }

  /**
   * Creates an account from a state-trie account value.
   *
   * @param context the world view this account belongs to
   * @param address the account address
   * @param stateTrieAccount the source account value (nonce, balance, roots, code hash)
   * @param mutable whether the resulting account may be modified
   */
  public VerkleAccount(
      final DiffBasedWorldView context,
      final Address address,
      final AccountValue stateTrieAccount,
      final boolean mutable) {
    super(context, address, stateTrieAccount, !mutable);
    this.storageRoot = stateTrieAccount.getStorageRoot();
  }

  /** Copy constructor producing an immutable copy bound to the same world view. */
  public VerkleAccount(final VerkleAccount toCopy) {
    this(toCopy, toCopy.context, false);
  }

  /**
   * Copy constructor binding the copy to a (possibly different) world view.
   *
   * @param toCopy the account to copy
   * @param context the world view the copy belongs to
   * @param mutable whether the copy may be modified
   */
  public VerkleAccount(
      final VerkleAccount toCopy, final DiffBasedWorldView context, final boolean mutable) {
    super(toCopy, context, !mutable);
    this.storageRoot = toCopy.storageRoot;
  }

  /**
   * Creates a mutable account from an update-tracking wrapper, copying its pending
   * storage updates. The storage root is reset to the empty trie hash.
   *
   * @param context the world view this account belongs to
   * @param tracked the tracked account supplying field values and updated storage
   */
  public VerkleAccount(
      final DiffBasedWorldView context, final UpdateTrackingAccount<VerkleAccount> tracked) {
    super(
        context,
        tracked.getAddress(),
        tracked.getAddressHash(),
        tracked.getNonce(),
        tracked.getBalance(),
        tracked.getCodeHash(),
        false);
    this.storageRoot = Hash.EMPTY_TRIE_HASH;
    updatedStorage.putAll(tracked.getUpdatedStorage());
  }

  /**
   * Decodes an account from its RLP encoding: a list of
   * [nonce, balance, storageRoot, codeHash].
   *
   * @param context the world view the account belongs to
   * @param address the account address
   * @param encoded the RLP-encoded account
   * @param mutable whether the resulting account may be modified
   * @return the decoded account
   * @throws RLPException if the encoding is malformed
   */
  public static VerkleAccount fromRLP(
      final DiffBasedWorldView context,
      final Address address,
      final Bytes encoded,
      final boolean mutable)
      throws RLPException {
    final RLPInput in = RLP.input(encoded);
    in.enterList();

    final long nonce = in.readLongScalar();
    final Wei balance = Wei.of(in.readUInt256Scalar());
    final Hash storageRoot = Hash.wrap(in.readBytes32());
    final Hash codeHash = Hash.wrap(in.readBytes32());

    in.leaveList();

    return new VerkleAccount(
        context, address, address.addressHash(), nonce, balance, storageRoot, codeHash, mutable);
  }

  @Override
  public NavigableMap<Bytes32, AccountStorageEntry> storageEntriesFrom(
      final Bytes32 startKeyHash, final int limit) {
    // NOTE(review): this casts to the *Bonsai* storage type inside a verkle account;
    // it will throw ClassCastException if the world state storage is verkle-backed.
    // Confirm whether a verkle-specific storageEntriesFrom is intended here.
    return ((BonsaiWorldStateKeyValueStorage) context.getWorldStateStorage())
        .storageEntriesFrom(this.addressHash, startKeyHash, limit);
  }

  /** Writes this account as an RLP list of [nonce, balance, storageRoot, codeHash]. */
  @Override
  public void writeTo(final RLPOutput out) {
    out.startList();

    out.writeLongScalar(nonce);
    out.writeUInt256Scalar(balance);
    out.writeBytes(storageRoot);
    out.writeBytes(codeHash);

    out.endList();
  }

  @Override
  public Hash getStorageRoot() {
    return storageRoot;
  }

  /**
   * Sets the storage root hash.
   *
   * @param storageRoot the new storage root
   * @throws ModificationNotAllowedException if this account is immutable
   */
  public void setStorageRoot(final Hash storageRoot) {
    if (immutable) {
      throw new ModificationNotAllowedException();
    }
    this.storageRoot = storageRoot;
  }

  @Override
  public String toString() {
    return "AccountState{"
        + "address="
        + address
        + ", nonce="
        + nonce
        + ", balance="
        + balance
        + ", storageRoot="
        + storageRoot
        + ", codeHash="
        + codeHash
        + '}';
  }

  /**
   * Throws an exception if the two accounts represent different stored states
   *
   * <p>NOTE(review): nonce, balance and storage root are compared, but code hashes are
   * not — confirm that omission is intentional.
   *
   * @param source The bonsai account to compare
   * @param account The State Trie account to compare
   * @param context a description to be added to the thrown exceptions
   * @throws IllegalStateException if the stored values differ
   */
  public static void assertCloseEnoughForDiffing(
      final VerkleAccount source, final AccountValue account, final String context) {
    if (source == null) {
      throw new IllegalStateException(context + ": source is null but target isn't");
    } else {
      if (source.nonce != account.getNonce()) {
        throw new IllegalStateException(context + ": nonces differ");
      }
      if (!Objects.equals(source.balance, account.getBalance())) {
        throw new IllegalStateException(context + ": balances differ");
      }
      if (!Objects.equals(source.storageRoot, account.getStorageRoot())) {
        throw new IllegalStateException(context + ": Storage Roots differ");
      }
    }
  }
}
@ -0,0 +1,61 @@ |
|||||||
|
/* |
||||||
|
* Copyright Hyperledger Besu Contributors. |
||||||
|
* |
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on |
||||||
|
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the |
||||||
|
* specific language governing permissions and limitations under the License. |
||||||
|
* |
||||||
|
* SPDX-License-Identifier: Apache-2.0 |
||||||
|
* |
||||||
|
*/ |
||||||
|
|
||||||
|
package org.hyperledger.besu.ethereum.trie.diffbased.verkle; |
||||||
|
|
||||||
|
import org.hyperledger.besu.ethereum.chain.Blockchain; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.DiffBasedWorldStateProvider; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.trielog.TrieLogManager; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.trielog.TrieLogPruner; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.cache.VerkleCachedWorldStorageManager; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.storage.VerkleWorldStateKeyValueStorage; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.worldview.VerkleWorldState; |
||||||
|
import org.hyperledger.besu.evm.internal.EvmConfiguration; |
||||||
|
import org.hyperledger.besu.metrics.ObservableMetricsSystem; |
||||||
|
import org.hyperledger.besu.plugin.BesuContext; |
||||||
|
|
||||||
|
import java.util.Optional; |
||||||
|
|
||||||
|
import com.google.common.annotations.VisibleForTesting; |
||||||
|
|
||||||
|
/**
 * World state provider (archive) for verkle-trie-backed world states.
 *
 * <p>Wires a {@link VerkleCachedWorldStorageManager} into the diff-based provider and
 * loads the persisted head state on construction.
 */
public class VerkleWorldStateProvider extends DiffBasedWorldStateProvider {

  /**
   * Creates a verkle world state provider.
   *
   * @param worldStateKeyValueStorage backing verkle key-value storage
   * @param blockchain the blockchain the world states belong to
   * @param maxLayersToLoad optional cap on the number of trie-log layers to load
   * @param metricsSystem metrics sink for the cached world storage manager
   * @param pluginContext Besu plugin context passed to the superclass
   * @param evmConfiguration EVM configuration for the persisted world state
   * @param trieLogPruner pruner for historical trie logs
   */
  public VerkleWorldStateProvider(
      final VerkleWorldStateKeyValueStorage worldStateKeyValueStorage,
      final Blockchain blockchain,
      final Optional<Long> maxLayersToLoad,
      final ObservableMetricsSystem metricsSystem,
      final BesuContext pluginContext,
      final EvmConfiguration evmConfiguration,
      final TrieLogPruner trieLogPruner) {
    super(worldStateKeyValueStorage, blockchain, maxLayersToLoad, pluginContext, trieLogPruner);
    // NOTE(review): the storage manager is installed before loading the persisted state;
    // this ordering is presumed significant — confirm against DiffBasedWorldStateProvider.
    provideCachedWorldStorageManager(
        new VerkleCachedWorldStorageManager(this, worldStateKeyValueStorage, metricsSystem));
    loadPersistedState(new VerkleWorldState(this, worldStateKeyValueStorage, evmConfiguration));
  }

  /**
   * Test-only constructor allowing the cached storage manager and trie-log manager to be
   * injected directly.
   */
  @VisibleForTesting
  VerkleWorldStateProvider(
      final VerkleCachedWorldStorageManager cachedWorldStorageManager,
      final TrieLogManager trieLogManager,
      final VerkleWorldStateKeyValueStorage worldStateKeyValueStorage,
      final Blockchain blockchain,
      final EvmConfiguration evmConfiguration) {
    super(worldStateKeyValueStorage, blockchain, trieLogManager);
    provideCachedWorldStorageManager(cachedWorldStorageManager);
    loadPersistedState(new VerkleWorldState(this, worldStateKeyValueStorage, evmConfiguration));
  }
}
@ -0,0 +1,63 @@ |
|||||||
|
/* |
||||||
|
* Copyright Hyperledger Besu Contributors. |
||||||
|
* |
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on |
||||||
|
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the |
||||||
|
* specific language governing permissions and limitations under the License. |
||||||
|
* |
||||||
|
* SPDX-License-Identifier: Apache-2.0 |
||||||
|
*/ |
||||||
|
package org.hyperledger.besu.ethereum.trie.diffbased.verkle.cache; |
||||||
|
|
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.DiffBasedWorldStateProvider; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.cache.DiffBasedCachedWorldStorageManager; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.storage.DiffBasedWorldStateKeyValueStorage; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.worldview.DiffBasedWorldState; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.VerkleWorldStateProvider; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.storage.VerkleLayeredWorldStateKeyValueStorage; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.storage.VerkleSnapshotWorldStateKeyValueStorage; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.storage.VerkleWorldStateKeyValueStorage; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.worldview.VerkleWorldState; |
||||||
|
import org.hyperledger.besu.evm.internal.EvmConfiguration; |
||||||
|
import org.hyperledger.besu.metrics.ObservableMetricsSystem; |
||||||
|
|
||||||
|
public class VerkleCachedWorldStorageManager extends DiffBasedCachedWorldStorageManager { |
||||||
|
|
||||||
|
public VerkleCachedWorldStorageManager( |
||||||
|
final DiffBasedWorldStateProvider archive, |
||||||
|
final DiffBasedWorldStateKeyValueStorage worldStateKeyValueStorage, |
||||||
|
final ObservableMetricsSystem metricsSystem) { |
||||||
|
super(archive, worldStateKeyValueStorage, metricsSystem); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public DiffBasedWorldState createWorldState( |
||||||
|
final DiffBasedWorldStateProvider archive, |
||||||
|
final DiffBasedWorldStateKeyValueStorage worldStateKeyValueStorage, |
||||||
|
final EvmConfiguration evmConfiguration) { |
||||||
|
return new VerkleWorldState( |
||||||
|
(VerkleWorldStateProvider) archive, |
||||||
|
(VerkleWorldStateKeyValueStorage) worldStateKeyValueStorage, |
||||||
|
evmConfiguration); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public DiffBasedWorldStateKeyValueStorage createLayeredKeyValueStorage( |
||||||
|
final DiffBasedWorldStateKeyValueStorage worldStateKeyValueStorage) { |
||||||
|
return new VerkleLayeredWorldStateKeyValueStorage( |
||||||
|
(VerkleWorldStateKeyValueStorage) worldStateKeyValueStorage); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public DiffBasedWorldStateKeyValueStorage createSnapshotKeyValueStorage( |
||||||
|
final DiffBasedWorldStateKeyValueStorage worldStateKeyValueStorage, |
||||||
|
final ObservableMetricsSystem metricsSystem) { |
||||||
|
return new VerkleSnapshotWorldStateKeyValueStorage( |
||||||
|
(VerkleWorldStateKeyValueStorage) worldStateKeyValueStorage, metricsSystem); |
||||||
|
} |
||||||
|
} |
@ -0,0 +1,52 @@ |
|||||||
|
/* |
||||||
|
* Copyright Hyperledger Besu Contributors. |
||||||
|
* |
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on |
||||||
|
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the |
||||||
|
* specific language governing permissions and limitations under the License. |
||||||
|
* |
||||||
|
* SPDX-License-Identifier: Apache-2.0 |
||||||
|
* |
||||||
|
*/ |
||||||
|
package org.hyperledger.besu.ethereum.trie.diffbased.verkle.storage; |
||||||
|
|
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.StorageSubscriber; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.storage.DiffBasedLayeredWorldStateKeyValueStorage; |
||||||
|
import org.hyperledger.besu.metrics.ObservableMetricsSystem; |
||||||
|
import org.hyperledger.besu.plugin.services.storage.KeyValueStorage; |
||||||
|
import org.hyperledger.besu.plugin.services.storage.SnappedKeyValueStorage; |
||||||
|
import org.hyperledger.besu.services.kvstore.LayeredKeyValueStorage; |
||||||
|
|
||||||
|
public class VerkleLayeredWorldStateKeyValueStorage extends VerkleSnapshotWorldStateKeyValueStorage |
||||||
|
implements DiffBasedLayeredWorldStateKeyValueStorage, StorageSubscriber { |
||||||
|
|
||||||
|
public VerkleLayeredWorldStateKeyValueStorage(final VerkleWorldStateKeyValueStorage parent) { |
||||||
|
this( |
||||||
|
new LayeredKeyValueStorage(parent.getComposedWorldStateStorage()), |
||||||
|
parent.getTrieLogStorage(), |
||||||
|
parent, |
||||||
|
parent.getMetricsSystem()); |
||||||
|
} |
||||||
|
|
||||||
|
public VerkleLayeredWorldStateKeyValueStorage( |
||||||
|
final SnappedKeyValueStorage composedWorldStateStorage, |
||||||
|
final KeyValueStorage trieLogStorage, |
||||||
|
final VerkleWorldStateKeyValueStorage parent, |
||||||
|
final ObservableMetricsSystem metricsSystem) { |
||||||
|
super(parent, composedWorldStateStorage, trieLogStorage, metricsSystem); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public VerkleLayeredWorldStateKeyValueStorage clone() { |
||||||
|
return new VerkleLayeredWorldStateKeyValueStorage( |
||||||
|
((LayeredKeyValueStorage) composedWorldStateStorage).clone(), |
||||||
|
trieLogStorage, |
||||||
|
parentWorldStateStorage, |
||||||
|
metricsSystem); |
||||||
|
} |
||||||
|
} |
@ -0,0 +1,201 @@ |
|||||||
|
/* |
||||||
|
* Copyright Hyperledger Besu Contributors. |
||||||
|
* |
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on |
||||||
|
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the |
||||||
|
* specific language governing permissions and limitations under the License. |
||||||
|
* |
||||||
|
* SPDX-License-Identifier: Apache-2.0 |
||||||
|
* |
||||||
|
*/ |
||||||
|
package org.hyperledger.besu.ethereum.trie.diffbased.verkle.storage; |
||||||
|
|
||||||
|
import org.hyperledger.besu.datatypes.Hash; |
||||||
|
import org.hyperledger.besu.datatypes.StorageSlotKey; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.StorageSubscriber; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.storage.DiffBasedSnapshotWorldStateKeyValueStorage; |
||||||
|
import org.hyperledger.besu.metrics.ObservableMetricsSystem; |
||||||
|
import org.hyperledger.besu.plugin.services.exception.StorageException; |
||||||
|
import org.hyperledger.besu.plugin.services.storage.KeyValueStorage; |
||||||
|
import org.hyperledger.besu.plugin.services.storage.SnappableKeyValueStorage; |
||||||
|
import org.hyperledger.besu.plugin.services.storage.SnappedKeyValueStorage; |
||||||
|
|
||||||
|
import java.util.Optional; |
||||||
|
|
||||||
|
import org.apache.tuweni.bytes.Bytes; |
||||||
|
import org.apache.tuweni.bytes.Bytes32; |
||||||
|
import org.slf4j.Logger; |
||||||
|
import org.slf4j.LoggerFactory; |
||||||
|
|
||||||
|
public class VerkleSnapshotWorldStateKeyValueStorage extends VerkleWorldStateKeyValueStorage |
||||||
|
implements DiffBasedSnapshotWorldStateKeyValueStorage, StorageSubscriber { |
||||||
|
|
||||||
|
protected final VerkleWorldStateKeyValueStorage parentWorldStateStorage; |
||||||
|
private static final Logger LOG = |
||||||
|
LoggerFactory.getLogger(VerkleSnapshotWorldStateKeyValueStorage.class); |
||||||
|
private final long subscribeParentId; |
||||||
|
|
||||||
|
public VerkleSnapshotWorldStateKeyValueStorage( |
||||||
|
final VerkleWorldStateKeyValueStorage parentWorldStateStorage, |
||||||
|
final SnappedKeyValueStorage segmentedWorldStateStorage, |
||||||
|
final KeyValueStorage trieLogStorage, |
||||||
|
final ObservableMetricsSystem metricsSystem) { |
||||||
|
super(segmentedWorldStateStorage, trieLogStorage, metricsSystem); |
||||||
|
this.parentWorldStateStorage = parentWorldStateStorage; |
||||||
|
this.subscribeParentId = parentWorldStateStorage.subscribe(this); |
||||||
|
} |
||||||
|
|
||||||
|
public VerkleSnapshotWorldStateKeyValueStorage( |
||||||
|
final VerkleWorldStateKeyValueStorage worldStateKeyValueStorage, |
||||||
|
final ObservableMetricsSystem metricsSystem) { |
||||||
|
this( |
||||||
|
worldStateKeyValueStorage, |
||||||
|
((SnappableKeyValueStorage) worldStateKeyValueStorage.getComposedWorldStateStorage()) |
||||||
|
.takeSnapshot(), |
||||||
|
worldStateKeyValueStorage.getTrieLogStorage(), |
||||||
|
metricsSystem); |
||||||
|
} |
||||||
|
|
||||||
|
private boolean isClosedGet() { |
||||||
|
if (isClosed.get()) { |
||||||
|
Throwable t = new Throwable("Attempting to access closed worldstate"); |
||||||
|
LOG.warn(t.getMessage(), t); |
||||||
|
} |
||||||
|
return isClosed.get(); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public Updater updater() { |
||||||
|
return new Updater( |
||||||
|
((SnappedKeyValueStorage) composedWorldStateStorage).getSnapshotTransaction(), |
||||||
|
trieLogStorage.startTransaction(), |
||||||
|
flatDbStrategy); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public Optional<Bytes> getAccount(final Hash accountHash) { |
||||||
|
return isClosedGet() ? Optional.empty() : super.getAccount(accountHash); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public Optional<Bytes> getCode(final Bytes32 codeHash, final Hash accountHash) { |
||||||
|
return isClosedGet() ? Optional.empty() : super.getCode(codeHash, accountHash); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public Optional<byte[]> getTrieLog(final Hash blockHash) { |
||||||
|
return isClosedGet() ? Optional.empty() : super.getTrieLog(blockHash); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public Optional<Bytes> getStateTrieNode(final Bytes location) { |
||||||
|
return isClosedGet() ? Optional.empty() : super.getStateTrieNode(location); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public Optional<Bytes> getWorldStateRootHash() { |
||||||
|
return isClosedGet() ? Optional.empty() : super.getWorldStateRootHash(); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public Optional<Hash> getWorldStateBlockHash() { |
||||||
|
return isClosedGet() ? Optional.empty() : super.getWorldStateBlockHash(); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public Optional<Bytes> getStorageValueByStorageSlotKey( |
||||||
|
final Hash accountHash, final StorageSlotKey storageSlotKey) { |
||||||
|
return isClosedGet() |
||||||
|
? Optional.empty() |
||||||
|
: super.getStorageValueByStorageSlotKey(accountHash, storageSlotKey); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public boolean isWorldStateAvailable(final Bytes32 rootHash, final Hash blockHash) { |
||||||
|
return !isClosedGet() && super.isWorldStateAvailable(rootHash, blockHash); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public void clear() { |
||||||
|
// snapshot storage does not implement clear
|
||||||
|
throw new StorageException("Snapshot storage does not implement clear"); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public void clearFlatDatabase() { |
||||||
|
// snapshot storage does not implement clear
|
||||||
|
throw new StorageException("Snapshot storage does not implement clear"); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public void clearTrieLog() { |
||||||
|
// snapshot storage does not implement clear
|
||||||
|
throw new StorageException("Snapshot storage does not implement clear"); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public void onCloseStorage() { |
||||||
|
try { |
||||||
|
// when the parent storage clears, close regardless of subscribers
|
||||||
|
doClose(); |
||||||
|
} catch (Exception e) { |
||||||
|
throw new RuntimeException(e); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public void onClearStorage() { |
||||||
|
try { |
||||||
|
// when the parent storage clears, close regardless of subscribers
|
||||||
|
doClose(); |
||||||
|
} catch (Exception e) { |
||||||
|
throw new RuntimeException(e); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public void onClearFlatDatabaseStorage() { |
||||||
|
// when the parent storage clears, close regardless of subscribers
|
||||||
|
try { |
||||||
|
doClose(); |
||||||
|
} catch (Exception e) { |
||||||
|
throw new RuntimeException(e); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public void onClearTrieLog() { |
||||||
|
// when the parent storage clears, close regardless of subscribers
|
||||||
|
try { |
||||||
|
doClose(); |
||||||
|
} catch (Exception e) { |
||||||
|
throw new RuntimeException(e); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
protected synchronized void doClose() throws Exception { |
||||||
|
if (!isClosedGet()) { |
||||||
|
// alert any subscribers we are closing:
|
||||||
|
subscribers.forEach(StorageSubscriber::onCloseStorage); |
||||||
|
|
||||||
|
// close all of the SnappedKeyValueStorages:
|
||||||
|
composedWorldStateStorage.close(); |
||||||
|
|
||||||
|
// unsubscribe the parent worldstate
|
||||||
|
parentWorldStateStorage.unSubscribe(subscribeParentId); |
||||||
|
|
||||||
|
// set storage closed
|
||||||
|
isClosed.set(true); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
public VerkleWorldStateKeyValueStorage getParentWorldStateStorage() { |
||||||
|
return parentWorldStateStorage; |
||||||
|
} |
||||||
|
} |
@ -0,0 +1,223 @@ |
|||||||
|
/* |
||||||
|
* Copyright Hyperledger Besu Contributors. |
||||||
|
* |
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on |
||||||
|
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the |
||||||
|
* specific language governing permissions and limitations under the License. |
||||||
|
* |
||||||
|
* SPDX-License-Identifier: Apache-2.0 |
||||||
|
*/ |
||||||
|
package org.hyperledger.besu.ethereum.trie.diffbased.verkle.storage; |
||||||
|
|
||||||
|
import static org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.ACCOUNT_INFO_STATE; |
||||||
|
import static org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.ACCOUNT_STORAGE_STORAGE; |
||||||
|
import static org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.CODE_STORAGE; |
||||||
|
import static org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.TRIE_BRANCH_STORAGE; |
||||||
|
|
||||||
|
import org.hyperledger.besu.datatypes.Hash; |
||||||
|
import org.hyperledger.besu.datatypes.StorageSlotKey; |
||||||
|
import org.hyperledger.besu.ethereum.storage.StorageProvider; |
||||||
|
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.bonsai.storage.flat.FlatDbStrategy; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.bonsai.storage.flat.FullFlatDbStrategy; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.storage.DiffBasedWorldStateKeyValueStorage; |
||||||
|
import org.hyperledger.besu.ethereum.worldstate.DataStorageFormat; |
||||||
|
import org.hyperledger.besu.ethereum.worldstate.FlatDbMode; |
||||||
|
import org.hyperledger.besu.ethereum.worldstate.WorldStateKeyValueStorage; |
||||||
|
import org.hyperledger.besu.metrics.ObservableMetricsSystem; |
||||||
|
import org.hyperledger.besu.plugin.services.storage.KeyValueStorage; |
||||||
|
import org.hyperledger.besu.plugin.services.storage.KeyValueStorageTransaction; |
||||||
|
import org.hyperledger.besu.plugin.services.storage.SegmentedKeyValueStorage; |
||||||
|
import org.hyperledger.besu.plugin.services.storage.SegmentedKeyValueStorageTransaction; |
||||||
|
|
||||||
|
import java.util.List; |
||||||
|
import java.util.Optional; |
||||||
|
|
||||||
|
import org.apache.tuweni.bytes.Bytes; |
||||||
|
import org.apache.tuweni.bytes.Bytes32; |
||||||
|
|
||||||
|
public class VerkleWorldStateKeyValueStorage extends DiffBasedWorldStateKeyValueStorage |
||||||
|
implements WorldStateKeyValueStorage { |
||||||
|
|
||||||
|
protected FullFlatDbStrategy flatDbStrategy; |
||||||
|
|
||||||
|
/**
 * Creates a verkle world state storage backed by the given storage provider.
 *
 * <p>Composes the account, code, account-storage and trie-branch segments into one
 * segmented store, plus a separate key-value store for trie logs.
 *
 * @param provider source of the underlying key-value storage segments
 * @param metricsSystem metrics sink used by the flat-db strategy
 */
public VerkleWorldStateKeyValueStorage(
    final StorageProvider provider, final ObservableMetricsSystem metricsSystem) {
  super(
      provider.getStorageBySegmentIdentifiers(
          List.of(
              ACCOUNT_INFO_STATE, CODE_STORAGE, ACCOUNT_STORAGE_STORAGE, TRIE_BRANCH_STORAGE)),
      provider.getStorageBySegmentIdentifier(KeyValueSegmentIdentifier.TRIE_LOG_STORAGE),
      metricsSystem);
  // Verkle storage always uses the full flat database layout (see getFlatDbMode).
  this.flatDbStrategy = new FullFlatDbStrategy(metricsSystem);
}
||||||
|
|
||||||
|
/**
 * Creates a verkle world state storage from pre-composed storage components.
 *
 * @param composedWorldStateStorage segmented storage holding all world state segments
 * @param trieLogStorage key-value storage for trie logs
 * @param metricsSystem metrics sink used by the flat-db strategy
 */
public VerkleWorldStateKeyValueStorage(
    final SegmentedKeyValueStorage composedWorldStateStorage,
    final KeyValueStorage trieLogStorage,
    final ObservableMetricsSystem metricsSystem) {
  super(composedWorldStateStorage, trieLogStorage, metricsSystem);
  // Verkle storage always uses the full flat database layout (see getFlatDbMode).
  this.flatDbStrategy = new FullFlatDbStrategy(metricsSystem);
}
||||||
|
|
||||||
|
/** Returns the flat-db access strategy (always a full strategy for verkle). */
@Override
public FlatDbStrategy getFlatDbStrategy() {
  return flatDbStrategy;
}
||||||
|
|
||||||
|
/** Identifies this storage as using the VERKLE data storage format. */
@Override
public DataStorageFormat getDataStorageFormat() {
  return DataStorageFormat.VERKLE;
}
||||||
|
|
||||||
|
/** Verkle storage always operates with a full flat database. */
@Override
public FlatDbMode getFlatDbMode() {
  return FlatDbMode.FULL;
}
||||||
|
|
||||||
|
public Optional<Bytes> getCode(final Bytes32 codeHash, final Hash accountHash) { |
||||||
|
if (codeHash.equals(Hash.EMPTY)) { |
||||||
|
return Optional.of(Bytes.EMPTY); |
||||||
|
} else { |
||||||
|
return getFlatDbStrategy().getFlatCode(codeHash, accountHash, composedWorldStateStorage); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/**
 * Reads an account's serialized state from flat storage by account hash.
 *
 * <p>NOTE(review): the two null arguments are presumably trie-fallback suppliers that a
 * full flat db never needs — confirm against FullFlatDbStrategy.getFlatAccount.
 */
public Optional<Bytes> getAccount(final Hash accountHash) {
  return getFlatDbStrategy().getFlatAccount(null, null, accountHash, composedWorldStateStorage);
}
||||||
|
|
||||||
|
/**
 * Reads a storage slot value for an account from flat storage.
 *
 * <p>NOTE(review): the three null arguments are presumably trie-fallback suppliers that a
 * full flat db never needs — confirm against FullFlatDbStrategy.
 *
 * @param accountHash hash of the owning account's address
 * @param storageSlotKey the slot to read
 * @return the slot value, or empty if absent
 */
public Optional<Bytes> getStorageValueByStorageSlotKey(
    final Hash accountHash, final StorageSlotKey storageSlotKey) {
  return getFlatDbStrategy()
      .getFlatStorageValueByStorageSlotKey(
          null, null, null, accountHash, storageSlotKey, composedWorldStateStorage);
}
||||||
|
|
||||||
|
@Override
public void clear() {
  // NOTE(review): this override only delegates to the superclass and adds nothing;
  // it could be removed with no behavioral change.
  super.clear();
}
||||||
|
|
||||||
|
/**
 * Starts a new write transaction over both the composed world state storage and the
 * trie-log storage.
 */
@Override
public Updater updater() {
  return new Updater(
      composedWorldStateStorage.startTransaction(),
      trieLogStorage.startTransaction(),
      flatDbStrategy);
}
||||||
|
|
||||||
|
public static class Updater implements DiffBasedWorldStateKeyValueStorage.Updater { |
||||||
|
|
||||||
|
private final SegmentedKeyValueStorageTransaction composedWorldStateTransaction; |
||||||
|
private final KeyValueStorageTransaction trieLogStorageTransaction; |
||||||
|
private final FlatDbStrategy flatDbStrategy; |
||||||
|
|
||||||
|
/**
 * Creates an updater over the given transactions.
 *
 * @param composedWorldStateTransaction transaction on the composed world state segments
 * @param trieLogStorageTransaction transaction on the trie-log storage
 * @param flatDbStrategy strategy used to read/write the flat database layout
 */
public Updater(
    final SegmentedKeyValueStorageTransaction composedWorldStateTransaction,
    final KeyValueStorageTransaction trieLogStorageTransaction,
    final FlatDbStrategy flatDbStrategy) {

  this.composedWorldStateTransaction = composedWorldStateTransaction;
  this.trieLogStorageTransaction = trieLogStorageTransaction;
  this.flatDbStrategy = flatDbStrategy;
}
||||||
|
|
||||||
|
public Updater removeCode(final Hash accountHash) { |
||||||
|
flatDbStrategy.removeFlatCode(composedWorldStateTransaction, accountHash); |
||||||
|
return this; |
||||||
|
} |
||||||
|
|
||||||
|
public Updater putCode(final Hash accountHash, final Bytes code) { |
||||||
|
// Skip the hash calculation for empty code
|
||||||
|
final Hash codeHash = code.size() == 0 ? Hash.EMPTY : Hash.hash(code); |
||||||
|
return putCode(accountHash, codeHash, code); |
||||||
|
} |
||||||
|
|
||||||
|
public Updater putCode(final Hash accountHash, final Bytes32 codeHash, final Bytes code) { |
||||||
|
if (code.size() == 0) { |
||||||
|
// Don't save empty values
|
||||||
|
return this; |
||||||
|
} |
||||||
|
flatDbStrategy.putFlatCode(composedWorldStateTransaction, accountHash, codeHash, code); |
||||||
|
return this; |
||||||
|
} |
||||||
|
|
||||||
|
public Updater removeAccountInfoState(final Hash accountHash) { |
||||||
|
flatDbStrategy.removeFlatAccount(composedWorldStateTransaction, accountHash); |
||||||
|
return this; |
||||||
|
} |
||||||
|
|
||||||
|
public Updater putAccountInfoState(final Hash accountHash, final Bytes accountValue) { |
||||||
|
if (accountValue.size() == 0) { |
||||||
|
// Don't save empty values
|
||||||
|
return this; |
||||||
|
} |
||||||
|
flatDbStrategy.putFlatAccount(composedWorldStateTransaction, accountHash, accountValue); |
||||||
|
return this; |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public Updater saveWorldState(final Bytes blockHash, final Bytes32 nodeHash, final Bytes node) { |
||||||
|
composedWorldStateTransaction.put( |
||||||
|
TRIE_BRANCH_STORAGE, Bytes.EMPTY.toArrayUnsafe(), node.toArrayUnsafe()); |
||||||
|
composedWorldStateTransaction.put( |
||||||
|
TRIE_BRANCH_STORAGE, WORLD_ROOT_HASH_KEY, nodeHash.toArrayUnsafe()); |
||||||
|
composedWorldStateTransaction.put( |
||||||
|
TRIE_BRANCH_STORAGE, WORLD_BLOCK_HASH_KEY, blockHash.toArrayUnsafe()); |
||||||
|
return this; |
||||||
|
} |
||||||
|
|
||||||
|
public Updater putStateTrieNode(final Bytes location, final Bytes node) { |
||||||
|
composedWorldStateTransaction.put( |
||||||
|
TRIE_BRANCH_STORAGE, location.toArrayUnsafe(), node.toArrayUnsafe()); |
||||||
|
return this; |
||||||
|
} |
||||||
|
|
||||||
|
public Updater removeStateTrieNode(final Bytes location) { |
||||||
|
composedWorldStateTransaction.remove(TRIE_BRANCH_STORAGE, location.toArrayUnsafe()); |
||||||
|
return this; |
||||||
|
} |
||||||
|
|
||||||
|
public synchronized Updater putStorageValueBySlotHash( |
||||||
|
final Hash accountHash, final Hash slotHash, final Bytes storage) { |
||||||
|
flatDbStrategy.putFlatAccountStorageValueByStorageSlotHash( |
||||||
|
composedWorldStateTransaction, accountHash, slotHash, storage); |
||||||
|
return this; |
||||||
|
} |
||||||
|
|
||||||
|
public synchronized void removeStorageValueBySlotHash( |
||||||
|
final Hash accountHash, final Hash slotHash) { |
||||||
|
flatDbStrategy.removeFlatAccountStorageValueByStorageSlotHash( |
||||||
|
composedWorldStateTransaction, accountHash, slotHash); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public SegmentedKeyValueStorageTransaction getWorldStateTransaction() { |
||||||
|
return composedWorldStateTransaction; |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public KeyValueStorageTransaction getTrieLogStorageTransaction() { |
||||||
|
return trieLogStorageTransaction; |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public void commit() { |
||||||
|
// write the log ahead, then the worldstate
|
||||||
|
trieLogStorageTransaction.commit(); |
||||||
|
composedWorldStateTransaction.commit(); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public void rollback() { |
||||||
|
composedWorldStateTransaction.rollback(); |
||||||
|
trieLogStorageTransaction.rollback(); |
||||||
|
} |
||||||
|
} |
||||||
|
} |
@ -0,0 +1,272 @@ |
|||||||
|
/* |
||||||
|
* Copyright Hyperledger Besu Contributors. |
||||||
|
* |
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on |
||||||
|
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the |
||||||
|
* specific language governing permissions and limitations under the License. |
||||||
|
* |
||||||
|
* SPDX-License-Identifier: Apache-2.0 |
||||||
|
* |
||||||
|
*/ |
||||||
|
package org.hyperledger.besu.ethereum.trie.diffbased.verkle.trielog; |
||||||
|
|
||||||
|
import org.hyperledger.besu.datatypes.AccountValue; |
||||||
|
import org.hyperledger.besu.datatypes.Address; |
||||||
|
import org.hyperledger.besu.datatypes.Hash; |
||||||
|
import org.hyperledger.besu.datatypes.StorageSlotKey; |
||||||
|
import org.hyperledger.besu.ethereum.rlp.BytesValueRLPInput; |
||||||
|
import org.hyperledger.besu.ethereum.rlp.BytesValueRLPOutput; |
||||||
|
import org.hyperledger.besu.ethereum.rlp.RLPInput; |
||||||
|
import org.hyperledger.besu.ethereum.rlp.RLPOutput; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.DiffBasedValue; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.trielog.TrieLogLayer; |
||||||
|
import org.hyperledger.besu.ethereum.worldstate.StateTrieAccountValue; |
||||||
|
import org.hyperledger.besu.plugin.data.BlockHeader; |
||||||
|
import org.hyperledger.besu.plugin.services.trielogs.TrieLog; |
||||||
|
import org.hyperledger.besu.plugin.services.trielogs.TrieLogAccumulator; |
||||||
|
import org.hyperledger.besu.plugin.services.trielogs.TrieLogFactory; |
||||||
|
|
||||||
|
import java.util.Map; |
||||||
|
import java.util.Optional; |
||||||
|
import java.util.Set; |
||||||
|
import java.util.TreeMap; |
||||||
|
import java.util.TreeSet; |
||||||
|
import java.util.function.BiConsumer; |
||||||
|
import java.util.function.Function; |
||||||
|
|
||||||
|
import org.apache.tuweni.bytes.Bytes; |
||||||
|
import org.apache.tuweni.units.bigints.UInt256; |
||||||
|
|
||||||
|
public class TrieLogFactoryImpl implements TrieLogFactory { |
||||||
|
@Override |
||||||
|
public TrieLogLayer create(final TrieLogAccumulator accumulator, final BlockHeader blockHeader) { |
||||||
|
TrieLogLayer layer = new TrieLogLayer(); |
||||||
|
layer.setBlockHash(blockHeader.getBlockHash()); |
||||||
|
layer.setBlockNumber(blockHeader.getNumber()); |
||||||
|
for (final var updatedAccount : accumulator.getAccountsToUpdate().entrySet()) { |
||||||
|
final var bonsaiValue = updatedAccount.getValue(); |
||||||
|
final var oldAccountValue = bonsaiValue.getPrior(); |
||||||
|
final var newAccountValue = bonsaiValue.getUpdated(); |
||||||
|
if (oldAccountValue == null && newAccountValue == null) { |
||||||
|
// by default do not persist empty reads of accounts to the trie log
|
||||||
|
continue; |
||||||
|
} |
||||||
|
layer.addAccountChange(updatedAccount.getKey(), oldAccountValue, newAccountValue); |
||||||
|
} |
||||||
|
|
||||||
|
for (final var updatedCode : accumulator.getCodeToUpdate().entrySet()) { |
||||||
|
layer.addCodeChange( |
||||||
|
updatedCode.getKey(), |
||||||
|
updatedCode.getValue().getPrior(), |
||||||
|
updatedCode.getValue().getUpdated(), |
||||||
|
blockHeader.getBlockHash()); |
||||||
|
} |
||||||
|
|
||||||
|
for (final var updatesStorage : accumulator.getStorageToUpdate().entrySet()) { |
||||||
|
final Address address = updatesStorage.getKey(); |
||||||
|
for (final var slotUpdate : updatesStorage.getValue().entrySet()) { |
||||||
|
var val = slotUpdate.getValue(); |
||||||
|
|
||||||
|
if (val.getPrior() == null && val.getUpdated() == null) { |
||||||
|
// by default do not persist empty reads to the trie log
|
||||||
|
continue; |
||||||
|
} |
||||||
|
|
||||||
|
System.out.println(val.getPrior() + " " + val.getUpdated()); |
||||||
|
layer.addStorageChange(address, slotUpdate.getKey(), val.getPrior(), val.getUpdated()); |
||||||
|
} |
||||||
|
} |
||||||
|
return layer; |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public byte[] serialize(final TrieLog layer) { |
||||||
|
final BytesValueRLPOutput rlpLog = new BytesValueRLPOutput(); |
||||||
|
writeTo(layer, rlpLog); |
||||||
|
return rlpLog.encoded().toArrayUnsafe(); |
||||||
|
} |
||||||
|
|
||||||
|
public static void writeTo(final TrieLog layer, final RLPOutput output) { |
||||||
|
layer.freeze(); |
||||||
|
|
||||||
|
final Set<Address> addresses = new TreeSet<>(); |
||||||
|
addresses.addAll(layer.getAccountChanges().keySet()); |
||||||
|
addresses.addAll(layer.getCodeChanges().keySet()); |
||||||
|
addresses.addAll(layer.getStorageChanges().keySet()); |
||||||
|
|
||||||
|
output.startList(); // container
|
||||||
|
output.writeBytes(layer.getBlockHash()); |
||||||
|
|
||||||
|
for (final Address address : addresses) { |
||||||
|
output.startList(); // this change
|
||||||
|
output.writeBytes(address); |
||||||
|
|
||||||
|
final TrieLog.LogTuple<AccountValue> accountChange = layer.getAccountChanges().get(address); |
||||||
|
if (accountChange == null || accountChange.isUnchanged()) { |
||||||
|
output.writeNull(); |
||||||
|
} else { |
||||||
|
writeRlp(accountChange, output, (o, sta) -> sta.writeTo(o)); |
||||||
|
} |
||||||
|
|
||||||
|
final TrieLog.LogTuple<Bytes> codeChange = layer.getCodeChanges().get(address); |
||||||
|
if (codeChange == null || codeChange.isUnchanged()) { |
||||||
|
output.writeNull(); |
||||||
|
} else { |
||||||
|
writeRlp(codeChange, output, RLPOutput::writeBytes); |
||||||
|
} |
||||||
|
|
||||||
|
final Map<StorageSlotKey, TrieLog.LogTuple<UInt256>> storageChanges = |
||||||
|
layer.getStorageChanges().get(address); |
||||||
|
if (storageChanges == null) { |
||||||
|
output.writeNull(); |
||||||
|
} else { |
||||||
|
output.startList(); |
||||||
|
for (final Map.Entry<StorageSlotKey, TrieLog.LogTuple<UInt256>> storageChangeEntry : |
||||||
|
storageChanges.entrySet()) { |
||||||
|
output.startList(); |
||||||
|
// do not write slotKey, it is not used in mainnet bonsai trielogs
|
||||||
|
StorageSlotKey storageSlotKey = storageChangeEntry.getKey(); |
||||||
|
output.writeBytes(storageSlotKey.getSlotHash()); |
||||||
|
writeInnerRlp(storageChangeEntry.getValue(), output, RLPOutput::writeBytes); |
||||||
|
if (storageSlotKey.getSlotKey().isPresent()) { |
||||||
|
output.writeUInt256Scalar(storageSlotKey.getSlotKey().get()); |
||||||
|
} |
||||||
|
output.endList(); |
||||||
|
} |
||||||
|
output.endList(); |
||||||
|
} |
||||||
|
|
||||||
|
output.endList(); // this change
|
||||||
|
} |
||||||
|
output.endList(); // container
|
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public TrieLogLayer deserialize(final byte[] bytes) { |
||||||
|
return readFrom(new BytesValueRLPInput(Bytes.wrap(bytes), false)); |
||||||
|
} |
||||||
|
|
||||||
|
public static TrieLogLayer readFrom(final RLPInput input) { |
||||||
|
final TrieLogLayer newLayer = new TrieLogLayer(); |
||||||
|
|
||||||
|
input.enterList(); |
||||||
|
newLayer.setBlockHash(Hash.wrap(input.readBytes32())); |
||||||
|
|
||||||
|
while (!input.isEndOfCurrentList()) { |
||||||
|
input.enterList(); |
||||||
|
final Address address = Address.readFrom(input); |
||||||
|
|
||||||
|
if (input.nextIsNull()) { |
||||||
|
input.skipNext(); |
||||||
|
} else { |
||||||
|
input.enterList(); |
||||||
|
final StateTrieAccountValue oldValue = nullOrValue(input, StateTrieAccountValue::readFrom); |
||||||
|
final StateTrieAccountValue newValue = nullOrValue(input, StateTrieAccountValue::readFrom); |
||||||
|
final boolean isCleared = getOptionalIsCleared(input); |
||||||
|
input.leaveList(); |
||||||
|
newLayer |
||||||
|
.getAccountChanges() |
||||||
|
.put(address, new DiffBasedValue<>(oldValue, newValue, isCleared)); |
||||||
|
} |
||||||
|
|
||||||
|
if (input.nextIsNull()) { |
||||||
|
input.skipNext(); |
||||||
|
} else { |
||||||
|
input.enterList(); |
||||||
|
final Bytes oldCode = nullOrValue(input, RLPInput::readBytes); |
||||||
|
final Bytes newCode = nullOrValue(input, RLPInput::readBytes); |
||||||
|
final boolean isCleared = getOptionalIsCleared(input); |
||||||
|
input.leaveList(); |
||||||
|
newLayer.getCodeChanges().put(address, new DiffBasedValue<>(oldCode, newCode, isCleared)); |
||||||
|
} |
||||||
|
|
||||||
|
if (input.nextIsNull()) { |
||||||
|
input.skipNext(); |
||||||
|
} else { |
||||||
|
final Map<StorageSlotKey, DiffBasedValue<UInt256>> storageChanges = new TreeMap<>(); |
||||||
|
input.enterList(); |
||||||
|
while (!input.isEndOfCurrentList()) { |
||||||
|
int storageElementlistSize = input.enterList(); |
||||||
|
final Hash slotHash = Hash.wrap(input.readBytes32()); |
||||||
|
final UInt256 oldValue = |
||||||
|
nullOrValue(input, rlpInput -> UInt256.fromBytes(rlpInput.readBytes())); |
||||||
|
final UInt256 newValue = |
||||||
|
nullOrValue(input, rlpInput -> UInt256.fromBytes(rlpInput.readBytes())); |
||||||
|
final boolean isCleared = getOptionalIsCleared(input); |
||||||
|
final Optional<UInt256> slotKey = |
||||||
|
Optional.of(storageElementlistSize) |
||||||
|
.filter(listSize -> listSize == 5) |
||||||
|
.map(__ -> input.readUInt256Scalar()) |
||||||
|
.or(Optional::empty); |
||||||
|
|
||||||
|
final StorageSlotKey storageSlotKey = new StorageSlotKey(slotHash, slotKey); |
||||||
|
storageChanges.put(storageSlotKey, new DiffBasedValue<>(oldValue, newValue, isCleared)); |
||||||
|
input.leaveList(); |
||||||
|
} |
||||||
|
input.leaveList(); |
||||||
|
newLayer.getStorageChanges().put(address, storageChanges); |
||||||
|
} |
||||||
|
|
||||||
|
// TODO add trie nodes
|
||||||
|
|
||||||
|
// lenient leave list for forward compatible additions.
|
||||||
|
input.leaveListLenient(); |
||||||
|
} |
||||||
|
input.leaveListLenient(); |
||||||
|
newLayer.freeze(); |
||||||
|
|
||||||
|
return newLayer; |
||||||
|
} |
||||||
|
|
||||||
|
protected static <T> T nullOrValue(final RLPInput input, final Function<RLPInput, T> reader) { |
||||||
|
if (input.nextIsNull()) { |
||||||
|
input.skipNext(); |
||||||
|
return null; |
||||||
|
} else { |
||||||
|
return reader.apply(input); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
protected static boolean getOptionalIsCleared(final RLPInput input) { |
||||||
|
return Optional.of(input.isEndOfCurrentList()) |
||||||
|
.filter(isEnd -> !isEnd) // isCleared is optional
|
||||||
|
.map(__ -> nullOrValue(input, RLPInput::readInt)) |
||||||
|
.filter(i -> i == 1) |
||||||
|
.isPresent(); |
||||||
|
} |
||||||
|
|
||||||
|
public static <T> void writeRlp( |
||||||
|
final TrieLog.LogTuple<T> value, |
||||||
|
final RLPOutput output, |
||||||
|
final BiConsumer<RLPOutput, T> writer) { |
||||||
|
output.startList(); |
||||||
|
writeInnerRlp(value, output, writer); |
||||||
|
output.endList(); |
||||||
|
} |
||||||
|
|
||||||
|
public static <T> void writeInnerRlp( |
||||||
|
final TrieLog.LogTuple<T> value, |
||||||
|
final RLPOutput output, |
||||||
|
final BiConsumer<RLPOutput, T> writer) { |
||||||
|
if (value.getPrior() == null) { |
||||||
|
output.writeNull(); |
||||||
|
} else { |
||||||
|
writer.accept(output, value.getPrior()); |
||||||
|
} |
||||||
|
if (value.getUpdated() == null) { |
||||||
|
output.writeNull(); |
||||||
|
} else { |
||||||
|
writer.accept(output, value.getUpdated()); |
||||||
|
} |
||||||
|
if (!value.isCleared()) { |
||||||
|
output.writeNull(); |
||||||
|
} else { |
||||||
|
output.writeInt(1); |
||||||
|
} |
||||||
|
} |
||||||
|
} |
@ -0,0 +1,361 @@ |
|||||||
|
/* |
||||||
|
* Copyright ConsenSys AG. |
||||||
|
* |
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on |
||||||
|
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the |
||||||
|
* specific language governing permissions and limitations under the License. |
||||||
|
* |
||||||
|
* SPDX-License-Identifier: Apache-2.0 |
||||||
|
* |
||||||
|
*/ |
||||||
|
|
||||||
|
package org.hyperledger.besu.ethereum.trie.diffbased.verkle.worldview; |
||||||
|
|
||||||
|
import static org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.TRIE_BRANCH_STORAGE; |
||||||
|
|
||||||
|
import org.hyperledger.besu.datatypes.Address; |
||||||
|
import org.hyperledger.besu.datatypes.Hash; |
||||||
|
import org.hyperledger.besu.datatypes.StorageSlotKey; |
||||||
|
import org.hyperledger.besu.ethereum.core.MutableWorldState; |
||||||
|
import org.hyperledger.besu.ethereum.trie.NodeLoader; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.DiffBasedValue; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.cache.DiffBasedCachedWorldStorageManager; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.storage.DiffBasedWorldStateKeyValueStorage; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.trielog.TrieLogManager; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.worldview.DiffBasedWorldState; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.worldview.accumulator.DiffBasedWorldStateUpdateAccumulator; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.worldview.accumulator.preload.StorageConsumingMap; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.VerkleAccount; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.VerkleWorldStateProvider; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.storage.VerkleLayeredWorldStateKeyValueStorage; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.storage.VerkleWorldStateKeyValueStorage; |
||||||
|
import org.hyperledger.besu.ethereum.verkletrie.VerkleTrie; |
||||||
|
import org.hyperledger.besu.ethereum.verkletrie.VerkleTrieKeyValueGenerator; |
||||||
|
import org.hyperledger.besu.evm.account.Account; |
||||||
|
import org.hyperledger.besu.evm.internal.EvmConfiguration; |
||||||
|
import org.hyperledger.besu.plugin.services.storage.SegmentIdentifier; |
||||||
|
import org.hyperledger.besu.plugin.services.storage.SegmentedKeyValueStorageTransaction; |
||||||
|
|
||||||
|
import java.util.Map; |
||||||
|
import java.util.Optional; |
||||||
|
import java.util.stream.Stream; |
||||||
|
import javax.annotation.Nonnull; |
||||||
|
|
||||||
|
import kotlin.Pair; |
||||||
|
import org.apache.tuweni.bytes.Bytes; |
||||||
|
import org.apache.tuweni.bytes.Bytes32; |
||||||
|
import org.apache.tuweni.units.bigints.UInt256; |
||||||
|
import org.slf4j.Logger; |
||||||
|
import org.slf4j.LoggerFactory; |
||||||
|
|
||||||
|
public class VerkleWorldState extends DiffBasedWorldState { |
||||||
|
|
||||||
|
private static final Logger LOG = LoggerFactory.getLogger(VerkleWorldState.class); |
||||||
|
|
||||||
|
private final VerkleTrieKeyValueGenerator verkleTrieKeyValueGenerator = |
||||||
|
new VerkleTrieKeyValueGenerator(); |
||||||
|
|
||||||
|
public VerkleWorldState( |
||||||
|
final VerkleWorldStateProvider archive, |
||||||
|
final VerkleWorldStateKeyValueStorage worldStateKeyValueStorage, |
||||||
|
final EvmConfiguration evmConfiguration) { |
||||||
|
this( |
||||||
|
worldStateKeyValueStorage, |
||||||
|
archive.getCachedWorldStorageManager(), |
||||||
|
archive.getTrieLogManager(), |
||||||
|
evmConfiguration); |
||||||
|
} |
||||||
|
|
||||||
|
protected VerkleWorldState( |
||||||
|
final VerkleWorldStateKeyValueStorage worldStateKeyValueStorage, |
||||||
|
final DiffBasedCachedWorldStorageManager cachedWorldStorageManager, |
||||||
|
final TrieLogManager trieLogManager, |
||||||
|
final EvmConfiguration evmConfiguration) { |
||||||
|
super(worldStateKeyValueStorage, cachedWorldStorageManager, trieLogManager); |
||||||
|
this.setAccumulator( |
||||||
|
new VerkleWorldStateUpdateAccumulator( |
||||||
|
this, (addr, value) -> {}, (addr, value) -> {}, evmConfiguration)); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public VerkleWorldStateKeyValueStorage getWorldStateStorage() { |
||||||
|
return (VerkleWorldStateKeyValueStorage) worldStateKeyValueStorage; |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
protected Hash calculateRootHash( |
||||||
|
final Optional<DiffBasedWorldStateKeyValueStorage.Updater> maybeStateUpdater, |
||||||
|
final DiffBasedWorldStateUpdateAccumulator<?> worldStateUpdater) { |
||||||
|
return internalCalculateRootHash( |
||||||
|
maybeStateUpdater.map(VerkleWorldStateKeyValueStorage.Updater.class::cast), |
||||||
|
(VerkleWorldStateUpdateAccumulator) worldStateUpdater); |
||||||
|
} |
||||||
|
|
||||||
|
protected Hash internalCalculateRootHash( |
||||||
|
final Optional<VerkleWorldStateKeyValueStorage.Updater> maybeStateUpdater, |
||||||
|
final VerkleWorldStateUpdateAccumulator worldStateUpdater) { |
||||||
|
|
||||||
|
final VerkleTrie stateTrie = |
||||||
|
createTrie( |
||||||
|
(location, hash) -> worldStateKeyValueStorage.getStateTrieNode(location), |
||||||
|
worldStateRootHash); |
||||||
|
// clearStorage(maybeStateUpdater, worldStateUpdater);
|
||||||
|
|
||||||
|
Stream<Map.Entry<Address, StorageConsumingMap<StorageSlotKey, DiffBasedValue<UInt256>>>> |
||||||
|
storageStream = worldStateUpdater.getStorageToUpdate().entrySet().stream(); |
||||||
|
if (maybeStateUpdater.isEmpty()) { |
||||||
|
storageStream = |
||||||
|
storageStream |
||||||
|
.parallel(); // if we are not updating the state updater we can use parallel stream
|
||||||
|
} |
||||||
|
storageStream.forEach( |
||||||
|
addressMapEntry -> |
||||||
|
updateAccountStorageState( |
||||||
|
stateTrie, maybeStateUpdater, worldStateUpdater, addressMapEntry)); |
||||||
|
|
||||||
|
// Third update the code. This has the side effect of ensuring a code hash is calculated.
|
||||||
|
updateCode(stateTrie, maybeStateUpdater, worldStateUpdater); |
||||||
|
|
||||||
|
// for manicured tries and composting, collect branches here (not implemented)
|
||||||
|
updateTheAccounts(maybeStateUpdater, worldStateUpdater, stateTrie); |
||||||
|
|
||||||
|
LOG.info("start commit "); |
||||||
|
maybeStateUpdater.ifPresent( |
||||||
|
bonsaiUpdater -> |
||||||
|
stateTrie.commit( |
||||||
|
(location, hash, value) -> { |
||||||
|
writeTrieNode( |
||||||
|
TRIE_BRANCH_STORAGE, |
||||||
|
bonsaiUpdater.getWorldStateTransaction(), |
||||||
|
location, |
||||||
|
value); |
||||||
|
})); |
||||||
|
|
||||||
|
LOG.info("end commit "); |
||||||
|
LOG.info(stateTrie.toDotTree()); |
||||||
|
final Bytes32 rootHash = stateTrie.getRootHash(); |
||||||
|
|
||||||
|
LOG.info("end commit "); |
||||||
|
return Hash.wrap(rootHash); |
||||||
|
} |
||||||
|
|
||||||
|
private void updateTheAccounts( |
||||||
|
final Optional<VerkleWorldStateKeyValueStorage.Updater> maybeStateUpdater, |
||||||
|
final VerkleWorldStateUpdateAccumulator worldStateUpdater, |
||||||
|
final VerkleTrie stateTrie) { |
||||||
|
for (final Map.Entry<Address, DiffBasedValue<VerkleAccount>> accountUpdate : |
||||||
|
worldStateUpdater.getAccountsToUpdate().entrySet()) { |
||||||
|
final Address accountKey = accountUpdate.getKey(); |
||||||
|
final DiffBasedValue<VerkleAccount> bonsaiValue = accountUpdate.getValue(); |
||||||
|
final VerkleAccount priorAccount = bonsaiValue.getPrior(); |
||||||
|
final VerkleAccount updatedAccount = bonsaiValue.getUpdated(); |
||||||
|
if (updatedAccount == null) { |
||||||
|
final Hash addressHash = hashAndSavePreImage(accountKey); |
||||||
|
verkleTrieKeyValueGenerator |
||||||
|
.generateKeysForAccount(accountKey) |
||||||
|
.forEach( |
||||||
|
bytes -> { |
||||||
|
System.out.println("remove " + bytes); |
||||||
|
stateTrie.remove(bytes); |
||||||
|
}); |
||||||
|
maybeStateUpdater.ifPresent( |
||||||
|
bonsaiUpdater -> bonsaiUpdater.removeAccountInfoState(addressHash)); |
||||||
|
} else { |
||||||
|
final Bytes priorValue = priorAccount == null ? null : priorAccount.serializeAccount(); |
||||||
|
final Bytes accountValue = updatedAccount.serializeAccount(); |
||||||
|
if (!accountValue.equals(priorValue)) { |
||||||
|
verkleTrieKeyValueGenerator |
||||||
|
.generateKeyValuesForAccount( |
||||||
|
accountKey, |
||||||
|
updatedAccount.getNonce(), |
||||||
|
updatedAccount.getBalance(), |
||||||
|
updatedAccount.getCodeHash()) |
||||||
|
.forEach( |
||||||
|
(bytes, bytes2) -> { |
||||||
|
System.out.println("add " + bytes + " " + bytes2); |
||||||
|
stateTrie.put(bytes, bytes2); |
||||||
|
}); |
||||||
|
maybeStateUpdater.ifPresent( |
||||||
|
bonsaiUpdater -> |
||||||
|
bonsaiUpdater.putAccountInfoState(hashAndSavePreImage(accountKey), accountValue)); |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
private void updateCode( |
||||||
|
final VerkleTrie stateTrie, |
||||||
|
final Optional<VerkleWorldStateKeyValueStorage.Updater> maybeStateUpdater, |
||||||
|
final VerkleWorldStateUpdateAccumulator worldStateUpdater) { |
||||||
|
maybeStateUpdater.ifPresent( |
||||||
|
bonsaiUpdater -> { |
||||||
|
for (final Map.Entry<Address, DiffBasedValue<Bytes>> codeUpdate : |
||||||
|
worldStateUpdater.getCodeToUpdate().entrySet()) { |
||||||
|
final Bytes previousCode = codeUpdate.getValue().getPrior(); |
||||||
|
final Bytes updatedCode = codeUpdate.getValue().getUpdated(); |
||||||
|
final Address address = codeUpdate.getKey(); |
||||||
|
final Hash accountHash = address.addressHash(); |
||||||
|
if (updatedCode == null) { |
||||||
|
verkleTrieKeyValueGenerator |
||||||
|
.generateKeysForCode(address, previousCode) |
||||||
|
.forEach( |
||||||
|
bytes -> { |
||||||
|
System.out.println("remove code " + bytes); |
||||||
|
stateTrie.remove(bytes); |
||||||
|
}); |
||||||
|
bonsaiUpdater.removeCode(accountHash); |
||||||
|
} else { |
||||||
|
if (updatedCode.isEmpty()) { |
||||||
|
final Hash codeHash = updatedCode.size() == 0 ? Hash.EMPTY : Hash.hash(updatedCode); |
||||||
|
verkleTrieKeyValueGenerator |
||||||
|
.generateKeyValuesForCode(address, codeHash, updatedCode) |
||||||
|
.forEach( |
||||||
|
(bytes, bytes2) -> { |
||||||
|
// System.out.println("add code " + bytes + " " + bytes2);
|
||||||
|
stateTrie.put(bytes, bytes2); |
||||||
|
}); |
||||||
|
bonsaiUpdater.removeCode(accountHash); |
||||||
|
} else { |
||||||
|
final Hash codeHash = updatedCode.size() == 0 ? Hash.EMPTY : Hash.hash(updatedCode); |
||||||
|
verkleTrieKeyValueGenerator |
||||||
|
.generateKeyValuesForCode(address, codeHash, updatedCode) |
||||||
|
.forEach( |
||||||
|
(bytes, bytes2) -> { |
||||||
|
System.out.println("add code " + bytes + " " + bytes2); |
||||||
|
stateTrie.put(bytes, bytes2); |
||||||
|
}); |
||||||
|
bonsaiUpdater.putCode(accountHash, null, updatedCode); |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
}); |
||||||
|
} |
||||||
|
|
||||||
|
private void updateAccountStorageState( |
||||||
|
final VerkleTrie stateTrie, |
||||||
|
final Optional<VerkleWorldStateKeyValueStorage.Updater> maybeStateUpdater, |
||||||
|
final VerkleWorldStateUpdateAccumulator worldStateUpdater, |
||||||
|
final Map.Entry<Address, StorageConsumingMap<StorageSlotKey, DiffBasedValue<UInt256>>> |
||||||
|
storageAccountUpdate) { |
||||||
|
final Address updatedAddress = storageAccountUpdate.getKey(); |
||||||
|
final Hash updatedAddressHash = updatedAddress.addressHash(); |
||||||
|
if (worldStateUpdater.getAccountsToUpdate().containsKey(updatedAddress)) { |
||||||
|
|
||||||
|
// for manicured tries and composting, collect branches here (not implemented)
|
||||||
|
for (final Map.Entry<StorageSlotKey, DiffBasedValue<UInt256>> storageUpdate : |
||||||
|
storageAccountUpdate.getValue().entrySet()) { |
||||||
|
final Hash slotHash = storageUpdate.getKey().getSlotHash(); |
||||||
|
final UInt256 updatedStorage = storageUpdate.getValue().getUpdated(); |
||||||
|
if (updatedStorage == null) { |
||||||
|
verkleTrieKeyValueGenerator |
||||||
|
.generateKeysForStorage(updatedAddress, storageUpdate.getKey()) |
||||||
|
.forEach( |
||||||
|
bytes -> { |
||||||
|
System.out.println("remove storage" + bytes); |
||||||
|
stateTrie.remove(bytes); |
||||||
|
}); |
||||||
|
maybeStateUpdater.ifPresent( |
||||||
|
diffBasedUpdater -> |
||||||
|
diffBasedUpdater.removeStorageValueBySlotHash(updatedAddressHash, slotHash)); |
||||||
|
} else { |
||||||
|
final Pair<Bytes, Bytes> storage = |
||||||
|
verkleTrieKeyValueGenerator.generateKeyValuesForStorage( |
||||||
|
updatedAddress, storageUpdate.getKey(), updatedStorage); |
||||||
|
System.out.println("add storage " + storage.getFirst() + " " + storage.getSecond()); |
||||||
|
stateTrie |
||||||
|
.put(storage.getFirst(), storage.getSecond()) |
||||||
|
.ifPresentOrElse( |
||||||
|
bytes -> { |
||||||
|
System.out.println("found old key " + bytes); |
||||||
|
storageUpdate.getValue().setPrior(UInt256.fromBytes(bytes)); |
||||||
|
}, |
||||||
|
() -> { |
||||||
|
storageUpdate.getValue().setPrior(null); |
||||||
|
}); |
||||||
|
if (updatedStorage.equals(UInt256.ZERO)) { |
||||||
|
maybeStateUpdater.ifPresent( |
||||||
|
bonsaiUpdater -> |
||||||
|
bonsaiUpdater.removeStorageValueBySlotHash(updatedAddressHash, slotHash)); |
||||||
|
} else { |
||||||
|
maybeStateUpdater.ifPresent( |
||||||
|
bonsaiUpdater -> |
||||||
|
bonsaiUpdater.putStorageValueBySlotHash( |
||||||
|
updatedAddressHash, slotHash, updatedStorage)); |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public MutableWorldState freeze() { |
||||||
|
this.isFrozen = true; |
||||||
|
this.worldStateKeyValueStorage = |
||||||
|
new VerkleLayeredWorldStateKeyValueStorage(getWorldStateStorage()); |
||||||
|
return this; |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public Account get(final Address address) { |
||||||
|
return getWorldStateStorage() |
||||||
|
.getAccount(address.addressHash()) |
||||||
|
.map(bytes -> VerkleAccount.fromRLP(accumulator, address, bytes, true)) |
||||||
|
.orElse(null); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public Optional<Bytes> getCode(@Nonnull final Address address, final Hash codeHash) { |
||||||
|
return getWorldStateStorage().getCode(codeHash, address.addressHash()); |
||||||
|
} |
||||||
|
|
||||||
|
protected void writeTrieNode( |
||||||
|
final SegmentIdentifier segmentId, |
||||||
|
final SegmentedKeyValueStorageTransaction tx, |
||||||
|
final Bytes location, |
||||||
|
final Bytes value) { |
||||||
|
tx.put(segmentId, location.toArrayUnsafe(), value.toArrayUnsafe()); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public UInt256 getStorageValue(final Address address, final UInt256 storageKey) { |
||||||
|
return getStorageValueByStorageSlotKey(address, new StorageSlotKey(storageKey)) |
||||||
|
.orElse(UInt256.ZERO); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public Optional<UInt256> getStorageValueByStorageSlotKey( |
||||||
|
final Address address, final StorageSlotKey storageSlotKey) { |
||||||
|
return getWorldStateStorage() |
||||||
|
.getStorageValueByStorageSlotKey(address.addressHash(), storageSlotKey) |
||||||
|
.map(UInt256::fromBytes); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public UInt256 getPriorStorageValue(final Address address, final UInt256 storageKey) { |
||||||
|
return getStorageValue(address, storageKey); |
||||||
|
} |
||||||
|
|
||||||
|
@Override |
||||||
|
public Map<Bytes32, Bytes> getAllAccountStorage(final Address address, final Hash rootHash) { |
||||||
|
throw new UnsupportedOperationException("getAllAccountStorage not yet available for verkle"); |
||||||
|
} |
||||||
|
|
||||||
|
// Builds a VerkleTrie rooted at the given hash, loading nodes through the supplied loader.
private VerkleTrie createTrie(final NodeLoader nodeLoader, final Bytes32 rootHash) {
  return new VerkleTrie(nodeLoader, rootHash);
}
||||||
|
|
||||||
|
/**
 * Hashes the given bytes. Despite the name, this base implementation does not persist the
 * hash preimage; subclasses may override to save it.
 *
 * @param value the bytes to hash
 * @return the hash of {@code value}
 */
protected Hash hashAndSavePreImage(final Bytes value) {
  // by default do not save hash preimages
  return Hash.hash(value);
}
||||||
|
|
||||||
|
@Override
protected Hash getEmptyTrieHash() {
  // An empty verkle trie is represented by the all-zero hash.
  return Hash.wrap(Bytes32.ZERO);
}
||||||
|
} |
@ -0,0 +1,104 @@ |
|||||||
|
/* |
||||||
|
* Copyright ConsenSys AG. |
||||||
|
* |
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on |
||||||
|
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the |
||||||
|
* specific language governing permissions and limitations under the License. |
||||||
|
* |
||||||
|
* SPDX-License-Identifier: Apache-2.0 |
||||||
|
* |
||||||
|
*/ |
||||||
|
|
||||||
|
package org.hyperledger.besu.ethereum.trie.diffbased.verkle.worldview; |
||||||
|
|
||||||
|
import org.hyperledger.besu.datatypes.AccountValue; |
||||||
|
import org.hyperledger.besu.datatypes.Address; |
||||||
|
import org.hyperledger.besu.datatypes.Hash; |
||||||
|
import org.hyperledger.besu.datatypes.StorageSlotKey; |
||||||
|
import org.hyperledger.besu.datatypes.Wei; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.DiffBasedValue; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.worldview.DiffBasedWorldView; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.worldview.accumulator.DiffBasedWorldStateUpdateAccumulator; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.worldview.accumulator.preload.Consumer; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.VerkleAccount; |
||||||
|
import org.hyperledger.besu.evm.internal.EvmConfiguration; |
||||||
|
import org.hyperledger.besu.evm.worldstate.UpdateTrackingAccount; |
||||||
|
|
||||||
|
/**
 * Verkle-specific {@link DiffBasedWorldStateUpdateAccumulator}: accumulates account and storage
 * changes against a {@link DiffBasedWorldView}, materialising accounts as {@link VerkleAccount}
 * instances through the factory overrides below.
 */
public class VerkleWorldStateUpdateAccumulator
    extends DiffBasedWorldStateUpdateAccumulator<VerkleAccount> {

  /**
   * @param world the world view the accumulator reads prior state from
   * @param accountPreloader callback used to preload touched account values
   * @param storagePreloader callback used to preload touched storage slot keys
   * @param evmConfiguration EVM configuration forwarded to the base accumulator
   */
  public VerkleWorldStateUpdateAccumulator(
      final DiffBasedWorldView world,
      final Consumer<DiffBasedValue<VerkleAccount>> accountPreloader,
      final Consumer<StorageSlotKey> storagePreloader,
      final EvmConfiguration evmConfiguration) {
    super(world, accountPreloader, storagePreloader, evmConfiguration);
  }

  /** Creates an independent accumulator over the same world view and clones pending updates. */
  @Override
  public DiffBasedWorldStateUpdateAccumulator<VerkleAccount> copy() {
    final VerkleWorldStateUpdateAccumulator copy =
        new VerkleWorldStateUpdateAccumulator(
            wrappedWorldView(),
            getAccountPreloader(),
            getStoragePreloader(),
            getEvmConfiguration());
    copy.cloneFromUpdater(this);
    return copy;
  }

  // Deep-copies an account via VerkleAccount's copy constructor.
  @Override
  protected VerkleAccount copyAccount(final VerkleAccount account) {
    return new VerkleAccount(account);
  }

  // Copies an account, rebinding it to the given world view with the requested mutability.
  @Override
  protected VerkleAccount copyAccount(
      final VerkleAccount toCopy, final DiffBasedWorldView context, final boolean mutable) {
    return new VerkleAccount(toCopy, context, mutable);
  }

  // Builds an account from a previously persisted state-trie account value.
  @Override
  protected VerkleAccount createAccount(
      final DiffBasedWorldView context,
      final Address address,
      final AccountValue stateTrieAccount,
      final boolean mutable) {
    return new VerkleAccount(context, address, stateTrieAccount, mutable);
  }

  // Builds an account from its individual fields.
  @Override
  protected VerkleAccount createAccount(
      final DiffBasedWorldView context,
      final Address address,
      final Hash addressHash,
      final long nonce,
      final Wei balance,
      final Hash storageRoot,
      final Hash codeHash,
      final boolean mutable) {
    return new VerkleAccount(
        context, address, addressHash, nonce, balance, storageRoot, codeHash, mutable);
  }

  // Builds an account from an UpdateTrackingAccount wrapper.
  @Override
  protected VerkleAccount createAccount(
      final DiffBasedWorldView context, final UpdateTrackingAccount<VerkleAccount> tracked) {
    return new VerkleAccount(context, tracked);
  }

  // Delegates diff-consistency checking to VerkleAccount's implementation.
  @Override
  protected void assertCloseEnoughForDiffing(
      final VerkleAccount source, final AccountValue account, final String context) {
    VerkleAccount.assertCloseEnoughForDiffing(source, account, context);
  }

  // Returning false means identical old/new values are NOT skipped while rolling account
  // updates — NOTE(review): presumably required by verkle trie-log semantics; confirm.
  @Override
  protected boolean shouldIgnoreIdenticalValuesDuringAccountRollingUpdate() {
    return false;
  }
}
@ -0,0 +1,584 @@ |
|||||||
|
/* |
||||||
|
* Copyright ConsenSys AG. |
||||||
|
* |
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on |
||||||
|
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the |
||||||
|
* specific language governing permissions and limitations under the License. |
||||||
|
* |
||||||
|
* SPDX-License-Identifier: Apache-2.0 |
||||||
|
* |
||||||
|
*/ |
||||||
|
|
||||||
|
package org.hyperledger.besu.ethereum.trie.diffbased.verkle; |
||||||
|
|
||||||
|
import static org.assertj.core.api.Assertions.assertThat; |
||||||
|
import static org.mockito.Mockito.mock; |
||||||
|
|
||||||
|
import org.hyperledger.besu.datatypes.Address; |
||||||
|
import org.hyperledger.besu.datatypes.Hash; |
||||||
|
import org.hyperledger.besu.datatypes.Wei; |
||||||
|
import org.hyperledger.besu.ethereum.chain.Blockchain; |
||||||
|
import org.hyperledger.besu.ethereum.core.BlockHeader; |
||||||
|
import org.hyperledger.besu.ethereum.core.BlockHeaderTestFixture; |
||||||
|
import org.hyperledger.besu.ethereum.core.Difficulty; |
||||||
|
import org.hyperledger.besu.ethereum.core.InMemoryKeyValueStorageProvider; |
||||||
|
import org.hyperledger.besu.ethereum.mainnet.MainnetBlockHeaderFunctions; |
||||||
|
import org.hyperledger.besu.ethereum.rlp.BytesValueRLPInput; |
||||||
|
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.common.trielog.TrieLogLayer; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.storage.VerkleWorldStateKeyValueStorage; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.trielog.TrieLogFactoryImpl; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.worldview.VerkleWorldState; |
||||||
|
import org.hyperledger.besu.ethereum.trie.diffbased.verkle.worldview.VerkleWorldStateUpdateAccumulator; |
||||||
|
import org.hyperledger.besu.evm.account.MutableAccount; |
||||||
|
import org.hyperledger.besu.evm.internal.EvmConfiguration; |
||||||
|
import org.hyperledger.besu.evm.log.LogsBloomFilter; |
||||||
|
import org.hyperledger.besu.evm.worldstate.WorldUpdater; |
||||||
|
import org.hyperledger.besu.metrics.noop.NoOpMetricsSystem; |
||||||
|
import org.hyperledger.besu.plugin.services.storage.KeyValueStorage; |
||||||
|
import org.hyperledger.besu.plugin.services.storage.KeyValueStorageTransaction; |
||||||
|
|
||||||
|
import java.util.Optional; |
||||||
|
import java.util.stream.Collectors; |
||||||
|
|
||||||
|
import org.apache.tuweni.bytes.Bytes; |
||||||
|
import org.apache.tuweni.bytes.Bytes32; |
||||||
|
import org.apache.tuweni.units.bigints.UInt256; |
||||||
|
import org.junit.jupiter.api.BeforeEach; |
||||||
|
import org.junit.jupiter.api.Test; |
||||||
|
import org.junit.jupiter.api.extension.ExtendWith; |
||||||
|
import org.mockito.junit.jupiter.MockitoExtension; |
||||||
|
|
||||||
|
@SuppressWarnings("unused") |
||||||
|
@ExtendWith(MockitoExtension.class) |
||||||
|
class LogRollingTests { |
||||||
|
|
||||||
|
private VerkleWorldStateProvider archive; |
||||||
|
|
||||||
|
private static final BlockHeaderTestFixture blockHeaderTestFixture = new BlockHeaderTestFixture(); |
||||||
|
private InMemoryKeyValueStorageProvider provider; |
||||||
|
private KeyValueStorage accountStorage; |
||||||
|
private KeyValueStorage codeStorage; |
||||||
|
private KeyValueStorage storageStorage; |
||||||
|
private KeyValueStorage trieBranchStorage; |
||||||
|
private KeyValueStorage trieLogStorage; |
||||||
|
|
||||||
|
private InMemoryKeyValueStorageProvider secondProvider; |
||||||
|
private VerkleWorldStateProvider secondArchive; |
||||||
|
private KeyValueStorage secondAccountStorage; |
||||||
|
private KeyValueStorage secondCodeStorage; |
||||||
|
private KeyValueStorage secondStorageStorage; |
||||||
|
private KeyValueStorage secondTrieBranchStorage; |
||||||
|
private KeyValueStorage secondTrieLogStorage; |
||||||
|
private final Blockchain blockchain = mock(Blockchain.class); |
||||||
|
|
||||||
|
private static final Address addressOne = |
||||||
|
Address.fromHexString("0x1111111111111111111111111111111111111111"); |
||||||
|
|
||||||
|
private static final BlockHeader headerOne = |
||||||
|
new BlockHeader( |
||||||
|
Hash.ZERO, |
||||||
|
Hash.EMPTY_LIST_HASH, |
||||||
|
Address.ZERO, |
||||||
|
Hash.fromHexString("0x3869378cd87434ffd04c4e187312d69d1430dc62e575c4b4b061ac625b88ec08"), |
||||||
|
Hash.EMPTY_TRIE_HASH, |
||||||
|
Hash.EMPTY_LIST_HASH, |
||||||
|
LogsBloomFilter.builder().build(), |
||||||
|
Difficulty.ONE, |
||||||
|
1, |
||||||
|
0, |
||||||
|
0, |
||||||
|
0, |
||||||
|
Bytes.EMPTY, |
||||||
|
Wei.ZERO, |
||||||
|
Hash.ZERO, |
||||||
|
0, |
||||||
|
null, |
||||||
|
null, // blobGasUSed
|
||||||
|
null, |
||||||
|
null, |
||||||
|
null, |
||||||
|
new MainnetBlockHeaderFunctions()); |
||||||
|
private static final BlockHeader headerTwo = |
||||||
|
new BlockHeader( |
||||||
|
headerOne.getHash(), |
||||||
|
Hash.EMPTY_LIST_HASH, |
||||||
|
Address.ZERO, |
||||||
|
Hash.fromHexString("0x3e7c057b149069fadbb2bd2c752184cb5c7a9c736d27682c9e557ceda8ede10e"), |
||||||
|
Hash.EMPTY_TRIE_HASH, |
||||||
|
Hash.EMPTY_LIST_HASH, |
||||||
|
LogsBloomFilter.builder().build(), |
||||||
|
Difficulty.ONE, |
||||||
|
2, |
||||||
|
0, |
||||||
|
0, |
||||||
|
0, |
||||||
|
Bytes.EMPTY, |
||||||
|
Wei.ZERO, |
||||||
|
Hash.ZERO, |
||||||
|
0, |
||||||
|
null, |
||||||
|
null, // blobGasUsed
|
||||||
|
null, |
||||||
|
null, |
||||||
|
null, |
||||||
|
new MainnetBlockHeaderFunctions()); |
||||||
|
private static final BlockHeader headerThree = |
||||||
|
new BlockHeader( |
||||||
|
headerOne.getHash(), |
||||||
|
Hash.EMPTY_LIST_HASH, |
||||||
|
Address.ZERO, |
||||||
|
Hash.fromHexString("0xec5d7bd6bd7ce01e58bb389475767350852e2ce2bb72b8cd9c9b55d118c14e07"), |
||||||
|
Hash.EMPTY_TRIE_HASH, |
||||||
|
Hash.EMPTY_LIST_HASH, |
||||||
|
LogsBloomFilter.builder().build(), |
||||||
|
Difficulty.ONE, |
||||||
|
3, |
||||||
|
0, |
||||||
|
0, |
||||||
|
0, |
||||||
|
Bytes.EMPTY, |
||||||
|
Wei.ZERO, |
||||||
|
Hash.ZERO, |
||||||
|
0, |
||||||
|
null, |
||||||
|
null, // blobGasUsed
|
||||||
|
null, |
||||||
|
null, |
||||||
|
null, |
||||||
|
new MainnetBlockHeaderFunctions()); |
||||||
|
|
||||||
|
private static final BlockHeader headerFour = |
||||||
|
new BlockHeader( |
||||||
|
headerOne.getHash(), |
||||||
|
Hash.EMPTY_LIST_HASH, |
||||||
|
Address.ZERO, |
||||||
|
Hash.fromHexString("0x3869378cd87434ffd04c4e187312d69d1430dc62e575c4b4b061ac625b88ec08"), |
||||||
|
Hash.EMPTY_TRIE_HASH, |
||||||
|
Hash.EMPTY_LIST_HASH, |
||||||
|
LogsBloomFilter.builder().build(), |
||||||
|
Difficulty.ONE, |
||||||
|
3, |
||||||
|
0, |
||||||
|
0, |
||||||
|
0, |
||||||
|
Bytes.EMPTY, |
||||||
|
Wei.ZERO, |
||||||
|
Hash.ZERO, |
||||||
|
0, |
||||||
|
null, |
||||||
|
null, // blobGasUsed
|
||||||
|
null, |
||||||
|
null, |
||||||
|
null, |
||||||
|
new MainnetBlockHeaderFunctions()); |
||||||
|
|
||||||
|
@BeforeEach |
||||||
|
void createStorage() { |
||||||
|
provider = new InMemoryKeyValueStorageProvider(); |
||||||
|
archive = InMemoryKeyValueStorageProvider.createVerkleInMemoryWorldStateArchive(blockchain); |
||||||
|
accountStorage = |
||||||
|
provider.getStorageBySegmentIdentifier(KeyValueSegmentIdentifier.ACCOUNT_INFO_STATE); |
||||||
|
codeStorage = provider.getStorageBySegmentIdentifier(KeyValueSegmentIdentifier.CODE_STORAGE); |
||||||
|
storageStorage = |
||||||
|
provider.getStorageBySegmentIdentifier(KeyValueSegmentIdentifier.ACCOUNT_STORAGE_STORAGE); |
||||||
|
trieBranchStorage = |
||||||
|
provider.getStorageBySegmentIdentifier(KeyValueSegmentIdentifier.TRIE_BRANCH_STORAGE); |
||||||
|
trieLogStorage = |
||||||
|
provider.getStorageBySegmentIdentifier(KeyValueSegmentIdentifier.TRIE_LOG_STORAGE); |
||||||
|
|
||||||
|
secondProvider = new InMemoryKeyValueStorageProvider(); |
||||||
|
secondArchive = |
||||||
|
InMemoryKeyValueStorageProvider.createVerkleInMemoryWorldStateArchive(blockchain); |
||||||
|
secondAccountStorage = |
||||||
|
secondProvider.getStorageBySegmentIdentifier(KeyValueSegmentIdentifier.ACCOUNT_INFO_STATE); |
||||||
|
secondCodeStorage = |
||||||
|
secondProvider.getStorageBySegmentIdentifier(KeyValueSegmentIdentifier.CODE_STORAGE); |
||||||
|
secondStorageStorage = |
||||||
|
secondProvider.getStorageBySegmentIdentifier( |
||||||
|
KeyValueSegmentIdentifier.ACCOUNT_STORAGE_STORAGE); |
||||||
|
secondTrieBranchStorage = |
||||||
|
secondProvider.getStorageBySegmentIdentifier(KeyValueSegmentIdentifier.TRIE_BRANCH_STORAGE); |
||||||
|
secondTrieLogStorage = |
||||||
|
secondProvider.getStorageBySegmentIdentifier(KeyValueSegmentIdentifier.TRIE_LOG_STORAGE); |
||||||
|
} |
||||||
|
|
||||||
|
@Test |
||||||
|
void rollForwardComparedWithTestnet() { // TODO change the name
|
||||||
|
|
||||||
|
final VerkleWorldState worldState = |
||||||
|
new VerkleWorldState( |
||||||
|
archive, |
||||||
|
new VerkleWorldStateKeyValueStorage(provider, new NoOpMetricsSystem()), |
||||||
|
EvmConfiguration.DEFAULT); |
||||||
|
final WorldUpdater updater = worldState.updater(); |
||||||
|
|
||||||
|
final MutableAccount contract = |
||||||
|
updater.createAccount( |
||||||
|
Address.fromHexString("0x2a97e18168654393a573599759104efdfec6d8bd"), 1, Wei.ZERO); |
||||||
|
contract.setCode( |
||||||
|
Bytes.fromHexString( |
||||||
|
"608060405234801561000f575f80fd5b5060043610610034575f3560e01c80632e64cec1146100385780636057361d14610056575b5f80fd5b610040610072565b60405161004d919061029a565b60405180910390f35b610070600480360381019061006b91906102e1565b61019c565b005b5f8060405161008090610275565b604051809103905ff080158015610099573d5f803e3d5ffd5b5090505f60015f9054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16632e64cec16040518163ffffffff1660e01b8152600401602060405180830381865afa158015610107573d5f803e3d5ffd5b505050506040513d601f19601f8201168201806040525081019061012b9190610320565b90508173ffffffffffffffffffffffffffffffffffffffff16636057361d826040518263ffffffff1660e01b8152600401610166919061029a565b5f604051808303815f87803b15801561017d575f80fd5b505af115801561018f573d5f803e3d5ffd5b505050505f549250505090565b805f819055505f6040516101af90610275565b604051809103905ff0801580156101c8573d5f803e3d5ffd5b5090508073ffffffffffffffffffffffffffffffffffffffff16636057361d836040518263ffffffff1660e01b8152600401610204919061029a565b5f604051808303815f87803b15801561021b575f80fd5b505af115801561022d573d5f803e3d5ffd5b505050508060015f6101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055505050565b6101e38061034c83390190565b5f819050919050565b61029481610282565b82525050565b5f6020820190506102ad5f83018461028b565b92915050565b5f80fd5b6102c081610282565b81146102ca575f80fd5b50565b5f813590506102db816102b7565b92915050565b5f602082840312156102f6576102f56102b3565b5b5f610303848285016102cd565b91505092915050565b5f8151905061031a816102b7565b92915050565b5f60208284031215610335576103346102b3565b5b5f6103428482850161030c565b9150509291505056fe608060405234801561000f575f80fd5b506101c68061001d5f395ff3fe60806040526004361061003e575f3560e01c80632711432d146100425780632e64cec11461006c5780636057361d14610096578063d64c8ca4146100be575b5f80fd5b34801561004d575f80fd5b506100566100c8565b604051610063919061011e565b60405180910390f35b348015610077575f80fd5b506100806100d
1565b60405161008d919061011e565b60405180910390f35b3480156100a1575f80fd5b506100bc60048036038101906100b79190610165565b6100d9565b005b6100c66100e9565b005b5f600154905090565b5f8054905090565b805f819055508060018190555050565b5f3390508073ffffffffffffffffffffffffffffffffffffffff16ff5b5f819050919050565b61011881610106565b82525050565b5f6020820190506101315f83018461010f565b92915050565b5f80fd5b61014481610106565b811461014e575f80fd5b50565b5f8135905061015f8161013b565b92915050565b5f6020828403121561017a57610179610137565b5b5f61018784828501610151565b9150509291505056fea2646970667358221220dc349a9524617af5742ac60346440c0d09b175e4d9c4d95e378a9652cb9acbb064736f6c63430008160033a264697066735822122079744fe4f745783dffcec2415a6b99b8b7b340bcf4a768d5563f00d2ec1f916b64736f6c63430008160033")); |
||||||
|
contract.setStorageValue(UInt256.ZERO, UInt256.fromHexString("0x0c")); |
||||||
|
final MutableAccount mutableAccount = |
||||||
|
updater.createAccount( |
||||||
|
Address.fromHexString("0xb247faa497c752519917402cd79414727222f792"), |
||||||
|
2, |
||||||
|
Wei.fromHexString("56cdce8421269edc4")); |
||||||
|
updater.commit(); |
||||||
|
worldState.persist(null); |
||||||
|
} |
||||||
|
|
||||||
|
@Test |
||||||
|
void simpleRollForwardTest() { |
||||||
|
|
||||||
|
final VerkleWorldState worldState = |
||||||
|
new VerkleWorldState( |
||||||
|
archive, |
||||||
|
new VerkleWorldStateKeyValueStorage(provider, new NoOpMetricsSystem()), |
||||||
|
EvmConfiguration.DEFAULT); |
||||||
|
final WorldUpdater updater = worldState.updater(); |
||||||
|
|
||||||
|
final MutableAccount mutableAccount = updater.createAccount(addressOne, 1, Wei.of(1L)); |
||||||
|
mutableAccount.setCode(Bytes.of(0, 1, 2)); |
||||||
|
mutableAccount.setStorageValue(UInt256.ONE, UInt256.ONE); |
||||||
|
updater.commit(); |
||||||
|
worldState.persist(headerOne); |
||||||
|
|
||||||
|
final VerkleWorldState secondWorldState = |
||||||
|
new VerkleWorldState( |
||||||
|
secondArchive, |
||||||
|
new VerkleWorldStateKeyValueStorage(secondProvider, new NoOpMetricsSystem()), |
||||||
|
EvmConfiguration.DEFAULT); |
||||||
|
final VerkleWorldStateUpdateAccumulator secondUpdater = |
||||||
|
(VerkleWorldStateUpdateAccumulator) secondWorldState.updater(); |
||||||
|
|
||||||
|
final Optional<byte[]> value = trieLogStorage.get(headerOne.getHash().toArrayUnsafe()); |
||||||
|
|
||||||
|
final TrieLogLayer layer = |
||||||
|
TrieLogFactoryImpl.readFrom(new BytesValueRLPInput(Bytes.wrap(value.get()), false)); |
||||||
|
|
||||||
|
secondUpdater.rollForward(layer); |
||||||
|
secondUpdater.commit(); |
||||||
|
secondWorldState.persist(null); |
||||||
|
|
||||||
|
assertKeyValueStorageEqual(accountStorage, secondAccountStorage); |
||||||
|
assertKeyValueStorageEqual(codeStorage, secondCodeStorage); |
||||||
|
assertKeyValueStorageEqual(storageStorage, secondStorageStorage); |
||||||
|
final KeyValueStorageTransaction tx = trieBranchStorage.startTransaction(); |
||||||
|
tx.remove(VerkleWorldStateKeyValueStorage.WORLD_BLOCK_HASH_KEY); |
||||||
|
tx.commit(); |
||||||
|
assertKeyValueStorageEqual(trieBranchStorage, secondTrieBranchStorage); |
||||||
|
// trie logs won't be the same, we shouldn't generate logs on rolls.
|
||||||
|
assertKeyValueSubset(trieLogStorage, secondTrieLogStorage); |
||||||
|
assertThat(secondWorldState.rootHash()).isEqualByComparingTo(worldState.rootHash()); |
||||||
|
} |
||||||
|
|
||||||
|
@Test |
||||||
|
void rollForwardTwice() { |
||||||
|
final VerkleWorldState worldState = |
||||||
|
new VerkleWorldState( |
||||||
|
archive, |
||||||
|
new VerkleWorldStateKeyValueStorage(provider, new NoOpMetricsSystem()), |
||||||
|
EvmConfiguration.DEFAULT); |
||||||
|
|
||||||
|
final WorldUpdater updater = worldState.updater(); |
||||||
|
final MutableAccount mutableAccount = updater.createAccount(addressOne, 1, Wei.of(1L)); |
||||||
|
mutableAccount.setCode(Bytes.of(0, 1, 2)); |
||||||
|
mutableAccount.setStorageValue(UInt256.ONE, UInt256.ONE); |
||||||
|
updater.commit(); |
||||||
|
|
||||||
|
worldState.persist(headerOne); |
||||||
|
|
||||||
|
final WorldUpdater updater2 = worldState.updater(); |
||||||
|
final MutableAccount mutableAccount2 = updater2.getAccount(addressOne); |
||||||
|
mutableAccount2.setStorageValue(UInt256.ONE, UInt256.valueOf(2)); |
||||||
|
updater2.commit(); |
||||||
|
|
||||||
|
worldState.persist(headerTwo); |
||||||
|
|
||||||
|
final VerkleWorldState secondWorldState = |
||||||
|
new VerkleWorldState( |
||||||
|
secondArchive, |
||||||
|
new VerkleWorldStateKeyValueStorage(secondProvider, new NoOpMetricsSystem()), |
||||||
|
EvmConfiguration.DEFAULT); |
||||||
|
final VerkleWorldStateUpdateAccumulator secondUpdater = |
||||||
|
(VerkleWorldStateUpdateAccumulator) secondWorldState.updater(); |
||||||
|
|
||||||
|
final TrieLogLayer layerOne = getTrieLogLayer(trieLogStorage, headerOne.getHash()); |
||||||
|
secondUpdater.rollForward(layerOne); |
||||||
|
secondUpdater.commit(); |
||||||
|
secondWorldState.persist(null); |
||||||
|
|
||||||
|
final TrieLogLayer layerTwo = getTrieLogLayer(trieLogStorage, headerTwo.getHash()); |
||||||
|
secondUpdater.rollForward(layerTwo); |
||||||
|
secondUpdater.commit(); |
||||||
|
secondWorldState.persist(null); |
||||||
|
|
||||||
|
assertKeyValueStorageEqual(accountStorage, secondAccountStorage); |
||||||
|
assertKeyValueStorageEqual(codeStorage, secondCodeStorage); |
||||||
|
assertKeyValueStorageEqual(storageStorage, secondStorageStorage); |
||||||
|
final KeyValueStorageTransaction tx = trieBranchStorage.startTransaction(); |
||||||
|
tx.remove(VerkleWorldStateKeyValueStorage.WORLD_BLOCK_HASH_KEY); |
||||||
|
tx.commit(); |
||||||
|
assertKeyValueStorageEqual(trieBranchStorage, secondTrieBranchStorage); |
||||||
|
// trie logs won't be the same, we shouldn't generate logs on rolls.
|
||||||
|
assertKeyValueSubset(trieLogStorage, secondTrieLogStorage); |
||||||
|
assertThat(secondWorldState.rootHash()).isEqualByComparingTo(worldState.rootHash()); |
||||||
|
} |
||||||
|
|
||||||
|
@Test |
||||||
|
void rollBackOnce() { |
||||||
|
final VerkleWorldState worldState = |
||||||
|
new VerkleWorldState( |
||||||
|
archive, |
||||||
|
new VerkleWorldStateKeyValueStorage(provider, new NoOpMetricsSystem()), |
||||||
|
EvmConfiguration.DEFAULT); |
||||||
|
|
||||||
|
final WorldUpdater updater = worldState.updater(); |
||||||
|
final MutableAccount mutableAccount = updater.createAccount(addressOne, 1, Wei.of(1L)); |
||||||
|
mutableAccount.setCode(Bytes.of(0, 1, 2)); |
||||||
|
mutableAccount.setStorageValue(UInt256.ONE, UInt256.ONE); |
||||||
|
updater.commit(); |
||||||
|
|
||||||
|
worldState.persist(headerOne); |
||||||
|
|
||||||
|
final WorldUpdater updater2 = worldState.updater(); |
||||||
|
final MutableAccount mutableAccount2 = updater2.getAccount(addressOne); |
||||||
|
mutableAccount2.setStorageValue(UInt256.ONE, UInt256.valueOf(2)); |
||||||
|
updater2.commit(); |
||||||
|
|
||||||
|
worldState.persist(headerTwo); |
||||||
|
final VerkleWorldStateUpdateAccumulator firstRollbackUpdater = |
||||||
|
(VerkleWorldStateUpdateAccumulator) worldState.updater(); |
||||||
|
|
||||||
|
final TrieLogLayer layerTwo = getTrieLogLayer(trieLogStorage, headerTwo.getHash()); |
||||||
|
firstRollbackUpdater.rollBack(layerTwo); |
||||||
|
|
||||||
|
worldState.persist(headerOne); |
||||||
|
|
||||||
|
final VerkleWorldState secondWorldState = |
||||||
|
new VerkleWorldState( |
||||||
|
secondArchive, |
||||||
|
new VerkleWorldStateKeyValueStorage(secondProvider, new NoOpMetricsSystem()), |
||||||
|
EvmConfiguration.DEFAULT); |
||||||
|
|
||||||
|
final WorldUpdater secondUpdater = secondWorldState.updater(); |
||||||
|
final MutableAccount secondMutableAccount = |
||||||
|
secondUpdater.createAccount(addressOne, 1, Wei.of(1L)); |
||||||
|
secondMutableAccount.setCode(Bytes.of(0, 1, 2)); |
||||||
|
secondMutableAccount.setStorageValue(UInt256.ONE, UInt256.ONE); |
||||||
|
secondUpdater.commit(); |
||||||
|
|
||||||
|
secondWorldState.persist(null); |
||||||
|
|
||||||
|
assertKeyValueStorageEqual(accountStorage, secondAccountStorage); |
||||||
|
assertKeyValueStorageEqual(codeStorage, secondCodeStorage); |
||||||
|
assertKeyValueStorageEqual(storageStorage, secondStorageStorage); |
||||||
|
final KeyValueStorageTransaction tx = trieBranchStorage.startTransaction(); |
||||||
|
tx.remove(VerkleWorldStateKeyValueStorage.WORLD_BLOCK_HASH_KEY); |
||||||
|
tx.commit(); |
||||||
|
assertKeyValueStorageEqual(trieBranchStorage, secondTrieBranchStorage); |
||||||
|
// trie logs won't be the same, we don't delete the roll back log
|
||||||
|
assertKeyValueSubset(trieLogStorage, secondTrieLogStorage); |
||||||
|
assertThat(secondWorldState.rootHash()).isEqualByComparingTo(worldState.rootHash()); |
||||||
|
} |
||||||
|
|
||||||
|
@Test |
||||||
|
void rollBackTwice() { |
||||||
|
final VerkleWorldState worldState = |
||||||
|
new VerkleWorldState( |
||||||
|
archive, |
||||||
|
new VerkleWorldStateKeyValueStorage(provider, new NoOpMetricsSystem()), |
||||||
|
EvmConfiguration.DEFAULT); |
||||||
|
|
||||||
|
final WorldUpdater updater = worldState.updater(); |
||||||
|
final MutableAccount mutableAccount = updater.createAccount(addressOne, 1, Wei.of(1L)); |
||||||
|
mutableAccount.setCode(Bytes.of(0, 1, 2)); |
||||||
|
mutableAccount.setStorageValue(UInt256.ONE, UInt256.ONE); |
||||||
|
updater.commit(); |
||||||
|
|
||||||
|
worldState.persist(headerOne); |
||||||
|
final TrieLogLayer layerOne = getTrieLogLayer(trieLogStorage, headerOne.getHash()); |
||||||
|
|
||||||
|
final WorldUpdater updater2 = worldState.updater(); |
||||||
|
final MutableAccount mutableAccount2 = updater2.getAccount(addressOne); |
||||||
|
mutableAccount2.setStorageValue(UInt256.ONE, UInt256.valueOf(2)); |
||||||
|
updater2.commit(); |
||||||
|
|
||||||
|
worldState.persist(headerTwo); |
||||||
|
final VerkleWorldStateUpdateAccumulator firstRollbackUpdater = |
||||||
|
(VerkleWorldStateUpdateAccumulator) worldState.updater(); |
||||||
|
|
||||||
|
final TrieLogLayer layerTwo = getTrieLogLayer(trieLogStorage, headerTwo.getHash()); |
||||||
|
firstRollbackUpdater.rollBack(layerTwo); |
||||||
|
|
||||||
|
worldState.persist(headerOne); |
||||||
|
|
||||||
|
final VerkleWorldStateUpdateAccumulator secondRollbackUpdater = |
||||||
|
(VerkleWorldStateUpdateAccumulator) worldState.updater(); |
||||||
|
secondRollbackUpdater.rollBack(layerOne); |
||||||
|
|
||||||
|
worldState.persist(null); |
||||||
|
|
||||||
|
assertThat(worldState.rootHash()).isEqualTo(Bytes32.ZERO); |
||||||
|
} |
||||||
|
|
||||||
|
@Test |
||||||
|
void rollBackFourTimes() { |
||||||
|
final VerkleWorldState worldState = |
||||||
|
new VerkleWorldState( |
||||||
|
archive, |
||||||
|
new VerkleWorldStateKeyValueStorage(provider, new NoOpMetricsSystem()), |
||||||
|
EvmConfiguration.DEFAULT); |
||||||
|
|
||||||
|
final WorldUpdater updater = worldState.updater(); |
||||||
|
final MutableAccount mutableAccount = updater.createAccount(addressOne, 1, Wei.of(1L)); |
||||||
|
mutableAccount.setCode(Bytes.of(0, 1, 2)); |
||||||
|
mutableAccount.setStorageValue(UInt256.ONE, UInt256.ONE); |
||||||
|
updater.commit(); |
||||||
|
|
||||||
|
worldState.persist(headerOne); |
||||||
|
final TrieLogLayer layerOne = getTrieLogLayer(trieLogStorage, headerOne.getHash()); |
||||||
|
|
||||||
|
final WorldUpdater updater2 = worldState.updater(); |
||||||
|
final MutableAccount mutableAccount2 = updater2.getAccount(addressOne); |
||||||
|
mutableAccount2.setStorageValue(UInt256.ONE, UInt256.valueOf(2)); |
||||||
|
updater2.commit(); |
||||||
|
|
||||||
|
worldState.persist(headerTwo); |
||||||
|
final TrieLogLayer layerTwo = getTrieLogLayer(trieLogStorage, headerTwo.getHash()); |
||||||
|
|
||||||
|
final WorldUpdater updater3 = worldState.updater(); |
||||||
|
final MutableAccount mutableAccount3 = updater3.getAccount(addressOne); |
||||||
|
mutableAccount3.setStorageValue(UInt256.ONE, UInt256.valueOf(0)); |
||||||
|
updater3.commit(); |
||||||
|
|
||||||
|
worldState.persist(headerThree); |
||||||
|
final TrieLogLayer layerThree = getTrieLogLayer(trieLogStorage, headerThree.getHash()); |
||||||
|
|
||||||
|
final WorldUpdater updater4 = worldState.updater(); |
||||||
|
final MutableAccount mutableAccount4 = updater4.getAccount(addressOne); |
||||||
|
mutableAccount4.setStorageValue(UInt256.ONE, UInt256.valueOf(1)); |
||||||
|
updater4.commit(); |
||||||
|
|
||||||
|
worldState.persist(headerFour); |
||||||
|
final TrieLogLayer layerFour = getTrieLogLayer(trieLogStorage, headerFour.getHash()); |
||||||
|
|
||||||
|
final VerkleWorldStateUpdateAccumulator firstRollbackUpdater = |
||||||
|
(VerkleWorldStateUpdateAccumulator) worldState.updater(); |
||||||
|
|
||||||
|
firstRollbackUpdater.rollBack(layerFour); |
||||||
|
|
||||||
|
System.out.println(layerFour.dump()); |
||||||
|
worldState.persist(headerThree); |
||||||
|
|
||||||
|
final VerkleWorldStateUpdateAccumulator secondRollbackUpdater = |
||||||
|
(VerkleWorldStateUpdateAccumulator) worldState.updater(); |
||||||
|
secondRollbackUpdater.rollBack(layerThree); |
||||||
|
|
||||||
|
worldState.persist(headerTwo); |
||||||
|
|
||||||
|
final VerkleWorldStateUpdateAccumulator thirdRollbackUpdater = |
||||||
|
(VerkleWorldStateUpdateAccumulator) worldState.updater(); |
||||||
|
thirdRollbackUpdater.rollBack(layerTwo); |
||||||
|
|
||||||
|
worldState.persist(headerOne); |
||||||
|
|
||||||
|
final VerkleWorldStateUpdateAccumulator fourRollbackUpdater = |
||||||
|
(VerkleWorldStateUpdateAccumulator) worldState.updater(); |
||||||
|
fourRollbackUpdater.rollBack(layerOne); |
||||||
|
|
||||||
|
worldState.persist(null); |
||||||
|
|
||||||
|
assertThat(worldState.rootHash()).isEqualTo(Bytes32.ZERO); |
||||||
|
} |
||||||
|
|
||||||
|
@Test |
||||||
|
void rollingWithRemovedStorageValue() { |
||||||
|
final VerkleWorldState worldState = |
||||||
|
new VerkleWorldState( |
||||||
|
archive, |
||||||
|
new VerkleWorldStateKeyValueStorage(provider, new NoOpMetricsSystem()), |
||||||
|
EvmConfiguration.DEFAULT); |
||||||
|
|
||||||
|
final WorldUpdater updater = worldState.updater(); |
||||||
|
final MutableAccount mutableAccount = updater.createAccount(addressOne, 1, Wei.of(1L)); |
||||||
|
mutableAccount.setCode(Bytes.of(0, 1, 2)); |
||||||
|
mutableAccount.setStorageValue(UInt256.ONE, UInt256.ONE); |
||||||
|
updater.commit(); |
||||||
|
|
||||||
|
worldState.persist(headerOne); |
||||||
|
|
||||||
|
/*final WorldUpdater updater2 = worldState.updater(); |
||||||
|
final MutableAccount mutableAccount2 = updater2.getAccount(addressOne); |
||||||
|
mutableAccount2.setStorageValue(UInt256.ONE, UInt256.ZERO); |
||||||
|
updater2.commit(); |
||||||
|
|
||||||
|
blockHeaderTestFixture.stateRoot(Hash.fromHexString("0x1879f69465e8ef937ce1f13cb5b328437239a2764982cea5e337cd5d217a2866")); |
||||||
|
blockHeaderTestFixture.number(2); |
||||||
|
final BlockHeader blockHeaderTwo = blockHeaderTestFixture.buildHeader(); |
||||||
|
worldState.persist(blockHeaderTwo); |
||||||
|
|
||||||
|
final VerkleWorldStateUpdateAccumulator firstRollbackUpdater = |
||||||
|
(VerkleWorldStateUpdateAccumulator) worldState.updater(); |
||||||
|
|
||||||
|
final TrieLogLayer layerTwo = getTrieLogLayer(trieLogStorage, blockHeaderTwo.getBlockHash()); |
||||||
|
firstRollbackUpdater.rollBack(layerTwo); |
||||||
|
System.out.println("rollback"); |
||||||
|
|
||||||
|
worldState.persist(null); |
||||||
|
assertThat(worldState.rootHash()).isEqualTo(blockHeaderOne.getStateRoot());*/ |
||||||
|
|
||||||
|
final VerkleWorldStateUpdateAccumulator secondRollbackUpdater = |
||||||
|
(VerkleWorldStateUpdateAccumulator) worldState.updater(); |
||||||
|
|
||||||
|
final TrieLogLayer layerOne = getTrieLogLayer(trieLogStorage, headerOne.getBlockHash()); |
||||||
|
secondRollbackUpdater.rollBack(layerOne); |
||||||
|
|
||||||
|
worldState.persist(null); |
||||||
|
assertThat(worldState.rootHash()).isEqualTo(Bytes32.ZERO); |
||||||
|
} |
||||||
|
|
||||||
|
private TrieLogLayer getTrieLogLayer(final KeyValueStorage storage, final Bytes key) { |
||||||
|
return storage |
||||||
|
.get(key.toArrayUnsafe()) |
||||||
|
.map(bytes -> TrieLogFactoryImpl.readFrom(new BytesValueRLPInput(Bytes.wrap(bytes), false))) |
||||||
|
.get(); |
||||||
|
} |
||||||
|
|
||||||
|
private static void assertKeyValueStorageEqual( |
||||||
|
final KeyValueStorage first, final KeyValueStorage second) { |
||||||
|
final var firstKeys = |
||||||
|
first.getAllKeysThat(k -> true).stream().map(Bytes::wrap).collect(Collectors.toSet()); |
||||||
|
final var secondKeys = |
||||||
|
second.getAllKeysThat(k -> true).stream().map(Bytes::wrap).collect(Collectors.toSet()); |
||||||
|
|
||||||
|
assertThat(secondKeys).isEqualTo(firstKeys); |
||||||
|
for (final Bytes key : firstKeys) { |
||||||
|
assertThat(Bytes.wrap(second.get(key.toArrayUnsafe()).get())) |
||||||
|
.isEqualByComparingTo(Bytes.wrap(first.get(key.toArrayUnsafe()).get())); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
private static void assertKeyValueSubset( |
||||||
|
final KeyValueStorage largerSet, final KeyValueStorage smallerSet) { |
||||||
|
final var largerKeys = |
||||||
|
largerSet.getAllKeysThat(k -> true).stream().map(Bytes::wrap).collect(Collectors.toSet()); |
||||||
|
final var smallerKeys = |
||||||
|
smallerSet.getAllKeysThat(k -> true).stream().map(Bytes::wrap).collect(Collectors.toSet()); |
||||||
|
|
||||||
|
assertThat(largerKeys).containsAll(smallerKeys); |
||||||
|
for (final Bytes key : largerKeys) { |
||||||
|
if (smallerKeys.contains(key)) { |
||||||
|
assertThat(Bytes.wrap(largerSet.get(key.toArrayUnsafe()).get())) |
||||||
|
.isEqualByComparingTo(Bytes.wrap(smallerSet.get(key.toArrayUnsafe()).get())); |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
} |
@ -0,0 +1,61 @@ |
|||||||
|
/* |
||||||
|
* Copyright Hyperledger Besu Contributors. |
||||||
|
* |
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on |
||||||
|
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the |
||||||
|
* specific language governing permissions and limitations under the License. |
||||||
|
* |
||||||
|
* SPDX-License-Identifier: Apache-2.0 |
||||||
|
*/ |
||||||
|
package org.hyperledger.besu.ethereum.verkletrie; |
||||||
|
|
||||||
|
import org.hyperledger.besu.ethereum.trie.NodeLoader; |
||||||
|
import org.hyperledger.besu.ethereum.trie.NodeUpdater; |
||||||
|
import org.hyperledger.besu.ethereum.trie.verkle.StoredVerkleTrie; |
||||||
|
import org.hyperledger.besu.ethereum.trie.verkle.factory.StoredNodeFactory; |
||||||
|
|
||||||
|
import java.util.Optional; |
||||||
|
|
||||||
|
import org.apache.tuweni.bytes.Bytes; |
||||||
|
import org.apache.tuweni.bytes.Bytes32; |
||||||
|
|
||||||
|
public class VerkleTrie { |
||||||
|
|
||||||
|
private final org.hyperledger.besu.ethereum.trie.verkle.StoredVerkleTrie<Bytes, Bytes> verkleTrie; |
||||||
|
|
||||||
|
private final StoredNodeFactory<Bytes> nodeFactory; |
||||||
|
|
||||||
|
public VerkleTrie(final NodeLoader nodeLoader, final Bytes32 rootHash) { |
||||||
|
nodeFactory = new StoredNodeFactory<>(nodeLoader, value -> value); |
||||||
|
verkleTrie = new StoredVerkleTrie<>(nodeFactory); |
||||||
|
} |
||||||
|
|
||||||
|
public Optional<Bytes> get(final Bytes key) { |
||||||
|
return verkleTrie.get(key); |
||||||
|
} |
||||||
|
|
||||||
|
public Optional<Bytes> put(final Bytes key, final Bytes value) { |
||||||
|
return verkleTrie.put(key, Bytes32.leftPad(value)); |
||||||
|
} |
||||||
|
|
||||||
|
public void remove(final Bytes key) { |
||||||
|
verkleTrie.remove(Bytes32.wrap(key)); |
||||||
|
} |
||||||
|
|
||||||
|
public Bytes32 getRootHash() { |
||||||
|
return verkleTrie.getRootHash(); |
||||||
|
} |
||||||
|
|
||||||
|
public void commit(final NodeUpdater nodeUpdater) { |
||||||
|
verkleTrie.commit(nodeUpdater); |
||||||
|
} |
||||||
|
|
||||||
|
public String toDotTree() { |
||||||
|
return verkleTrie.toDotTree(false); |
||||||
|
} |
||||||
|
} |
@ -0,0 +1,95 @@ |
|||||||
|
/* |
||||||
|
* Copyright Hyperledger Besu Contributors. |
||||||
|
* |
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on |
||||||
|
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the |
||||||
|
* specific language governing permissions and limitations under the License. |
||||||
|
* |
||||||
|
* SPDX-License-Identifier: Apache-2.0 |
||||||
|
*/ |
||||||
|
package org.hyperledger.besu.ethereum.verkletrie; |
||||||
|
|
||||||
|
import org.hyperledger.besu.datatypes.Address; |
||||||
|
import org.hyperledger.besu.datatypes.Hash; |
||||||
|
import org.hyperledger.besu.datatypes.StorageSlotKey; |
||||||
|
import org.hyperledger.besu.datatypes.Wei; |
||||||
|
import org.hyperledger.besu.ethereum.trie.verkle.adapter.TrieKeyAdapter; |
||||||
|
import org.hyperledger.besu.ethereum.trie.verkle.hasher.PedersenHasher; |
||||||
|
|
||||||
|
import java.util.ArrayList; |
||||||
|
import java.util.HashMap; |
||||||
|
import java.util.List; |
||||||
|
import java.util.Map; |
||||||
|
|
||||||
|
import kotlin.Pair; |
||||||
|
import org.apache.tuweni.bytes.Bytes; |
||||||
|
import org.apache.tuweni.bytes.Bytes32; |
||||||
|
import org.apache.tuweni.units.bigints.UInt256; |
||||||
|
|
||||||
|
public class VerkleTrieKeyValueGenerator { |
||||||
|
|
||||||
|
final TrieKeyAdapter trieKeyAdapter = new TrieKeyAdapter(new PedersenHasher()); |
||||||
|
|
||||||
|
public Map<Bytes, Bytes> generateKeyValuesForAccount( |
||||||
|
final Address address, final long nonce, final Wei balance, final Hash codeHash) { |
||||||
|
final Map<Bytes, Bytes> keyValues = new HashMap<>(); |
||||||
|
keyValues.put(trieKeyAdapter.versionKey(address), Bytes32.ZERO); |
||||||
|
keyValues.put(trieKeyAdapter.balanceKey(address), toLittleEndian(balance)); |
||||||
|
keyValues.put(trieKeyAdapter.nonceKey(address), toLittleEndian(UInt256.valueOf(nonce))); |
||||||
|
keyValues.put(trieKeyAdapter.codeKeccakKey(address), codeHash); |
||||||
|
return keyValues; |
||||||
|
} |
||||||
|
|
||||||
|
public List<Bytes> generateKeysForAccount(final Address address) { |
||||||
|
final List<Bytes> keys = new ArrayList<>(); |
||||||
|
keys.add(trieKeyAdapter.versionKey(address)); |
||||||
|
keys.add(trieKeyAdapter.balanceKey(address)); |
||||||
|
keys.add(trieKeyAdapter.nonceKey(address)); |
||||||
|
return keys; |
||||||
|
} |
||||||
|
|
||||||
|
public Map<Bytes, Bytes> generateKeyValuesForCode( |
||||||
|
final Address address, final Bytes32 keccakCodeHash, final Bytes code) { |
||||||
|
final Map<Bytes, Bytes> keyValues = new HashMap<>(); |
||||||
|
keyValues.put( |
||||||
|
trieKeyAdapter.codeSizeKey(address), toLittleEndian(UInt256.valueOf(code.size()))); |
||||||
|
List<Bytes32> codeChunks = trieKeyAdapter.chunkifyCode(code); |
||||||
|
for (int i = 0; i < codeChunks.size(); i++) { |
||||||
|
// System.out.println("add code " + trieKeyAdapter.codeChunkKey(address, UInt256.valueOf(i)) +
|
||||||
|
// " " + codeChunks.get(i));
|
||||||
|
keyValues.put(trieKeyAdapter.codeChunkKey(address, UInt256.valueOf(i)), codeChunks.get(i)); |
||||||
|
} |
||||||
|
return keyValues; |
||||||
|
} |
||||||
|
|
||||||
|
public List<Bytes> generateKeysForCode(final Address address, final Bytes code) { |
||||||
|
final List<Bytes> keys = new ArrayList<>(); |
||||||
|
keys.add(trieKeyAdapter.codeKeccakKey(address)); |
||||||
|
keys.add(trieKeyAdapter.codeSizeKey(address)); |
||||||
|
List<Bytes32> codeChunks = trieKeyAdapter.chunkifyCode(code); |
||||||
|
for (int i = 0; i < codeChunks.size(); i++) { |
||||||
|
keys.add(trieKeyAdapter.codeChunkKey(address, UInt256.valueOf(i))); |
||||||
|
} |
||||||
|
return keys; |
||||||
|
} |
||||||
|
|
||||||
|
public Pair<Bytes, Bytes> generateKeyValuesForStorage( |
||||||
|
final Address address, final StorageSlotKey storageKey, final Bytes value) { |
||||||
|
return new Pair<>( |
||||||
|
trieKeyAdapter.storageKey(address, storageKey.getSlotKey().orElseThrow()), value); |
||||||
|
} |
||||||
|
|
||||||
|
public List<Bytes> generateKeysForStorage( |
||||||
|
final Address address, final StorageSlotKey storageKey) { |
||||||
|
return List.of(trieKeyAdapter.storageKey(address, storageKey.getSlotKey().orElseThrow())); |
||||||
|
} |
||||||
|
|
||||||
|
private static Bytes toLittleEndian(final Bytes originalValue) { |
||||||
|
return originalValue.reverse(); |
||||||
|
} |
||||||
|
} |
File diff suppressed because it is too large
Load Diff
Loading…
Reference in new issue