V3 agents rebase (#2746)
### Description

It's your favourite PR coming right back... V3 agents!

Closes https://github.com/hyperlane-xyz/issues/issues/561
Builds on top of https://github.com/hyperlane-xyz/hyperlane-monorepo/pull/2742
Depends on https://github.com/hyperlane-xyz/hyperlane-monorepo/pull/2681 for e2e testing

This PR includes:

- [x] Merkle tree hook indexer
- [x] Merkle tree builder task
- [x] Update the submitter to trigger retries if no proof is available yet

A slightly more detailed overview of the work is here: https://github.com/hyperlane-xyz/hyperlane-monorepo/issues/2720#issuecomment-1724038643

### Drive-by changes

### Related issues

### Backward compatibility

### Testing

---------

Co-authored-by: -f <kunalarora1729@gmail.com>
Co-authored-by: Trevor Porter <trkporter@ucdavis.edu>
Co-authored-by: Kunal Arora <55632507+aroralanuk@users.noreply.github.com>
Co-authored-by: Mattie Conover <git@mconover.dev>
Co-authored-by: Guillaume Bouvignies <guillaumebouvignies@gmail.com>
Co-authored-by: Yorke Rhodes <yorke@hyperlane.xyz>
Co-authored-by: Guillaume Bouvignies <guillaume.bouvignies@kurtosistech.com>
parent a60ec18237
commit af5bd88aa8
@@ -0,0 +1,2 @@
```rust
pub(crate) mod builder;
pub(crate) mod processor;
```
@@ -0,0 +1,101 @@
```rust
use std::{
    fmt::{Debug, Formatter},
    sync::Arc,
    time::Duration,
};

use async_trait::async_trait;
use derive_new::new;
use eyre::Result;
use hyperlane_base::db::HyperlaneRocksDB;
use hyperlane_core::{HyperlaneDomain, MerkleTreeInsertion};
use prometheus::IntGauge;
use tokio::sync::RwLock;
use tracing::debug;

use crate::processor::ProcessorExt;

use super::builder::MerkleTreeBuilder;

/// Finds unprocessed merkle tree insertions and adds them to the prover sync
#[derive(new)]
pub struct MerkleTreeProcessor {
    db: HyperlaneRocksDB,
    metrics: MerkleTreeProcessorMetrics,
    prover_sync: Arc<RwLock<MerkleTreeBuilder>>,
    #[new(default)]
    leaf_index: u32,
}

impl Debug for MerkleTreeProcessor {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "MerkleTreeProcessor {{ leaf_index: {:?} }}",
            self.leaf_index
        )
    }
}

#[async_trait]
impl ProcessorExt for MerkleTreeProcessor {
    /// The domain this processor is getting merkle tree hook insertions from.
    fn domain(&self) -> &HyperlaneDomain {
        self.db.domain()
    }

    /// One round of processing, extracted from infinite work loop for
    /// testing purposes.
    async fn tick(&mut self) -> Result<()> {
        if let Some(insertion) = self.next_unprocessed_leaf()? {
            // Feed the message to the prover sync
            self.prover_sync
                .write()
                .await
                .ingest_message_id(insertion.message_id())
                .await?;

            // Increase the leaf index to move on to the next leaf
            self.leaf_index += 1;
        } else {
            tokio::time::sleep(Duration::from_secs(1)).await;
        }
        Ok(())
    }
}

impl MerkleTreeProcessor {
    fn next_unprocessed_leaf(&mut self) -> Result<Option<MerkleTreeInsertion>> {
        let leaf = if let Some(insertion) = self
            .db
            .retrieve_merkle_tree_insertion_by_leaf_index(&self.leaf_index)?
        {
            // Update the metrics
            self.metrics
                .max_leaf_index_gauge
                .set(insertion.index() as i64);
            Some(insertion)
        } else {
            debug!(leaf_index=?self.leaf_index, "No message found in DB for leaf index");
            None
        };
        Ok(leaf)
    }
}

#[derive(Debug)]
pub struct MerkleTreeProcessorMetrics {
    max_leaf_index_gauge: IntGauge,
}

impl MerkleTreeProcessorMetrics {
    pub fn new() -> Self {
        Self {
            max_leaf_index_gauge: IntGauge::new(
                "max_leaf_index_gauge",
                "The max merkle tree leaf index",
            )
            .unwrap(),
        }
    }
}
```
@@ -0,0 +1,37 @@
```rust
use std::fmt::Debug;

use async_trait::async_trait;
use derive_new::new;
use eyre::Result;
use hyperlane_core::HyperlaneDomain;
use tokio::task::JoinHandle;
use tracing::{info_span, instrument, instrument::Instrumented, Instrument};

#[async_trait]
pub trait ProcessorExt: Send + Debug {
    /// The domain this processor is getting messages from.
    fn domain(&self) -> &HyperlaneDomain;

    /// One round of processing, extracted from infinite work loop for
    /// testing purposes.
    async fn tick(&mut self) -> Result<()>;
}

#[derive(new)]
pub struct Processor {
    ticker: Box<dyn ProcessorExt>,
}

impl Processor {
    pub fn spawn(self) -> Instrumented<JoinHandle<Result<()>>> {
        let span = info_span!("MessageProcessor");
        tokio::spawn(async move { self.main_loop().await }).instrument(span)
    }

    #[instrument(ret, err, skip(self), level = "info", fields(domain=%self.ticker.domain()))]
    async fn main_loop(mut self) -> Result<()> {
        loop {
            self.ticker.tick().await?;
        }
    }
}
```
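For orientation, here is a minimal wiring sketch (not part of this diff) showing how the merkle tree processor above could be driven by the generic `Processor` loop; the construction of `db` and `prover_sync` is assumed to happen elsewhere in the agent's setup code, and the imports follow the two modules above.

```rust
// Hypothetical wiring sketch only. `MerkleTreeProcessor::new` comes from
// #[derive(new)], so `leaf_index` starts at its default of 0.
fn spawn_merkle_tree_processor(
    db: HyperlaneRocksDB,
    prover_sync: Arc<RwLock<MerkleTreeBuilder>>,
) -> Instrumented<JoinHandle<Result<()>>> {
    let metrics = MerkleTreeProcessorMetrics::new();
    let ticker = MerkleTreeProcessor::new(db, metrics, prover_sync);
    // Processor drives ProcessorExt::tick in an instrumented infinite loop.
    Processor::new(Box::new(ticker)).spawn()
}
```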
@@ -0,0 +1,156 @@
```json
[
  {
    "inputs": [{ "internalType": "address", "name": "_mailbox", "type": "address" }],
    "stateMutability": "nonpayable",
    "type": "constructor"
  },
  {
    "anonymous": false,
    "inputs": [
      { "indexed": false, "internalType": "bytes32", "name": "messageId", "type": "bytes32" },
      { "indexed": false, "internalType": "uint32", "name": "index", "type": "uint32" }
    ],
    "name": "InsertedIntoTree",
    "type": "event"
  },
  {
    "inputs": [],
    "name": "count",
    "outputs": [{ "internalType": "uint32", "name": "", "type": "uint32" }],
    "stateMutability": "view",
    "type": "function"
  },
  {
    "inputs": [],
    "name": "deployedBlock",
    "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }],
    "stateMutability": "view",
    "type": "function"
  },
  {
    "inputs": [],
    "name": "latestCheckpoint",
    "outputs": [
      { "internalType": "bytes32", "name": "", "type": "bytes32" },
      { "internalType": "uint32", "name": "", "type": "uint32" }
    ],
    "stateMutability": "view",
    "type": "function"
  },
  {
    "inputs": [
      { "internalType": "bytes", "name": "", "type": "bytes" },
      { "internalType": "bytes", "name": "message", "type": "bytes" }
    ],
    "name": "postDispatch",
    "outputs": [],
    "stateMutability": "payable",
    "type": "function"
  },
  {
    "inputs": [
      { "internalType": "bytes", "name": "", "type": "bytes" },
      { "internalType": "bytes", "name": "", "type": "bytes" }
    ],
    "name": "quoteDispatch",
    "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }],
    "stateMutability": "pure",
    "type": "function"
  },
  {
    "inputs": [],
    "name": "root",
    "outputs": [{ "internalType": "bytes32", "name": "", "type": "bytes32" }],
    "stateMutability": "view",
    "type": "function"
  },
  {
    "inputs": [],
    "name": "tree",
    "outputs": [
      {
        "components": [
          { "internalType": "bytes32[32]", "name": "branch", "type": "bytes32[32]" },
          { "internalType": "uint256", "name": "count", "type": "uint256" }
        ],
        "internalType": "struct MerkleLib.Tree",
        "name": "",
        "type": "tuple"
      }
    ],
    "stateMutability": "view",
    "type": "function"
  }
]
```
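For context, type-safe Rust bindings for this ABI can be generated with ethers-rs; a hedged sketch follows, where the file path is purely illustrative and the repo's own build tooling decides where ABIs actually live.

```rust
use ethers::contract::abigen;

// Generates a `MerkleTreeHook` contract wrapper from the ABI above.
// The path is an assumption for illustration, not the repo's real layout.
abigen!(MerkleTreeHook, "./abis/MerkleTreeHook.abi.json");
```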
@@ -0,0 +1,282 @@
```rust
#![allow(missing_docs)]
use std::num::NonZeroU64;
use std::ops::RangeInclusive;
use std::sync::Arc;

use async_trait::async_trait;
use ethers::prelude::Middleware;
use ethers_core::types::BlockNumber;
use hyperlane_core::accumulator::incremental::IncrementalMerkle;
use tracing::instrument;

use hyperlane_core::{
    ChainCommunicationError, ChainResult, Checkpoint, ContractLocator, HyperlaneChain,
    HyperlaneContract, HyperlaneDomain, HyperlaneProvider, Indexer, LogMeta, MerkleTreeHook,
    MerkleTreeInsertion, SequenceIndexer, H256,
};

use crate::contracts::merkle_tree_hook::MerkleTreeHook as MerkleTreeHookContract;
use crate::trait_builder::BuildableWithProvider;
use crate::EthereumProvider;

pub struct MerkleTreeHookBuilder {}

#[async_trait]
impl BuildableWithProvider for MerkleTreeHookBuilder {
    type Output = Box<dyn MerkleTreeHook>;

    async fn build_with_provider<M: Middleware + 'static>(
        &self,
        provider: M,
        locator: &ContractLocator,
    ) -> Self::Output {
        Box::new(EthereumMerkleTreeHook::new(Arc::new(provider), locator))
    }
}

pub struct MerkleTreeHookIndexerBuilder {
    pub finality_blocks: u32,
}

#[async_trait]
impl BuildableWithProvider for MerkleTreeHookIndexerBuilder {
    type Output = Box<dyn SequenceIndexer<MerkleTreeInsertion>>;

    async fn build_with_provider<M: Middleware + 'static>(
        &self,
        provider: M,
        locator: &ContractLocator,
    ) -> Self::Output {
        Box::new(EthereumMerkleTreeHookIndexer::new(
            Arc::new(provider),
            locator,
            self.finality_blocks,
        ))
    }
}

#[derive(Debug)]
/// Struct that retrieves event data for an Ethereum MerkleTreeHook
pub struct EthereumMerkleTreeHookIndexer<M>
where
    M: Middleware,
{
    contract: Arc<MerkleTreeHookContract<M>>,
    provider: Arc<M>,
    finality_blocks: u32,
}

impl<M> EthereumMerkleTreeHookIndexer<M>
where
    M: Middleware + 'static,
{
    /// Create new EthereumMerkleTreeHookIndexer
    pub fn new(provider: Arc<M>, locator: &ContractLocator, finality_blocks: u32) -> Self {
        Self {
            contract: Arc::new(MerkleTreeHookContract::new(
                locator.address,
                provider.clone(),
            )),
            provider,
            finality_blocks,
        }
    }
}

#[async_trait]
impl<M> Indexer<MerkleTreeInsertion> for EthereumMerkleTreeHookIndexer<M>
where
    M: Middleware + 'static,
{
    #[instrument(err, skip(self))]
    async fn fetch_logs(
        &self,
        range: RangeInclusive<u32>,
    ) -> ChainResult<Vec<(MerkleTreeInsertion, LogMeta)>> {
        let events = self
            .contract
            .inserted_into_tree_filter()
            .from_block(*range.start())
            .to_block(*range.end())
            .query_with_meta()
            .await?;

        let logs = events
            .into_iter()
            .map(|(log, log_meta)| {
                (
                    MerkleTreeInsertion::new(log.index, H256::from(log.message_id)),
                    log_meta.into(),
                )
            })
            .collect();
        Ok(logs)
    }

    #[instrument(level = "debug", err, ret, skip(self))]
    async fn get_finalized_block_number(&self) -> ChainResult<u32> {
        Ok(self
            .provider
            .get_block_number()
            .await
            .map_err(ChainCommunicationError::from_other)?
            .as_u32()
            .saturating_sub(self.finality_blocks))
    }
}

#[async_trait]
impl<M> SequenceIndexer<MerkleTreeInsertion> for EthereumMerkleTreeHookIndexer<M>
where
    M: Middleware + 'static,
{
    async fn sequence_and_tip(&self) -> ChainResult<(Option<u32>, u32)> {
        // The InterchainGasPaymasterIndexerBuilder must return a `SequenceIndexer` type.
        // It's fine if only a blanket implementation is provided for EVM chains, since their
        // indexing only uses the `Index` trait, which is a supertrait of `SequenceIndexer`.
        // TODO: if `SequenceIndexer` turns out to not depend on `Indexer` at all, then the supertrait
        // dependency could be removed, even if the builder would still need to return a type that is both
        // `SequenceIndexer` and `Indexer`.
        let tip = self.get_finalized_block_number().await?;
        Ok((None, tip))
    }
}

/// A reference to a MerkleTreeHook contract on some Ethereum chain
#[derive(Debug)]
pub struct EthereumMerkleTreeHook<M>
where
    M: Middleware,
{
    contract: Arc<MerkleTreeHookContract<M>>,
    domain: HyperlaneDomain,
    provider: Arc<M>,
}

impl<M> EthereumMerkleTreeHook<M>
where
    M: Middleware,
{
    /// Create a reference to a merkle tree hook at a specific Ethereum address on some
    /// chain
    pub fn new(provider: Arc<M>, locator: &ContractLocator) -> Self {
        Self {
            contract: Arc::new(MerkleTreeHookContract::new(
                locator.address,
                provider.clone(),
            )),
            domain: locator.domain.clone(),
            provider,
        }
    }
}

impl<M> HyperlaneChain for EthereumMerkleTreeHook<M>
where
    M: Middleware + 'static,
{
    fn domain(&self) -> &HyperlaneDomain {
        &self.domain
    }

    fn provider(&self) -> Box<dyn HyperlaneProvider> {
        Box::new(EthereumProvider::new(
            self.provider.clone(),
            self.domain.clone(),
        ))
    }
}

impl<M> HyperlaneContract for EthereumMerkleTreeHook<M>
where
    M: Middleware + 'static,
{
    fn address(&self) -> H256 {
        self.contract.address().into()
    }
}

#[async_trait]
impl<M> MerkleTreeHook for EthereumMerkleTreeHook<M>
where
    M: Middleware + 'static,
{
    #[instrument(skip(self))]
    async fn latest_checkpoint(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<Checkpoint> {
        let lag = maybe_lag.map(|v| v.get()).unwrap_or(0).into();

        let fixed_block_number: BlockNumber = self
            .provider
            .get_block_number()
            .await
            .map_err(ChainCommunicationError::from_other)?
            .saturating_sub(lag)
            .into();

        let (root, index) = self
            .contract
            .latest_checkpoint()
            .block(fixed_block_number)
            .call()
            .await?;
        Ok(Checkpoint {
            merkle_tree_hook_address: self.address(),
            mailbox_domain: self.domain.id(),
            root: root.into(),
            index,
        })
    }

    #[instrument(skip(self))]
    #[allow(clippy::needless_range_loop)]
    async fn tree(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle> {
        let lag = maybe_lag.map(|v| v.get()).unwrap_or(0).into();

        let fixed_block_number: BlockNumber = self
            .provider
            .get_block_number()
            .await
            .map_err(ChainCommunicationError::from_other)?
            .saturating_sub(lag)
            .into();

        // TODO: implement From<Tree> for IncrementalMerkle
        let raw_tree = self
            .contract
            .tree()
            .block(fixed_block_number)
            .call()
            .await?;
        let branch = raw_tree
            .branch
            .iter()
            .map(|v| v.into())
            .collect::<Vec<_>>()
            .try_into()
            .unwrap();

        let tree = IncrementalMerkle::new(branch, raw_tree.count.as_usize());

        Ok(tree)
    }

    #[instrument(skip(self))]
    async fn count(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<u32> {
        let lag = maybe_lag.map(|v| v.get()).unwrap_or(0).into();
        let fixed_block_number: BlockNumber = self
            .provider
            .get_block_number()
            .await
            .map_err(ChainCommunicationError::from_other)?
            .saturating_sub(lag)
            .into();

        let count = self
            .contract
            .count()
            .block(fixed_block_number)
            .call()
            .await?;
        Ok(count)
    }
}
```
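A brief usage sketch, assuming a hook built via `MerkleTreeHookBuilder` elsewhere; it only illustrates the `lag` semantics (query N blocks behind the tip to avoid unfinalized state), not any specific agent code path.

```rust
// Illustrative only: query the on-chain hook 5 blocks behind the latest block.
async fn print_checkpoint(hook: &dyn MerkleTreeHook) -> ChainResult<()> {
    let lag = NonZeroU64::new(5);
    let checkpoint = hook.latest_checkpoint(lag).await?;
    let count = hook.count(lag).await?;
    println!(
        "root={:?} index={} count={}",
        checkpoint.root, checkpoint.index, count
    );
    Ok(())
}
```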
@@ -0,0 +1,101 @@
```rust
use std::{num::NonZeroU64, ops::RangeInclusive};

use async_trait::async_trait;
use derive_new::new;
use hyperlane_core::{
    accumulator::incremental::IncrementalMerkle, ChainCommunicationError, ChainResult, Checkpoint,
    Indexer, LogMeta, MerkleTreeHook, MerkleTreeInsertion, SequenceIndexer,
};
use hyperlane_sealevel_mailbox::accounts::OutboxAccount;
use solana_sdk::commitment_config::CommitmentConfig;
use tracing::instrument;

use crate::SealevelMailbox;

#[async_trait]
impl MerkleTreeHook for SealevelMailbox {
    #[instrument(err, ret, skip(self))]
    async fn tree(&self, lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle> {
        assert!(
            lag.is_none(),
            "Sealevel does not support querying point-in-time"
        );

        let outbox_account = self
            .rpc_client
            .get_account_with_commitment(&self.outbox.0, CommitmentConfig::finalized())
            .await
            .map_err(ChainCommunicationError::from_other)?
            .value
            .ok_or_else(|| {
                ChainCommunicationError::from_other_str("Could not find account data")
            })?;
        let outbox = OutboxAccount::fetch(&mut outbox_account.data.as_ref())
            .map_err(ChainCommunicationError::from_other)?
            .into_inner();

        Ok(outbox.tree)
    }

    #[instrument(err, ret, skip(self))]
    async fn latest_checkpoint(&self, lag: Option<NonZeroU64>) -> ChainResult<Checkpoint> {
        assert!(
            lag.is_none(),
            "Sealevel does not support querying point-in-time"
        );

        let tree = self.tree(lag).await?;

        let root = tree.root();
        let count: u32 = tree
            .count()
            .try_into()
            .map_err(ChainCommunicationError::from_other)?;
        let index = count.checked_sub(1).ok_or_else(|| {
            ChainCommunicationError::from_contract_error_str(
                "Outbox is empty, cannot compute checkpoint",
            )
        })?;
        let checkpoint = Checkpoint {
            merkle_tree_hook_address: self.program_id.to_bytes().into(),
            mailbox_domain: self.domain.id(),
            root,
            index,
        };
        Ok(checkpoint)
    }

    #[instrument(err, ret, skip(self))]
    async fn count(&self, _maybe_lag: Option<NonZeroU64>) -> ChainResult<u32> {
        let tree = self.tree(_maybe_lag).await?;

        tree.count()
            .try_into()
            .map_err(ChainCommunicationError::from_other)
    }
}

/// Struct that retrieves event data for a Sealevel merkle tree hook contract
#[derive(Debug, new)]
pub struct SealevelMerkleTreeHookIndexer {}

#[async_trait]
impl Indexer<MerkleTreeInsertion> for SealevelMerkleTreeHookIndexer {
    async fn fetch_logs(
        &self,
        _range: RangeInclusive<u32>,
    ) -> ChainResult<Vec<(MerkleTreeInsertion, LogMeta)>> {
        Ok(vec![])
    }

    async fn get_finalized_block_number(&self) -> ChainResult<u32> {
        Ok(0)
    }
}

#[async_trait]
impl SequenceIndexer<MerkleTreeInsertion> for SealevelMerkleTreeHookIndexer {
    async fn sequence_and_tip(&self) -> ChainResult<(Option<u32>, u32)> {
        Ok((None, 0))
    }
}
```
@@ -1,44 +0,0 @@
```markdown
# Hyperlane-Agent Helm Chart

![Version: 0.1.0](https://img.shields.io/badge/Version-0.1.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 0.1.0](https://img.shields.io/badge/AppVersion-0.1.0-informational?style=flat-square)

A Helm Chart that encapsulates the deployment of the Hyperlane Rust Agent(s). It is currently designed to be deployed against a Google Kubernetes Engine cluster, but specifying another PVC Storage Class should be sufficient to make it compatible with other cloud providers.

Additional documentation is present in comments in `values.yaml`.

## Values

| Key | Type | Default | Description |
| --- | ---- | ------- | ----------- |
| affinity | object | `{}` |  |
| fullnameOverride | string | `""` |  |
| image.pullPolicy | string | `"Always"` |  |
| image.repository | string | `"gcr.io/clabs-optics/optics-agent"` | Main repository for Hyperlane Agent binaries, provided by cLabs |
| image.tag | string | `"latest"` | Overrides the image tag whose default is the chart appVersion. |
| imagePullSecrets | list | `[]` |  |
| nameOverride | string | `""` |  |
| nodeSelector | object | `{}` |  |
| hyperlane | object | `{"outboxChain":{"address":null,"connectionType":null,"connectionUrl":null,"domain":null,"name":"goerli","protocol":null},enabled":false,"messageInterval":null,"signers":[{"key":"","name":"goerli"},{"key":"","name":"alfajores"}]},"processor":{"enabled":false,"pollingInterval":null,"signers":[{"key":"","name":"goerli"},{"key":"","name":"alfajores"}]},"relayer":{"enabled":false,"pollingInterval":null,"signers":[{"key":"","name":"goerli"},{"key":"","name":"alfajores"}]},"inboxChains":[{"address":null,"connectionType":null,"connectionUrl":null,"domain":null,"name":"alfajores","protocol":null}],"runEnv":"default","validator":{"signer":"","enabled":false,"pollingInterval":null,"signers":[{"key":"","name":"goerli"},{"key":"","name":"alfajores"}],"updatePause":null}}` | Hyperlane overrides. By default, Hyperlane Agents load the config baked into the Docker image; pass values here to override the values in that config. Note: for successful operation, one _must_ pass signer keys, as they are not baked into the image for security reasons. |
| hyperlane.outboxChain.address | string | `nil` | The contract address for the home contract |
| hyperlane.outboxChain.connectionUrl | string | `nil` | Connection string pointing to an RPC endpoint for the home chain |
| hyperlane.outboxChain.domain | string | `nil` | The hard-coded domain corresponding to this blockchain |
| hyperlane.outboxChain.protocol | string | `nil` | RPC style |
| hyperlane.relayer.enabled | bool | `false` | Enables or disables the relayer |
| hyperlane.inboxChains | list | `[{"address":null,"connectionType":null,"connectionUrl":null,"domain":null,"name":"alfajores","protocol":null}]` | Replica chain overrides, a sequence |
| hyperlane.inboxChains[0].address | string | `nil` | The contract address for the replica contract |
| hyperlane.inboxChains[0].connectionUrl | string | `nil` | Connection string pointing to an RPC endpoint for the replica chain |
| hyperlane.validator.signer | string | `""` | Specialized key used by the validator and watcher to sign attestations, separate from validator.keys |
| hyperlane.validator.enabled | bool | `false` | Enables or disables the validator |
| hyperlane.validator.pollingInterval | string | `nil` | How long to wait between checking for updates |
| hyperlane.validator.signers | list | `[{"key":"","name":"goerli"},{"key":"","name":"alfajores"}]` | Transaction signing keys for home and replica(s) |
| podAnnotations | object | `{}` |  |
| podSecurityContext | object | `{}` |  |
| replicaCount | int | `1` |  |
| resources | object | `{}` |  |
| securityContext | object | `{}` |  |
| tolerations | list | `[]` |  |
| volumeStorageClass | string | `"standard"` | Default to standard storageclass provided by GKE |

---

Autogenerated from chart metadata using [helm-docs v1.5.0](https://github.com/norwoodj/helm-docs/releases/v1.5.0)
```
@@ -1,435 +0,0 @@
```rust
//! This module is responsible for parsing the agent's settings using the old config format.

// TODO: Remove this module once we have finished migrating to the new format.

use std::{
    collections::{HashMap, HashSet},
    path::PathBuf,
};

use ethers_prometheus::middleware::PrometheusMiddlewareConf;
use eyre::{eyre, Context};
use hyperlane_core::{cfg_unwrap_all, config::*, utils::hex_or_base58_to_h256, HyperlaneDomain};
use serde::Deserialize;

use super::envs::*;
use crate::settings::{
    chains::IndexSettings, trace::TracingConfig, ChainConf, ChainConnectionConf,
    CheckpointSyncerConf, CoreContractAddresses, Settings, SignerConf,
};

/// Raw base settings.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DeprecatedRawSettings {
    chains: Option<HashMap<String, DeprecatedRawChainConf>>,
    defaultsigner: Option<DeprecatedRawSignerConf>,
    metrics: Option<StrOrInt>,
    tracing: Option<TracingConfig>,
}

impl FromRawConf<DeprecatedRawSettings, Option<&HashSet<&str>>> for Settings {
    fn from_config_filtered(
        raw: DeprecatedRawSettings,
        cwp: &ConfigPath,
        filter: Option<&HashSet<&str>>,
    ) -> Result<Self, ConfigParsingError> {
        let mut err = ConfigParsingError::default();
        let chains: HashMap<String, ChainConf> = if let Some(mut chains) = raw.chains {
            let default_signer: Option<SignerConf> = raw.defaultsigner.and_then(|r| {
                r.parse_config(&cwp.join("defaultsigner"))
                    .take_config_err(&mut err)
            });
            if let Some(filter) = filter {
                chains.retain(|k, _| filter.contains(&k.as_str()));
            }
            let chains_path = cwp + "chains";
            chains
                .into_iter()
                .map(|(k, v)| {
                    let cwp = &chains_path + &k;
                    let k = k.to_ascii_lowercase();
                    let mut parsed: ChainConf = v.parse_config(&cwp)?;
                    if let Some(default_signer) = &default_signer {
                        parsed.signer.get_or_insert_with(|| default_signer.clone());
                    }
                    Ok((k, parsed))
                })
                .filter_map(|res| match res {
                    Ok((k, v)) => Some((k, v)),
                    Err(e) => {
                        err.merge(e);
                        None
                    }
                })
                .collect()
        } else {
            Default::default()
        };
        let tracing = raw.tracing.unwrap_or_default();
        let metrics = raw
            .metrics
            .and_then(|port| port.try_into().take_err(&mut err, || cwp + "metrics"))
            .unwrap_or(9090);

        err.into_result(Self {
            chains,
            metrics_port: metrics,
            tracing,
        })
    }
}

#[derive(Deserialize, Debug)]
#[serde(tag = "protocol", content = "connection", rename_all = "camelCase")]
enum DeprecatedRawChainConnectionConf {
    Ethereum(h_eth::RawConnectionConf),
    Fuel(h_fuel::DeprecatedRawConnectionConf),
    Sealevel(h_sealevel::DeprecatedRawConnectionConf),
    #[serde(other)]
    Unknown,
}

impl FromRawConf<DeprecatedRawChainConnectionConf> for ChainConnectionConf {
    fn from_config_filtered(
        raw: DeprecatedRawChainConnectionConf,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        use DeprecatedRawChainConnectionConf::*;
        match raw {
            Ethereum(r) => Ok(Self::Ethereum(r.parse_config(&cwp.join("connection"))?)),
            Fuel(r) => Ok(Self::Fuel(r.parse_config(&cwp.join("connection"))?)),
            Sealevel(r) => Ok(Self::Sealevel(r.parse_config(&cwp.join("connection"))?)),
            Unknown => {
                Err(eyre!("Unknown chain protocol")).into_config_result(|| cwp.join("protocol"))
            }
        }
    }
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct DeprecatedRawCoreContractAddresses {
    mailbox: Option<String>,
    interchain_gas_paymaster: Option<String>,
    validator_announce: Option<String>,
}

impl FromRawConf<DeprecatedRawCoreContractAddresses> for CoreContractAddresses {
    fn from_config_filtered(
        raw: DeprecatedRawCoreContractAddresses,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        let mut err = ConfigParsingError::default();

        macro_rules! parse_addr {
            ($name:ident) => {
                let $name = raw
                    .$name
                    .ok_or_else(|| {
                        eyre!(
                            "Missing {} core contract address",
                            stringify!($name).replace('_', " ")
                        )
                    })
                    .take_err(&mut err, || cwp + stringify!($name))
                    .and_then(|v| {
                        hex_or_base58_to_h256(&v).take_err(&mut err, || cwp + stringify!($name))
                    });
            };
        }

        parse_addr!(mailbox);
        parse_addr!(interchain_gas_paymaster);
        parse_addr!(validator_announce);

        cfg_unwrap_all!(cwp, err: [mailbox, interchain_gas_paymaster, validator_announce]);

        err.into_result(Self {
            mailbox,
            interchain_gas_paymaster,
            validator_announce,
        })
    }
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct DeprecatedRawIndexSettings {
    from: Option<StrOrInt>,
    chunk: Option<StrOrInt>,
    mode: Option<String>,
}

impl FromRawConf<DeprecatedRawIndexSettings> for IndexSettings {
    fn from_config_filtered(
        raw: DeprecatedRawIndexSettings,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        let mut err = ConfigParsingError::default();

        let from = raw
            .from
            .and_then(|v| v.try_into().take_err(&mut err, || cwp + "from"))
            .unwrap_or_default();

        let chunk_size = raw
            .chunk
            .and_then(|v| v.try_into().take_err(&mut err, || cwp + "chunk"))
            .unwrap_or(1999);

        let mode = raw
            .mode
            .map(serde_json::Value::from)
            .and_then(|m| {
                serde_json::from_value(m)
                    .context("Invalid mode")
                    .take_err(&mut err, || cwp + "mode")
            })
            .unwrap_or_default();

        err.into_result(Self {
            from,
            chunk_size,
            mode,
        })
    }
}

/// A raw chain setup is a domain ID, an address on that chain (where the
/// mailbox is deployed) and details for connecting to the chain API.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DeprecatedRawChainConf {
    name: Option<String>,
    domain: Option<StrOrInt>,
    pub(super) signer: Option<DeprecatedRawSignerConf>,
    finality_blocks: Option<StrOrInt>,
    addresses: Option<DeprecatedRawCoreContractAddresses>,
    #[serde(flatten, default)]
    connection: Option<DeprecatedRawChainConnectionConf>,
    // TODO: if people actually use the metrics conf we should also add a raw form.
    #[serde(default)]
    metrics_conf: Option<PrometheusMiddlewareConf>,
    #[serde(default)]
    index: Option<DeprecatedRawIndexSettings>,
}

impl FromRawConf<DeprecatedRawChainConf> for ChainConf {
    fn from_config_filtered(
        raw: DeprecatedRawChainConf,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        let mut err = ConfigParsingError::default();

        let connection = raw
            .connection
            .ok_or_else(|| eyre!("Missing `connection` configuration"))
            .take_err(&mut err, || cwp + "connection")
            .and_then(|r| r.parse_config(cwp).take_config_err(&mut err));

        let domain = connection.as_ref().and_then(|c: &ChainConnectionConf| {
            let protocol = c.protocol();
            let domain_id = raw
                .domain
                .ok_or_else(|| eyre!("Missing `domain` configuration"))
                .take_err(&mut err, || cwp + "domain")
                .and_then(|r| {
                    r.try_into()
                        .context("Invalid domain id, expected integer")
                        .take_err(&mut err, || cwp + "domain")
                });
            let name = raw
                .name
                .as_deref()
                .ok_or_else(|| eyre!("Missing domain `name` configuration"))
                .take_err(&mut err, || cwp + "name");
            HyperlaneDomain::from_config(domain_id?, name?, protocol)
                .take_err(&mut err, || cwp.clone())
        });

        let addresses = raw
            .addresses
            .ok_or_else(|| eyre!("Missing `addresses` configuration for core contracts"))
            .take_err(&mut err, || cwp + "addresses")
            .and_then(|v| {
                v.parse_config(&cwp.join("addresses"))
                    .take_config_err(&mut err)
            });

        let signer = raw.signer.and_then(|v| -> Option<SignerConf> {
            v.parse_config(&cwp.join("signer"))
                .take_config_err(&mut err)
        });

        let finality_blocks = raw
            .finality_blocks
            .and_then(|v| {
                v.try_into()
                    .context("Invalid `finalityBlocks`, expected integer")
                    .take_err(&mut err, || cwp + "finality_blocks")
            })
            .unwrap_or(0);

        let index = raw
            .index
            .and_then(|v| v.parse_config(&cwp.join("index")).take_config_err(&mut err))
            .unwrap_or_default();

        let metrics_conf = raw.metrics_conf.unwrap_or_default();

        cfg_unwrap_all!(cwp, err: [connection, domain, addresses]);

        err.into_result(Self {
            connection,
            domain,
            addresses,
            signer,
            finality_blocks,
            index,
            metrics_conf,
        })
    }
}

/// Raw signer types
#[derive(Debug, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct DeprecatedRawSignerConf {
    #[serde(rename = "type")]
    signer_type: Option<String>,
    key: Option<String>,
    id: Option<String>,
    region: Option<String>,
}

/// Raw checkpoint syncer types
#[derive(Debug, Deserialize)]
#[serde(tag = "type", rename_all = "camelCase")]
pub enum DeprecatedRawCheckpointSyncerConf {
    /// A local checkpoint syncer
    LocalStorage {
        /// Path
        path: Option<String>,
    },
    /// A checkpoint syncer on S3
    S3 {
        /// Bucket name
        bucket: Option<String>,
        /// S3 Region
        region: Option<String>,
        /// Folder name inside bucket - defaults to the root of the bucket
        folder: Option<String>,
    },
    /// Unknown checkpoint syncer type was specified
    #[serde(other)]
    Unknown,
}

impl FromRawConf<DeprecatedRawSignerConf> for SignerConf {
    fn from_config_filtered(
        raw: DeprecatedRawSignerConf,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        let key_path = || cwp + "key";
        let region_path = || cwp + "region";

        match raw.signer_type.as_deref() {
            Some("hexKey") => Ok(Self::HexKey {
                key: raw
                    .key
                    .ok_or_else(|| eyre!("Missing `key` for HexKey signer"))
                    .into_config_result(key_path)?
                    .parse()
                    .into_config_result(key_path)?,
            }),
            Some("aws") => Ok(Self::Aws {
                id: raw
                    .id
                    .ok_or_else(|| eyre!("Missing `id` for Aws signer"))
                    .into_config_result(|| cwp + "id")?,
                region: raw
                    .region
                    .ok_or_else(|| eyre!("Missing `region` for Aws signer"))
                    .into_config_result(region_path)?
                    .parse()
                    .into_config_result(region_path)?,
            }),
            Some(t) => Err(eyre!("Unknown signer type `{t}`")).into_config_result(|| cwp + "type"),
            None if raw.key.is_some() => Ok(Self::HexKey {
                key: raw.key.unwrap().parse().into_config_result(key_path)?,
            }),
            None if raw.id.is_some() | raw.region.is_some() => Ok(Self::Aws {
                id: raw
                    .id
                    .ok_or_else(|| eyre!("Missing `id` for Aws signer"))
                    .into_config_result(|| cwp + "id")?,
                region: raw
                    .region
                    .ok_or_else(|| eyre!("Missing `region` for Aws signer"))
                    .into_config_result(region_path)?
                    .parse()
                    .into_config_result(region_path)?,
            }),
            None => Ok(Self::Node),
        }
    }
}

impl FromRawConf<DeprecatedRawCheckpointSyncerConf> for CheckpointSyncerConf {
    fn from_config_filtered(
        raw: DeprecatedRawCheckpointSyncerConf,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        match raw {
            DeprecatedRawCheckpointSyncerConf::LocalStorage { path } => {
                let path: PathBuf = path
                    .ok_or_else(|| eyre!("Missing `path` for LocalStorage checkpoint syncer"))
                    .into_config_result(|| cwp + "path")?
                    .parse()
                    .into_config_result(|| cwp + "path")?;
                if !path.exists() {
                    std::fs::create_dir_all(&path)
                        .with_context(|| {
                            format!(
                                "Failed to create local checkpoint syncer storage directory at {:?}",
                                path
                            )
                        })
                        .into_config_result(|| cwp + "path")?;
                } else if !path.is_dir() {
                    Err(eyre!(
                        "LocalStorage checkpoint syncer path is not a directory"
                    ))
                    .into_config_result(|| cwp + "path")?;
                }
                Ok(Self::LocalStorage { path })
            }
            DeprecatedRawCheckpointSyncerConf::S3 {
                bucket,
                folder,
                region,
            } => Ok(Self::S3 {
                bucket: bucket
                    .ok_or_else(|| eyre!("Missing `bucket` for S3 checkpoint syncer"))
                    .into_config_result(|| cwp + "bucket")?,
                folder,
                region: region
                    .ok_or_else(|| eyre!("Missing `region` for S3 checkpoint syncer"))
                    .into_config_result(|| cwp + "region")?
                    .parse()
                    .into_config_result(|| cwp + "region")?,
            }),
            DeprecatedRawCheckpointSyncerConf::Unknown => {
                Err(eyre!("Missing `type` for checkpoint syncer"))
                    .into_config_result(|| cwp + "type")
            }
        }
    }
}
```
@@ -0,0 +1,66 @@
```rust
use std::fmt::Debug;

use config::{ConfigError, Map, Source, Value, ValueKind};
use convert_case::{Case, Casing};
use derive_new::new;
use itertools::Itertools;

#[derive(Clone, Debug, new)]
pub struct CaseAdapter<S> {
    inner: S,
    casing: Case,
}

impl<S> Source for CaseAdapter<S>
where
    S: Source + Clone + Send + Sync + 'static,
{
    fn clone_into_box(&self) -> Box<dyn Source + Send + Sync> {
        Box::new(self.clone())
    }

    fn collect(&self) -> Result<Map<String, Value>, ConfigError> {
        self.inner.collect().map(|m| {
            m.into_iter()
                .map(|(k, v)| recase_pair(k, v, self.casing))
                .collect()
        })
    }
}

fn recase_pair(key: String, mut val: Value, case: Case) -> (String, Value) {
    let key = split_and_recase_key(".", Some(case), key);
    match &mut val.kind {
        ValueKind::Table(table) => {
            let tmp = table
                .drain()
                .map(|(k, v)| recase_pair(k, v, case))
                .collect_vec();
            table.extend(tmp.into_iter());
        }
        ValueKind::Array(ary) => {
            let tmp = ary
                .drain(..)
                .map(|v| recase_pair(String::new(), v, case).1)
                .collect_vec();
            ary.extend(tmp.into_iter())
        }
        _ => {}
    }
    (key, val)
}

/// Split a key on the given separator, optionally re-case each component, and re-join the
/// components with the standard `config` crate separator `.`.
fn split_and_recase_key(sep: &str, case: Option<Case>, key: String) -> String {
    if let Some(case) = case {
        // if case is given, replace case of each key component and separate them with `.`
        key.split(sep).map(|s| s.to_case(case)).join(".")
    } else if !sep.is_empty() && sep != "." {
        // Just standardize the separator to `.`
        key.replace(sep, ".")
    } else {
        // no changes needed if there was no separator defined and we are preserving case.
        key
    }
}
```
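To make the recasing concrete, here is a small illustrative check of `split_and_recase_key` under assumed inputs (not a test from this PR); it presumes `Case` is in scope as in the module above.

```rust
// Illustrative only: how keys from differently-cased sources are normalized.
fn recase_examples() {
    // CaseAdapter path: split on `.` and re-case each component to camelCase.
    assert_eq!(
        split_and_recase_key(".", Some(Case::Camel), "chains.my_chain.connection_url".to_string()),
        "chains.myChain.connectionUrl"
    );
    // CLI path: split on `-`, flatten the case of each component.
    assert_eq!(
        split_and_recase_key("-", Some(Case::Flat), "KEY-B".to_string()),
        "key.b"
    );
    // No casing requested: only the separator is normalized to `.`.
    assert_eq!(
        split_and_recase_key("-", None, "key-a".to_string()),
        "key.a"
    );
}
```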
@@ -1,343 +0,0 @@
```rust
// TODO: Remove this file after deprecated config parsing has been removed.

use std::ffi::{OsStr, OsString};

use config::{ConfigError, Map, Source, Value, ValueKind};
use convert_case::Case;

use crate::settings::loader::split_and_recase_key;

/// A source for loading configuration from command line arguments.
/// Command line argument keys are case-insensitive, and the following forms are
/// supported:
///
/// * `--key=value`
/// * `--key="value"`
/// * `--key='value'`
/// * `--key value`
/// * `--key` (value is an empty string)
#[must_use]
#[derive(Clone, Debug, Default)]
pub struct DeprecatedCommandLineArguments {
    /// Optional character sequence that separates each key segment in an
    /// environment key pattern. Consider a nested configuration such as
    /// `redis.password`, a separator of `-` would allow an environment key
    /// of `redis-password` to match.
    separator: Option<String>,

    /// Ignore empty env values (treat as unset).
    ignore_empty: bool,

    /// Alternate source for the environment. This can be used when you want to
    /// test your own code using this source, without the need to change the
    /// actual system environment variables.
    source: Option<Vec<OsString>>,
}

#[allow(unused)]
impl DeprecatedCommandLineArguments {
    pub fn separator(mut self, s: &str) -> Self {
        self.separator = Some(s.into());
        self
    }

    pub fn ignore_empty(mut self, ignore: bool) -> Self {
        self.ignore_empty = ignore;
        self
    }

    pub fn source<I, S>(mut self, source: I) -> Self
    where
        I: IntoIterator<Item = S>,
        S: AsRef<OsStr>,
    {
        self.source = Some(source.into_iter().map(|s| s.as_ref().to_owned()).collect());
        self
    }
}

impl Source for DeprecatedCommandLineArguments {
    fn clone_into_box(&self) -> Box<dyn Source + Send + Sync> {
        Box::new((*self).clone())
    }

    fn collect(&self) -> Result<Map<String, Value>, ConfigError> {
        let mut m = Map::new();
        let uri: String = "program argument".into();

        let separator = self.separator.as_deref().unwrap_or("-");

        let mut args = if let Some(source) = &self.source {
            ArgumentParser::from_vec(source.clone())
        } else {
            ArgumentParser::from_env()
        };

        while let Some((key, value)) = args
            .next()
            .transpose()
            .map_err(|e| ConfigError::Foreign(Box::new(e)))?
        {
            if self.ignore_empty && value.is_empty() {
                continue;
            }

            let mut key = split_and_recase_key(separator, Some(Case::Flat), key);
            if key.ends_with("interchaingaspaymaster") {
                key = key.replace("interchaingaspaymaster", "interchainGasPaymaster");
            } else if key.ends_with("validatorannounce") {
                key = key.replace("validatorannounce", "validatorAnnounce");
            }

            m.insert(key, Value::new(Some(&uri), ValueKind::String(value)));
        }

        let remaining = args.finish();
        if remaining.is_empty() {
            Ok(m)
        } else {
            Err(ConfigError::Message("Could not parse all arguments".into()))
        }
    }
}

/// An ultra simple CLI arguments parser.
/// Adapted from pico-args 0.5.0.
#[derive(Clone, Debug)]
pub struct ArgumentParser(Vec<OsString>);

impl ArgumentParser {
    /// Creates a parser from a vector of arguments.
    ///
    /// The executable path **must** be removed.
    ///
    /// This can be used for supporting `--` arguments to forward to another
    /// program.
    fn from_vec(args: Vec<OsString>) -> Self {
        ArgumentParser(args)
    }

    /// Creates a parser from [`env::args_os`].
    ///
    /// The executable path will be removed.
    ///
    /// [`env::args_os`]: https://doc.rust-lang.org/stable/std/env/fn.args_os.html
    fn from_env() -> Self {
        let mut args: Vec<_> = std::env::args_os().collect();
        args.remove(0);
        ArgumentParser(args)
    }

    /// Returns a list of remaining arguments.
    ///
    /// It's up to the caller what to do with them.
    /// One can report an error about unused arguments,
    /// others can use them for further processing.
    fn finish(self) -> Vec<OsString> {
        self.0
    }
}

impl Iterator for ArgumentParser {
    type Item = Result<(String, String), Error>;

    fn next(&mut self) -> Option<Self::Item> {
        let (k, v, kind, idx) = match self.find_next_kv_pair() {
            Ok(Some(tup)) => tup,
            Ok(None) => return None,
            Err(e) => return Some(Err(e)),
        };

        match kind {
            PairKind::SingleArgument => {
                self.0.remove(idx);
            }
            PairKind::TwoArguments => {
                self.0.remove(idx + 1);
                self.0.remove(idx);
            }
        }

        Some(Ok((k, v)))
    }
}

// internal workings
impl ArgumentParser {
    #[inline(never)]
    fn find_next_kv_pair(&mut self) -> Result<Option<(String, String, PairKind, usize)>, Error> {
        let Some(idx) = self.index_of_next_key() else {
            return Ok(None);
        };
        // full term without leading '--'
        let term = &os_to_str(&self.0[idx])?[2..];
        if term.is_empty() {
            return Err(Error::EmptyKey);
        }

        if let Some((key, value)) = term.split_once('=') {
            // Parse a `--key=value` pair.
            let key = key.to_owned();

            // Check for quoted value.
            let value = if starts_with(value, b'"') {
                if !ends_with(value, b'"') {
                    // A closing quote must be the same as an opening one.
                    return Err(Error::UnmatchedQuote(key));
                }
                &value[1..value.len() - 1]
            } else if starts_with(value, b'\'') {
                if !ends_with(value, b'\'') {
                    // A closing quote must be the same as an opening one.
                    return Err(Error::UnmatchedQuote(key));
                }
                &value[1..value.len() - 1]
            } else {
                value
            };

            Ok(Some((key, value.to_owned(), PairKind::SingleArgument, idx)))
        } else {
            // Parse a `--key value` pair.
            let key = term.to_owned();
            let value = self
                .0
                .get(idx + 1)
                .map(|v| os_to_str(v))
                .transpose()?
                .unwrap_or("");

            if value.is_empty() || value.starts_with('-') {
                // the next value is another key
                Ok(Some((key, "".to_owned(), PairKind::SingleArgument, idx)))
            } else {
                Ok(Some((key, value.to_owned(), PairKind::TwoArguments, idx)))
            }
        }
    }

    fn index_of_next_key(&self) -> Option<usize> {
        self.0.iter().position(|v| {
            #[cfg(unix)]
            {
                use std::os::unix::ffi::OsStrExt;
                v.len() >= 2 && &v.as_bytes()[0..2] == b"--"
            }
            #[cfg(not(unix))]
            {
                v.len() >= 2 && v.to_str().map(|v| v.starts_with("--")).unwrap_or(false)
            }
        })
    }
}

#[inline]
fn starts_with(text: &str, c: u8) -> bool {
    if text.is_empty() {
        false
    } else {
        text.as_bytes()[0] == c
    }
}

#[inline]
fn ends_with(text: &str, c: u8) -> bool {
    if text.is_empty() {
        false
    } else {
        text.as_bytes()[text.len() - 1] == c
    }
}

#[inline]
fn os_to_str(text: &OsStr) -> Result<&str, Error> {
    text.to_str().ok_or(Error::NonUtf8Argument)
}

/// A list of possible errors.
#[derive(Clone, Debug, thiserror::Error)]
pub enum Error {
    /// Arguments must be valid UTF-8 strings.
    #[error("argument is not a UTF-8 string")]
    NonUtf8Argument,

    /// Found `--` or a key with nothing after the prefix
    #[error("key name is empty (possibly after removing prefix)")]
    EmptyKey,

    /// Could not find closing quote for a value.
    #[error("unmatched quote in `{0}`")]
    UnmatchedQuote(String),
}

#[derive(Clone, Copy, PartialEq, Eq)]
enum PairKind {
    SingleArgument,
    TwoArguments,
}

#[cfg(test)]
mod test {
    use super::*;

    macro_rules! assert_arg {
        ($config:expr, $key:literal, $value:literal) => {
            let origin = "program argument".to_owned();
            assert_eq!(
                $config.remove($key),
                Some(Value::new(
                    Some(&origin),
                    ValueKind::String($value.to_owned())
                ))
            );
        };
    }

    const ARGUMENTS: &[&str] = &[
        "--key-a",
        "value-a",
        "--keY-b=value-b",
        "--key-c=\"value c\"",
        "--KEY-d='valUE d'",
        "--key-e=''",
        "--key-F",
        "--key-g=value-g",
        "--key-h",
    ];

    #[test]
    fn default_case() {
        let mut config = DeprecatedCommandLineArguments::default()
            .source(ARGUMENTS)
            .collect()
            .unwrap();

        assert_arg!(config, "key.a", "value-a");
        assert_arg!(config, "key.b", "value-b");
        assert_arg!(config, "key.c", "value c");
        assert_arg!(config, "key.d", "valUE d");
        assert_arg!(config, "key.e", "");
        assert_arg!(config, "key.f", "");
        assert_arg!(config, "key.g", "value-g");
        assert_arg!(config, "key.h", "");

        assert!(config.is_empty());
    }

    #[test]
    fn ignore_empty() {
        let mut config = DeprecatedCommandLineArguments::default()
            .source(ARGUMENTS)
            .ignore_empty(true)
            .collect()
            .unwrap();

        assert_arg!(config, "key.a", "value-a");
        assert_arg!(config, "key.b", "value-b");
        assert_arg!(config, "key.c", "value c");
        assert_arg!(config, "key.d", "valUE d");
        assert_arg!(config, "key.g", "value-g");

        assert!(config.is_empty());
    }
}
```
@@ -0,0 +1,33 @@
```rust
use std::fmt::Debug;
use std::num::NonZeroU64;

use async_trait::async_trait;
use auto_impl::auto_impl;

use crate::{
    accumulator::incremental::IncrementalMerkle, ChainResult, Checkpoint, HyperlaneContract,
};

/// Interface for the MerkleTreeHook chain contract. Allows abstraction over different
/// chains
#[async_trait]
#[auto_impl(&, Box, Arc)]
pub trait MerkleTreeHook: HyperlaneContract + Send + Sync + Debug {
    /// Return the incremental merkle tree in storage
    ///
    /// - `lag` is how far behind the current block to query, if not specified
    ///   it will query at the latest block.
    async fn tree(&self, lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle>;

    /// Gets the current leaf count of the merkle tree
    ///
    /// - `lag` is how far behind the current block to query, if not specified
    ///   it will query at the latest block.
    async fn count(&self, lag: Option<NonZeroU64>) -> ChainResult<u32>;

    /// Get the latest checkpoint.
    ///
    /// - `lag` is how far behind the current block to query, if not specified
    ///   it will query at the latest block.
    async fn latest_checkpoint(&self, lag: Option<NonZeroU64>) -> ChainResult<Checkpoint>;
}
```
@@ -0,0 +1,45 @@
```rust
use derive_new::new;
use std::io::{Read, Write};

use crate::{Decode, Encode, HyperlaneProtocolError, H256};

/// Merkle Tree Hook insertion event
#[derive(Debug, Copy, Clone, new)]
pub struct MerkleTreeInsertion {
    leaf_index: u32,
    message_id: H256,
}

impl MerkleTreeInsertion {
    /// The leaf index of this insertion
    pub fn index(&self) -> u32 {
        self.leaf_index
    }

    /// ID of the message inserted
    pub fn message_id(&self) -> H256 {
        self.message_id
    }
}

impl Encode for MerkleTreeInsertion {
    fn write_to<W>(&self, writer: &mut W) -> std::io::Result<usize>
    where
        W: Write,
    {
        Ok(self.leaf_index.write_to(writer)? + self.message_id.write_to(writer)?)
    }
}

impl Decode for MerkleTreeInsertion {
    fn read_from<R>(reader: &mut R) -> Result<Self, HyperlaneProtocolError>
    where
        R: Read,
        Self: Sized,
    {
        Ok(Self {
            leaf_index: u32::read_from(reader)?,
            message_id: H256::read_from(reader)?,
        })
    }
}
```
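A minimal round-trip sketch for the new type, assuming `Encode`, `Decode`, and `H256` are in scope as in the module above; it only demonstrates that the two impls are inverses of each other.

```rust
// Illustrative only: serialize an insertion into a byte buffer and read it back.
fn roundtrip_example() -> Result<(), HyperlaneProtocolError> {
    let insertion = MerkleTreeInsertion::new(42, H256::zero());
    let mut buf: Vec<u8> = Vec::new();
    insertion
        .write_to(&mut buf)
        .expect("writing to a Vec cannot fail");
    let decoded = MerkleTreeInsertion::read_from(&mut buf.as_slice())?;
    assert_eq!(decoded.index(), 42);
    assert_eq!(decoded.message_id(), H256::zero());
    Ok(())
}
```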
@@ -1,10 +1,10 @@
```diff
 {
-  "sealeveltest2": {
-    "hex": "0x2317f9615d4ebc2419ad4b88580e2a80a03b2c7a60bc960de7d6934dbc37a87e",
-    "base58": "3MzUPjP5LEkiHH82nEAe28Xtz9ztuMqWc8UmuKxrpVQH"
-  },
   "sealeveltest1": {
     "hex": "0xa77b4e2ed231894cc8cb8eee21adcc705d8489bccc6b2fcf40a358de23e60b7b",
     "base58": "CGn8yNtSD3aTTqJfYhUb6s1aVTN75NzwtsFKo1e83aga"
   },
+  "sealeveltest2": {
+    "hex": "0x2317f9615d4ebc2419ad4b88580e2a80a03b2c7a60bc960de7d6934dbc37a87e",
+    "base58": "3MzUPjP5LEkiHH82nEAe28Xtz9ztuMqWc8UmuKxrpVQH"
+  }
 }
```