V3 agents rebase (#2746)

### Description

It's your favourite PR coming right back... V3 agents!

Closes https://github.com/hyperlane-xyz/issues/issues/561

Builds on top of
https://github.com/hyperlane-xyz/hyperlane-monorepo/pull/2742

Depends on https://github.com/hyperlane-xyz/hyperlane-monorepo/pull/2681
for e2e testing

This PR includes:
- [x] Merkle tree hook indexer
- [x] Merkle tree builder task
- [x] Update submitter to trigger retries if no proof is available yet

Slightly more detailed overview of the work here:
https://github.com/hyperlane-xyz/hyperlane-monorepo/issues/2720#issuecomment-1724038643

<!--
What's included in this PR?
-->

### Drive-by changes

<!--
Are there any minor or drive-by changes also included?
-->

### Related issues

<!--
- Fixes #[issue number here]
-->

### Backward compatibility

<!--
Are these changes backward compatible? Are there any infrastructure
implications, e.g. changes that would prohibit deploying older commits
using this infra tooling?

Yes/No
-->

### Testing

<!--
What kind of testing have these changes undergone?

None/Manual/Unit Tests
-->

---------

Co-authored-by: -f <kunalarora1729@gmail.com>
Co-authored-by: Trevor Porter <trkporter@ucdavis.edu>
Co-authored-by: Kunal Arora <55632507+aroralanuk@users.noreply.github.com>
Co-authored-by: Mattie Conover <git@mconover.dev>
Co-authored-by: Guillaume Bouvignies <guillaumebouvignies@gmail.com>
Co-authored-by: Yorke Rhodes <yorke@hyperlane.xyz>
Co-authored-by: Guillaume Bouvignies <guillaume.bouvignies@kurtosistech.com>
pull/2777/head
Daniel Savu 1 year ago committed by GitHub
parent a60ec18237
commit af5bd88aa8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 2
      .github/workflows/e2e.yml
  2. 1
      .gitignore
  3. 16
      rust/Cargo.lock
  4. 1
      rust/Cargo.toml
  5. 2
      rust/agents/relayer/Cargo.toml
  6. 3
      rust/agents/relayer/src/main.rs
  7. 71
      rust/agents/relayer/src/merkle_tree/builder.rs
  8. 2
      rust/agents/relayer/src/merkle_tree/mod.rs
  9. 101
      rust/agents/relayer/src/merkle_tree/processor.rs
  10. 21
      rust/agents/relayer/src/msg/gas_payment/mod.rs
  11. 48
      rust/agents/relayer/src/msg/metadata/base.rs
  12. 89
      rust/agents/relayer/src/msg/metadata/multisig/base.rs
  13. 10
      rust/agents/relayer/src/msg/metadata/multisig/legacy_multisig.rs
  14. 16
      rust/agents/relayer/src/msg/metadata/multisig/merkle_root_multisig.rs
  15. 10
      rust/agents/relayer/src/msg/metadata/multisig/message_id_multisig.rs
  16. 146
      rust/agents/relayer/src/msg/processor.rs
  17. 37
      rust/agents/relayer/src/processor.rs
  18. 60
      rust/agents/relayer/src/relayer.rs
  19. 83
      rust/agents/relayer/src/settings/matching_list.rs
  20. 395
      rust/agents/relayer/src/settings/mod.rs
  21. 71
      rust/agents/scraper/src/settings.rs
  22. 127
      rust/agents/validator/src/settings.rs
  23. 25
      rust/agents/validator/src/submit.rs
  24. 54
      rust/agents/validator/src/validator.rs
  25. 271
      rust/chains/hyperlane-ethereum/abis/IInterchainGasPaymaster.abi.json
  26. 182
      rust/chains/hyperlane-ethereum/abis/IMailbox.abi.json
  27. 347
      rust/chains/hyperlane-ethereum/abis/Mailbox.abi.json
  28. 156
      rust/chains/hyperlane-ethereum/abis/MerkleTreeHook.abi.json
  29. 95
      rust/chains/hyperlane-ethereum/src/config.rs
  30. 2
      rust/chains/hyperlane-ethereum/src/interchain_gas.rs
  31. 8
      rust/chains/hyperlane-ethereum/src/lib.rs
  32. 122
      rust/chains/hyperlane-ethereum/src/mailbox.rs
  33. 282
      rust/chains/hyperlane-ethereum/src/merkle_tree_hook.rs
  34. 2
      rust/chains/hyperlane-ethereum/src/signers.rs
  35. 7
      rust/chains/hyperlane-ethereum/tests/signer_output.rs
  36. 35
      rust/chains/hyperlane-fuel/src/mailbox.rs
  37. 29
      rust/chains/hyperlane-fuel/src/trait_builder.rs
  38. 2
      rust/chains/hyperlane-sealevel/src/lib.rs
  39. 69
      rust/chains/hyperlane-sealevel/src/mailbox.rs
  40. 101
      rust/chains/hyperlane-sealevel/src/merkle_tree_hook.rs
  41. 32
      rust/chains/hyperlane-sealevel/src/trait_builder.rs
  42. 46
      rust/config/test_sealevel_config.json
  43. 20
      rust/helm/agent-common/templates/_helpers.tpl
  44. 44
      rust/helm/hyperlane-agent/README.md
  45. 10
      rust/helm/hyperlane-agent/templates/configmap.yaml
  46. 12
      rust/helm/hyperlane-agent/templates/external-secret.yaml
  47. 6
      rust/helm/hyperlane-agent/templates/relayer-external-secret.yaml
  48. 9
      rust/helm/hyperlane-agent/templates/relayer-statefulset.yaml
  49. 2
      rust/helm/hyperlane-agent/templates/scraper-external-secret.yaml
  50. 9
      rust/helm/hyperlane-agent/templates/scraper-statefulset.yaml
  51. 6
      rust/helm/hyperlane-agent/templates/validator-configmap.yaml
  52. 14
      rust/helm/hyperlane-agent/templates/validator-external-secret.yaml
  53. 37
      rust/helm/hyperlane-agent/values.yaml
  54. 6
      rust/hyperlane-base/src/contract_sync/cursor.rs
  55. 49
      rust/hyperlane-base/src/db/rocks/hyperlane_db.rs
  56. 4
      rust/hyperlane-base/src/settings/base.rs
  57. 80
      rust/hyperlane-base/src/settings/chains.rs
  58. 435
      rust/hyperlane-base/src/settings/deprecated_parser.rs
  59. 15
      rust/hyperlane-base/src/settings/loader/arguments.rs
  60. 66
      rust/hyperlane-base/src/settings/loader/case_adapter.rs
  61. 343
      rust/hyperlane-base/src/settings/loader/deprecated_arguments.rs
  62. 38
      rust/hyperlane-base/src/settings/loader/environment.rs
  63. 132
      rust/hyperlane-base/src/settings/loader/mod.rs
  64. 30
      rust/hyperlane-base/src/settings/mod.rs
  65. 37
      rust/hyperlane-base/src/settings/parser/json_value_parser.rs
  66. 152
      rust/hyperlane-base/src/settings/parser/mod.rs
  67. 6
      rust/hyperlane-base/tests/chain_config.rs
  68. 4
      rust/hyperlane-core/src/config/config_path.rs
  69. 16
      rust/hyperlane-core/src/traits/mailbox.rs
  70. 33
      rust/hyperlane-core/src/traits/merkle_tree_hook.rs
  71. 2
      rust/hyperlane-core/src/traits/mod.rs
  72. 12
      rust/hyperlane-core/src/types/checkpoint.rs
  73. 45
      rust/hyperlane-core/src/types/merkle_tree.rs
  74. 2
      rust/hyperlane-core/src/types/mod.rs
  75. 8
      rust/hyperlane-test/src/mocks/mailbox.rs
  76. 8
      rust/sealevel/environments/local-e2e/warp-routes/testwarproute/program-ids.json
  77. 2
      rust/sealevel/libraries/multisig-ism/src/test_data.rs
  78. 10
      rust/sealevel/programs/ism/multisig-ism-message-id/src/processor.rs
  79. 2
      rust/sealevel/programs/ism/multisig-ism-message-id/tests/functional.rs
  80. 27
      rust/utils/run-locally/src/ethereum.rs
  81. 35
      rust/utils/run-locally/src/invariants.rs
  82. 96
      rust/utils/run-locally/src/main.rs
  83. 41
      rust/utils/run-locally/src/program.rs
  84. 8
      rust/utils/run-locally/src/solana.rs
  85. 40
      solidity/contracts/test/TestSendReceiver.sol
  86. 1
      typescript/infra/.gitignore
  87. 16
      typescript/infra/config/environments/mainnet2/agent.ts
  88. 28
      typescript/infra/config/environments/mainnet2/core.ts
  89. 4
      typescript/infra/config/environments/mainnet2/funding.ts
  90. 6
      typescript/infra/config/environments/mainnet2/helloworld.ts
  91. 4
      typescript/infra/config/environments/mainnet2/index.ts
  92. 4
      typescript/infra/config/environments/mainnet2/liquidityLayer.ts
  93. 10
      typescript/infra/config/environments/test/agent.ts
  94. 7
      typescript/infra/config/environments/test/core.ts
  95. 2
      typescript/infra/config/environments/test/hooks.ts
  96. 16
      typescript/infra/config/environments/testnet3/agent.ts
  97. 8
      typescript/infra/config/environments/testnet3/core.ts
  98. 4
      typescript/infra/config/environments/testnet3/funding.ts
  99. 6
      typescript/infra/config/environments/testnet3/helloworld.ts
  100. 4
      typescript/infra/config/environments/testnet3/index.ts
  101. Some files were not shown because too many files have changed in this diff Show More

@ -4,7 +4,7 @@ on:
push: push:
branches: [main] branches: [main]
pull_request: pull_request:
branches: [main] branches: '*'
workflow_dispatch: workflow_dispatch:
concurrency: concurrency:

1
.gitignore vendored

@ -28,3 +28,4 @@ yarn-error.log
**/*.ignore **/*.ignore
.vscode .vscode
tsconfig.editor.json

16
rust/Cargo.lock generated

@ -428,6 +428,20 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "backoff"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b62ddb9cb1ec0a098ad4bbf9344d0713fa193ae1a80af55febcff2627b6a00c1"
dependencies = [
"futures-core",
"getrandom 0.2.10",
"instant",
"pin-project-lite",
"rand 0.8.5",
"tokio",
]
[[package]] [[package]]
name = "backtrace" name = "backtrace"
version = "0.3.68" version = "0.3.68"
@ -6049,7 +6063,9 @@ name = "relayer"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"backoff",
"config", "config",
"convert_case 0.6.0",
"derive-new", "derive-new",
"derive_more", "derive_more",
"enum_dispatch", "enum_dispatch",

@ -52,6 +52,7 @@ Inflector = "0.11.4"
anyhow = "1.0" anyhow = "1.0"
async-trait = "0.1" async-trait = "0.1"
auto_impl = "1.0" auto_impl = "1.0"
backoff = { version = "0.4.0", features = ["tokio"] }
backtrace = "0.3" backtrace = "0.3"
base64 = "0.21.2" base64 = "0.21.2"
bincode = "1.3" bincode = "1.3"

@ -11,7 +11,9 @@ version.workspace = true
[dependencies] [dependencies]
async-trait.workspace = true async-trait.workspace = true
backoff.workspace = true
config.workspace = true config.workspace = true
convert_case.workspace = true
derive-new.workspace = true derive-new.workspace = true
derive_more.workspace = true derive_more.workspace = true
enum_dispatch.workspace = true enum_dispatch.workspace = true

@ -13,8 +13,9 @@ use hyperlane_base::agent_main;
use crate::relayer::Relayer; use crate::relayer::Relayer;
mod merkle_tree_builder; mod merkle_tree;
mod msg; mod msg;
mod processor;
mod prover; mod prover;
mod relayer; mod relayer;
mod settings; mod settings;

@ -1,6 +1,6 @@
use std::fmt::Display; use std::fmt::Display;
use eyre::Result; use eyre::{Context, Result};
use tracing::{debug, error, instrument}; use tracing::{debug, error, instrument};
use hyperlane_base::db::{DbError, HyperlaneRocksDB}; use hyperlane_base::db::{DbError, HyperlaneRocksDB};
@ -50,12 +50,6 @@ pub enum MerkleTreeBuilderError {
/// Root of the incremental merkle tree /// Root of the incremental merkle tree
incremental_root: H256, incremental_root: H256,
}, },
/// Nonce was not found in DB, despite batch providing messages after
#[error("Nonce was not found {nonce:?}")]
UnavailableNonce {
/// Root of prover's local merkle tree
nonce: u32,
},
/// MerkleTreeBuilder attempts Prover operation and receives ProverError /// MerkleTreeBuilder attempts Prover operation and receives ProverError
#[error(transparent)] #[error(transparent)]
ProverError(#[from] ProverError), ProverError(#[from] ProverError),
@ -65,6 +59,9 @@ pub enum MerkleTreeBuilderError {
/// DB Error /// DB Error
#[error("{0}")] #[error("{0}")]
DbError(#[from] DbError), DbError(#[from] DbError),
/// Some other error occured.
#[error("Failed to build the merkle tree: {0}")]
Other(String),
} }
impl MerkleTreeBuilder { impl MerkleTreeBuilder {
@ -81,54 +78,38 @@ impl MerkleTreeBuilder {
#[instrument(err, skip(self), level="debug", fields(prover_latest_index=self.count()-1))] #[instrument(err, skip(self), level="debug", fields(prover_latest_index=self.count()-1))]
pub fn get_proof( pub fn get_proof(
&self, &self,
leaf_index: u32, message_nonce: u32,
root_index: u32, root_index: u32,
) -> Result<Proof, MerkleTreeBuilderError> { ) -> Result<Option<Proof>, MerkleTreeBuilderError> {
let Some(leaf_index) = self
.db
.retrieve_message_id_by_nonce(&message_nonce)?
.and_then(|message_id| self.db.retrieve_merkle_leaf_index_by_message_id(&message_id).ok().flatten())
else {
return Ok(None);
};
self.prover self.prover
.prove_against_previous(leaf_index as usize, root_index as usize) .prove_against_previous(leaf_index as usize, root_index as usize)
.map(Option::from)
.map_err(Into::into) .map_err(Into::into)
} }
fn ingest_nonce(&mut self, nonce: u32) -> Result<(), MerkleTreeBuilderError> {
match self.db.retrieve_message_id_by_nonce(&nonce) {
Ok(Some(leaf)) => {
debug!(nonce, "Ingesting leaf");
self.prover.ingest(leaf).expect("!tree full");
self.incremental.ingest(leaf);
assert_eq!(self.prover.root(), self.incremental.root());
Ok(())
}
Ok(None) => {
error!("We should not arrive here");
Err(MerkleTreeBuilderError::UnavailableNonce { nonce })
}
Err(e) => Err(e.into()),
}
}
pub fn count(&self) -> u32 { pub fn count(&self) -> u32 {
self.prover.count() as u32 self.prover.count() as u32
} }
#[instrument(err, skip(self), level = "debug")] pub async fn ingest_message_id(&mut self, message_id: H256) -> Result<()> {
pub async fn update_to_index(&mut self, index: u32) -> Result<(), MerkleTreeBuilderError> { const CTX: &str = "When ingesting message id";
if index >= self.count() { debug!(?message_id, "Ingesting leaf");
let starting_index = self.prover.count() as u32; self.prover.ingest(message_id).expect("tree full");
for i in starting_index..=index { self.incremental.ingest(message_id);
self.db.wait_for_message_nonce(i).await?; match self.prover.root().eq(&self.incremental.root()) {
self.ingest_nonce(i)?; true => Ok(()),
} false => Err(MerkleTreeBuilderError::MismatchedRoots {
prover_root: self.prover.root(),
let prover_root = self.prover.root(); incremental_root: self.incremental.root(),
let incremental_root = self.incremental.root(); }),
if prover_root != incremental_root {
return Err(MerkleTreeBuilderError::MismatchedRoots {
prover_root,
incremental_root,
});
}
} }
.context(CTX)
Ok(())
} }
} }

@ -0,0 +1,2 @@
pub(crate) mod builder;
pub(crate) mod processor;

@ -0,0 +1,101 @@
use std::{
fmt::{Debug, Formatter},
sync::Arc,
time::Duration,
};
use async_trait::async_trait;
use derive_new::new;
use eyre::Result;
use hyperlane_base::db::HyperlaneRocksDB;
use hyperlane_core::{HyperlaneDomain, MerkleTreeInsertion};
use prometheus::IntGauge;
use tokio::sync::RwLock;
use tracing::debug;
use crate::processor::ProcessorExt;
use super::builder::MerkleTreeBuilder;
/// Finds unprocessed merkle tree insertions and adds them to the prover sync
#[derive(new)]
pub struct MerkleTreeProcessor {
db: HyperlaneRocksDB,
metrics: MerkleTreeProcessorMetrics,
prover_sync: Arc<RwLock<MerkleTreeBuilder>>,
#[new(default)]
leaf_index: u32,
}
impl Debug for MerkleTreeProcessor {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(
f,
"MerkleTreeProcessor {{ leaf_index: {:?} }}",
self.leaf_index
)
}
}
#[async_trait]
impl ProcessorExt for MerkleTreeProcessor {
/// The domain this processor is getting merkle tree hook insertions from.
fn domain(&self) -> &HyperlaneDomain {
self.db.domain()
}
/// One round of processing, extracted from infinite work loop for
/// testing purposes.
async fn tick(&mut self) -> Result<()> {
if let Some(insertion) = self.next_unprocessed_leaf()? {
// Feed the message to the prover sync
self.prover_sync
.write()
.await
.ingest_message_id(insertion.message_id())
.await?;
// Increase the leaf index to move on to the next leaf
self.leaf_index += 1;
} else {
tokio::time::sleep(Duration::from_secs(1)).await;
}
Ok(())
}
}
impl MerkleTreeProcessor {
fn next_unprocessed_leaf(&mut self) -> Result<Option<MerkleTreeInsertion>> {
let leaf = if let Some(insertion) = self
.db
.retrieve_merkle_tree_insertion_by_leaf_index(&self.leaf_index)?
{
// Update the metrics
self.metrics
.max_leaf_index_gauge
.set(insertion.index() as i64);
Some(insertion)
} else {
debug!(leaf_index=?self.leaf_index, "No message found in DB for leaf index");
None
};
Ok(leaf)
}
}
#[derive(Debug)]
pub struct MerkleTreeProcessorMetrics {
max_leaf_index_gauge: IntGauge,
}
impl MerkleTreeProcessorMetrics {
pub fn new() -> Self {
Self {
max_leaf_index_gauge: IntGauge::new(
"max_leaf_index_gauge",
"The max merkle tree leaf index",
)
.unwrap(),
}
}
}

@ -2,20 +2,20 @@ use std::fmt::Debug;
use async_trait::async_trait; use async_trait::async_trait;
use eyre::Result; use eyre::Result;
use tracing::{debug, error, trace};
use hyperlane_base::db::HyperlaneRocksDB; use hyperlane_base::db::HyperlaneRocksDB;
use hyperlane_core::{ use hyperlane_core::{
GasPaymentKey, HyperlaneMessage, InterchainGasExpenditure, InterchainGasPayment, GasPaymentKey, HyperlaneMessage, InterchainGasExpenditure, InterchainGasPayment,
TxCostEstimate, TxOutcome, U256, TxCostEstimate, TxOutcome, U256,
}; };
use tracing::{debug, error, trace};
use crate::msg::gas_payment::policies::GasPaymentPolicyOnChainFeeQuoting;
use crate::settings::{
matching_list::MatchingList, GasPaymentEnforcementConf, GasPaymentEnforcementPolicy,
};
use self::policies::{GasPaymentPolicyMinimum, GasPaymentPolicyNone}; use self::policies::{GasPaymentPolicyMinimum, GasPaymentPolicyNone};
use crate::{
msg::gas_payment::policies::GasPaymentPolicyOnChainFeeQuoting,
settings::{
matching_list::MatchingList, GasPaymentEnforcementConf, GasPaymentEnforcementPolicy,
},
};
mod policies; mod policies;
@ -148,12 +148,11 @@ mod test {
H256, U256, H256, U256,
}; };
use super::GasPaymentEnforcer;
use crate::settings::{ use crate::settings::{
matching_list::MatchingList, GasPaymentEnforcementConf, GasPaymentEnforcementPolicy, matching_list::MatchingList, GasPaymentEnforcementConf, GasPaymentEnforcementPolicy,
}; };
use super::GasPaymentEnforcer;
#[tokio::test] #[tokio::test]
async fn test_empty_whitelist() { async fn test_empty_whitelist() {
test_utils::run_test_db(|db| async move { test_utils::run_test_db(|db| async move {
@ -195,7 +194,7 @@ mod test {
test_utils::run_test_db(|db| async move { test_utils::run_test_db(|db| async move {
let hyperlane_db = let hyperlane_db =
HyperlaneRocksDB::new(&HyperlaneDomain::new_test_domain("test_no_match"), db); HyperlaneRocksDB::new(&HyperlaneDomain::new_test_domain("test_no_match"), db);
let matching_list = serde_json::from_str(r#"[{"originDomain": 234}]"#).unwrap(); let matching_list = serde_json::from_str(r#"[{"origindomain": 234}]"#).unwrap();
let enforcer = GasPaymentEnforcer::new( let enforcer = GasPaymentEnforcer::new(
// Require a payment // Require a payment
vec![GasPaymentEnforcementConf { vec![GasPaymentEnforcementConf {
@ -339,7 +338,7 @@ mod test {
let recipient_address = "0xbb000000000000000000000000000000000000bb"; let recipient_address = "0xbb000000000000000000000000000000000000bb";
let matching_list = serde_json::from_str( let matching_list = serde_json::from_str(
&format!(r#"[{{"senderAddress": "{sender_address}", "recipientAddress": "{recipient_address}"}}]"#) &format!(r#"[{{"senderaddress": "{sender_address}", "recipientaddress": "{recipient_address}"}}]"#)
).unwrap(); ).unwrap();
let enforcer = GasPaymentEnforcer::new( let enforcer = GasPaymentEnforcer::new(

@ -1,8 +1,11 @@
use std::{collections::HashMap, fmt::Debug, str::FromStr, sync::Arc}; use std::{collections::HashMap, fmt::Debug, str::FromStr, sync::Arc};
use async_trait::async_trait; use async_trait::async_trait;
use backoff::Error as BackoffError;
use backoff::{future::retry, ExponentialBackoff};
use derive_new::new; use derive_new::new;
use eyre::{Context, Result}; use eyre::{Context, Result};
use hyperlane_base::db::HyperlaneRocksDB;
use hyperlane_base::{ use hyperlane_base::{
settings::{ChainConf, CheckpointSyncerConf}, settings::{ChainConf, CheckpointSyncerConf},
CheckpointSyncer, CoreMetrics, MultisigCheckpointSyncer, CheckpointSyncer, CoreMetrics, MultisigCheckpointSyncer,
@ -16,7 +19,7 @@ use tokio::sync::RwLock;
use tracing::{debug, info, instrument, warn}; use tracing::{debug, info, instrument, warn};
use crate::{ use crate::{
merkle_tree_builder::MerkleTreeBuilder, merkle_tree::builder::{MerkleTreeBuilder, MerkleTreeBuilderError},
msg::metadata::{ msg::metadata::{
multisig::{ multisig::{
LegacyMultisigMetadataBuilder, MerkleRootMultisigMetadataBuilder, LegacyMultisigMetadataBuilder, MerkleRootMultisigMetadataBuilder,
@ -49,6 +52,7 @@ pub struct BaseMetadataBuilder {
origin_validator_announce: Arc<dyn ValidatorAnnounce>, origin_validator_announce: Arc<dyn ValidatorAnnounce>,
allow_local_checkpoint_syncers: bool, allow_local_checkpoint_syncers: bool,
metrics: Arc<CoreMetrics>, metrics: Arc<CoreMetrics>,
db: HyperlaneRocksDB,
/// ISMs can be structured recursively. We keep track of the depth /// ISMs can be structured recursively. We keep track of the depth
/// of the recursion to avoid infinite loops. /// of the recursion to avoid infinite loops.
#[new(default)] #[new(default)]
@ -98,6 +102,15 @@ impl MetadataBuilder for BaseMetadataBuilder {
} }
} }
fn constant_backoff() -> ExponentialBackoff {
ExponentialBackoff {
initial_interval: std::time::Duration::from_secs(1),
multiplier: 1.0,
max_elapsed_time: None,
..ExponentialBackoff::default()
}
}
impl BaseMetadataBuilder { impl BaseMetadataBuilder {
pub fn domain(&self) -> &HyperlaneDomain { pub fn domain(&self) -> &HyperlaneDomain {
&self.destination_chain_setup.domain &self.destination_chain_setup.domain
@ -115,13 +128,21 @@ impl BaseMetadataBuilder {
pub async fn get_proof(&self, nonce: u32, checkpoint: Checkpoint) -> Result<Option<Proof>> { pub async fn get_proof(&self, nonce: u32, checkpoint: Checkpoint) -> Result<Option<Proof>> {
const CTX: &str = "When fetching message proof"; const CTX: &str = "When fetching message proof";
let proof = self let proof = retry(constant_backoff(), || async {
.origin_prover_sync self.origin_prover_sync
.read() .read()
.await .await
.get_proof(nonce, checkpoint.index) .get_proof(nonce, checkpoint.index)
.context(CTX)?; .context(CTX)
// If no proof is found, `get_proof(...)` returns `Ok(None)`,
// so errors should break the retry loop.
.map_err(BackoffError::permanent)?
.ok_or(MerkleTreeBuilderError::Other("No proof found in DB".into()))
.context(CTX)
// Transient errors are retried
.map_err(BackoffError::transient)
})
.await?;
// checkpoint may be fraudulent if the root does not // checkpoint may be fraudulent if the root does not
// match the canonical root at the checkpoint's index // match the canonical root at the checkpoint's index
if proof.root() != checkpoint.root { if proof.root() != checkpoint.root {
@ -136,8 +157,15 @@ impl BaseMetadataBuilder {
} }
} }
pub async fn highest_known_nonce(&self) -> u32 { pub async fn highest_known_nonce(&self) -> Option<u32> {
self.origin_prover_sync.read().await.count() - 1 self.origin_prover_sync.read().await.count().checked_sub(1)
}
pub async fn get_merkle_leaf_id_by_message_id(&self, message_id: H256) -> Result<Option<u32>> {
let merkle_leaf = self
.db
.retrieve_merkle_leaf_index_by_message_id(&message_id)?;
Ok(merkle_leaf)
} }
pub async fn build_ism(&self, address: H256) -> Result<Box<dyn InterchainSecurityModule>> { pub async fn build_ism(&self, address: H256) -> Result<Box<dyn InterchainSecurityModule>> {

@ -19,17 +19,19 @@ use crate::msg::metadata::MetadataBuilder;
pub struct MultisigMetadata { pub struct MultisigMetadata {
checkpoint: Checkpoint, checkpoint: Checkpoint,
signatures: Vec<SignatureWithSigner>, signatures: Vec<SignatureWithSigner>,
merkle_leaf_id: Option<u32>,
message_id: Option<H256>, message_id: Option<H256>,
proof: Option<Proof>, proof: Option<Proof>,
} }
#[derive(Debug, Display, PartialEq, Eq, Clone)] #[derive(Debug, Display, PartialEq, Eq, Clone)]
pub enum MetadataToken { pub enum MetadataToken {
CheckpointRoot, MerkleRoot,
CheckpointIndex, CheckpointIndex,
CheckpointMailbox, CheckpointMerkleTree,
MessageId, MessageId,
MerkleProof, MerkleProof,
MerkleIndex,
Threshold, Threshold,
Signatures, Signatures,
Validators, Validators,
@ -52,40 +54,53 @@ pub trait MultisigIsmMetadataBuilder: AsRef<BaseMetadataBuilder> + Send + Sync {
validators: &[H256], validators: &[H256],
threshold: u8, threshold: u8,
metadata: MultisigMetadata, metadata: MultisigMetadata,
) -> Vec<u8> { ) -> Result<Vec<u8>> {
let build_token = |token: &MetadataToken| match token { let build_token = |token: &MetadataToken| -> Result<Vec<u8>> {
MetadataToken::CheckpointRoot => metadata.checkpoint.root.to_fixed_bytes().into(), match token {
MetadataToken::CheckpointIndex => metadata.checkpoint.index.to_be_bytes().into(), MetadataToken::MerkleRoot => Ok(metadata.checkpoint.root.to_fixed_bytes().into()),
MetadataToken::CheckpointMailbox => { MetadataToken::MerkleIndex => Ok(metadata
metadata.checkpoint.mailbox_address.to_fixed_bytes().into() .merkle_leaf_id
} .ok_or(eyre::eyre!("Failed to fetch metadata"))?
MetadataToken::MessageId => metadata.message_id.unwrap().to_fixed_bytes().into(), .to_be_bytes()
MetadataToken::Threshold => Vec::from([threshold]), .into()),
MetadataToken::MerkleProof => { MetadataToken::CheckpointIndex => {
let proof_tokens: Vec<Token> = metadata Ok(metadata.checkpoint.index.to_be_bytes().into())
.proof }
.unwrap() MetadataToken::CheckpointMerkleTree => Ok(metadata
.path .checkpoint
.iter() .merkle_tree_hook_address
.map(|x| Token::FixedBytes(x.to_fixed_bytes().into())) .to_fixed_bytes()
.collect(); .into()),
ethers::abi::encode(&proof_tokens) MetadataToken::MessageId => {
} Ok(metadata.message_id.unwrap().to_fixed_bytes().into())
MetadataToken::Validators => { }
let validator_tokens: Vec<Token> = validators MetadataToken::Threshold => Ok(Vec::from([threshold])),
.iter() MetadataToken::MerkleProof => {
.map(|x| Token::FixedBytes(x.to_fixed_bytes().into())) let proof_tokens: Vec<Token> = metadata
.collect(); .proof
ethers::abi::encode(&[Token::FixedArray(validator_tokens)]) .unwrap()
} .path
MetadataToken::Signatures => { .iter()
let ordered_signatures = order_signatures(validators, &metadata.signatures); .map(|x| Token::FixedBytes(x.to_fixed_bytes().into()))
let threshold_signatures = &ordered_signatures[..threshold as usize]; .collect();
threshold_signatures.concat() Ok(ethers::abi::encode(&proof_tokens))
}
MetadataToken::Validators => {
let validator_tokens: Vec<Token> = validators
.iter()
.map(|x| Token::FixedBytes(x.to_fixed_bytes().into()))
.collect();
Ok(ethers::abi::encode(&[Token::FixedArray(validator_tokens)]))
}
MetadataToken::Signatures => {
let ordered_signatures = order_signatures(validators, &metadata.signatures);
let threshold_signatures = &ordered_signatures[..threshold as usize];
Ok(threshold_signatures.concat())
}
} }
}; };
let metas: Result<Vec<Vec<u8>>> = self.token_layout().iter().map(build_token).collect();
self.token_layout().iter().flat_map(build_token).collect() Ok(metas?.into_iter().flatten().collect())
} }
} }
@ -126,7 +141,11 @@ impl<T: MultisigIsmMetadataBuilder> MetadataBuilder for T {
.context(CTX)? .context(CTX)?
{ {
debug!(?message, ?metadata.checkpoint, "Found checkpoint with quorum"); debug!(?message, ?metadata.checkpoint, "Found checkpoint with quorum");
Ok(Some(self.format_metadata(&validators, threshold, metadata))) Ok(Some(self.format_metadata(
&validators,
threshold,
metadata,
)?))
} else { } else {
info!( info!(
?message, ?validators, threshold, ism=%multisig_ism.address(), ?message, ?validators, threshold, ism=%multisig_ism.address(),

@ -19,9 +19,9 @@ pub struct LegacyMultisigMetadataBuilder(BaseMetadataBuilder);
impl MultisigIsmMetadataBuilder for LegacyMultisigMetadataBuilder { impl MultisigIsmMetadataBuilder for LegacyMultisigMetadataBuilder {
fn token_layout(&self) -> Vec<MetadataToken> { fn token_layout(&self) -> Vec<MetadataToken> {
vec![ vec![
MetadataToken::CheckpointRoot, MetadataToken::MerkleRoot,
MetadataToken::CheckpointIndex, MetadataToken::CheckpointIndex,
MetadataToken::CheckpointMailbox, MetadataToken::CheckpointMerkleTree,
MetadataToken::MerkleProof, MetadataToken::MerkleProof,
MetadataToken::Threshold, MetadataToken::Threshold,
MetadataToken::Signatures, MetadataToken::Signatures,
@ -37,7 +37,10 @@ impl MultisigIsmMetadataBuilder for LegacyMultisigMetadataBuilder {
checkpoint_syncer: &MultisigCheckpointSyncer, checkpoint_syncer: &MultisigCheckpointSyncer,
) -> Result<Option<MultisigMetadata>> { ) -> Result<Option<MultisigMetadata>> {
const CTX: &str = "When fetching LegacyMultisig metadata"; const CTX: &str = "When fetching LegacyMultisig metadata";
let highest_nonce = self.highest_known_nonce().await; let Some(highest_nonce) = self.highest_known_nonce().await
else {
return Ok(None);
};
let Some(quorum_checkpoint) = checkpoint_syncer let Some(quorum_checkpoint) = checkpoint_syncer
.legacy_fetch_checkpoint_in_range( .legacy_fetch_checkpoint_in_range(
validators, validators,
@ -63,6 +66,7 @@ impl MultisigIsmMetadataBuilder for LegacyMultisigMetadataBuilder {
quorum_checkpoint.checkpoint, quorum_checkpoint.checkpoint,
quorum_checkpoint.signatures, quorum_checkpoint.signatures,
None, None,
None,
Some(proof), Some(proof),
))) )))
} }

@ -18,10 +18,11 @@ pub struct MerkleRootMultisigMetadataBuilder(BaseMetadataBuilder);
impl MultisigIsmMetadataBuilder for MerkleRootMultisigMetadataBuilder { impl MultisigIsmMetadataBuilder for MerkleRootMultisigMetadataBuilder {
fn token_layout(&self) -> Vec<MetadataToken> { fn token_layout(&self) -> Vec<MetadataToken> {
vec![ vec![
MetadataToken::CheckpointMailbox, MetadataToken::CheckpointMerkleTree,
MetadataToken::CheckpointIndex, MetadataToken::MerkleIndex,
MetadataToken::MessageId, MetadataToken::MessageId,
MetadataToken::MerkleProof, MetadataToken::MerkleProof,
MetadataToken::CheckpointIndex,
MetadataToken::Signatures, MetadataToken::Signatures,
] ]
} }
@ -34,7 +35,10 @@ impl MultisigIsmMetadataBuilder for MerkleRootMultisigMetadataBuilder {
checkpoint_syncer: &MultisigCheckpointSyncer, checkpoint_syncer: &MultisigCheckpointSyncer,
) -> Result<Option<MultisigMetadata>> { ) -> Result<Option<MultisigMetadata>> {
const CTX: &str = "When fetching MerkleRootMultisig metadata"; const CTX: &str = "When fetching MerkleRootMultisig metadata";
let highest_nonce = self.highest_known_nonce().await; let Some(highest_nonce) = self.highest_known_nonce().await
else {
return Ok(None);
};
let Some(quorum_checkpoint) = checkpoint_syncer let Some(quorum_checkpoint) = checkpoint_syncer
.fetch_checkpoint_in_range(validators, threshold as usize, message.nonce, highest_nonce) .fetch_checkpoint_in_range(validators, threshold as usize, message.nonce, highest_nonce)
.await .await
@ -51,9 +55,15 @@ impl MultisigIsmMetadataBuilder for MerkleRootMultisigMetadataBuilder {
return Ok(None); return Ok(None);
}; };
let merkle_leaf_id = self
.get_merkle_leaf_id_by_message_id(message.id())
.await
.context(CTX)?;
Ok(Some(MultisigMetadata::new( Ok(Some(MultisigMetadata::new(
quorum_checkpoint.checkpoint.checkpoint, quorum_checkpoint.checkpoint.checkpoint,
quorum_checkpoint.signatures, quorum_checkpoint.signatures,
merkle_leaf_id,
Some(quorum_checkpoint.checkpoint.message_id), Some(quorum_checkpoint.checkpoint.message_id),
Some(proof), Some(proof),
))) )))

@ -20,8 +20,9 @@ pub struct MessageIdMultisigMetadataBuilder(BaseMetadataBuilder);
impl MultisigIsmMetadataBuilder for MessageIdMultisigMetadataBuilder { impl MultisigIsmMetadataBuilder for MessageIdMultisigMetadataBuilder {
fn token_layout(&self) -> Vec<MetadataToken> { fn token_layout(&self) -> Vec<MetadataToken> {
vec![ vec![
MetadataToken::CheckpointMailbox, MetadataToken::CheckpointMerkleTree,
MetadataToken::CheckpointRoot, MetadataToken::MerkleRoot,
MetadataToken::MerkleIndex,
MetadataToken::Signatures, MetadataToken::Signatures,
] ]
} }
@ -50,10 +51,15 @@ impl MultisigIsmMetadataBuilder for MessageIdMultisigMetadataBuilder {
); );
return Ok(None); return Ok(None);
} }
let merkle_leaf_id = self
.get_merkle_leaf_id_by_message_id(message.id())
.await
.context(CTX)?;
Ok(Some(MultisigMetadata::new( Ok(Some(MultisigMetadata::new(
quorum_checkpoint.checkpoint.checkpoint, quorum_checkpoint.checkpoint.checkpoint,
quorum_checkpoint.signatures, quorum_checkpoint.signatures,
merkle_leaf_id,
None, None,
None, None,
))) )))

@ -5,22 +5,18 @@ use std::{
time::Duration, time::Duration,
}; };
use async_trait::async_trait;
use derive_new::new; use derive_new::new;
use eyre::Result; use eyre::Result;
use hyperlane_base::{db::HyperlaneRocksDB, CoreMetrics}; use hyperlane_base::{db::HyperlaneRocksDB, CoreMetrics};
use hyperlane_core::{HyperlaneDomain, HyperlaneMessage}; use hyperlane_core::{HyperlaneDomain, HyperlaneMessage};
use prometheus::IntGauge; use prometheus::IntGauge;
use tokio::{ use tokio::sync::mpsc::UnboundedSender;
sync::{mpsc::UnboundedSender, RwLock}, use tracing::{debug, trace};
task::JoinHandle,
};
use tracing::{debug, info_span, instrument, instrument::Instrumented, trace, Instrument};
use super::pending_message::*; use super::pending_message::*;
use crate::{ use crate::msg::pending_operation::DynPendingOperation;
merkle_tree_builder::MerkleTreeBuilder, msg::pending_operation::DynPendingOperation, use crate::{processor::ProcessorExt, settings::matching_list::MatchingList};
settings::matching_list::MatchingList,
};
/// Finds unprocessed messages from an origin and submits then through a channel /// Finds unprocessed messages from an origin and submits then through a channel
/// for to the appropriate destination. /// for to the appropriate destination.
@ -30,7 +26,6 @@ pub struct MessageProcessor {
whitelist: Arc<MatchingList>, whitelist: Arc<MatchingList>,
blacklist: Arc<MatchingList>, blacklist: Arc<MatchingList>,
metrics: MessageProcessorMetrics, metrics: MessageProcessorMetrics,
prover_sync: Arc<RwLock<MerkleTreeBuilder>>,
/// channel for each destination chain to send operations (i.e. message /// channel for each destination chain to send operations (i.e. message
/// submissions) to /// submissions) to
send_channels: HashMap<u32, UnboundedSender<Box<DynPendingOperation>>>, send_channels: HashMap<u32, UnboundedSender<Box<DynPendingOperation>>>,
@ -44,76 +39,26 @@ impl Debug for MessageProcessor {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!( write!(
f, f,
"MessageProcessor {{ whitelist: {:?}, blacklist: {:?}, prover_sync: {:?}, message_nonce: {:?} }}", "MessageProcessor {{ whitelist: {:?}, blacklist: {:?}, message_nonce: {:?} }}",
self.whitelist, self.whitelist, self.blacklist, self.message_nonce
self.blacklist,
self.prover_sync,
self.message_nonce
) )
} }
} }
impl MessageProcessor { #[async_trait]
impl ProcessorExt for MessageProcessor {
/// The domain this processor is getting messages from. /// The domain this processor is getting messages from.
pub fn domain(&self) -> &HyperlaneDomain { fn domain(&self) -> &HyperlaneDomain {
self.db.domain() self.db.domain()
} }
pub fn spawn(self) -> Instrumented<JoinHandle<Result<()>>> { /// One round of processing, extracted from infinite work loop for
let span = info_span!("MessageProcessor"); /// testing purposes.
tokio::spawn(async move { self.main_loop().await }).instrument(span) async fn tick(&mut self) -> Result<()> {
}
#[instrument(ret, err, skip(self), level = "info", fields(domain=%self.domain()))]
async fn main_loop(mut self) -> Result<()> {
// Forever, scan HyperlaneRocksDB looking for new messages to send. When criteria are // Forever, scan HyperlaneRocksDB looking for new messages to send. When criteria are
// satisfied or the message is disqualified, push the message onto // satisfied or the message is disqualified, push the message onto
// self.tx_msg and then continue the scan at the next highest // self.tx_msg and then continue the scan at the next highest
// nonce. // nonce.
loop {
self.tick().await?;
}
}
/// Tries to get the next message to process.
///
/// If no message with self.message_nonce is found, returns None.
/// If the message with self.message_nonce is found and has previously
/// been marked as processed, increments self.message_nonce and returns
/// None.
fn try_get_unprocessed_message(&mut self) -> Result<Option<HyperlaneMessage>> {
loop {
// First, see if we can find the message so we can update the gauge.
if let Some(message) = self.db.retrieve_message_by_nonce(self.message_nonce)? {
// Update the latest nonce gauges
self.metrics
.max_last_known_message_nonce_gauge
.set(message.nonce as i64);
if let Some(metrics) = self.metrics.get(message.destination) {
metrics.set(message.nonce as i64);
}
// If this message has already been processed, on to the next one.
if !self
.db
.retrieve_processed_by_nonce(&self.message_nonce)?
.unwrap_or(false)
{
return Ok(Some(message));
} else {
debug!(nonce=?self.message_nonce, "Message already marked as processed in DB");
self.message_nonce += 1;
}
} else {
trace!(nonce=?self.message_nonce, "No message found in DB for nonce");
return Ok(None);
}
}
}
/// One round of processing, extracted from infinite work loop for
/// testing purposes.
async fn tick(&mut self) -> Result<()> {
// Scan until we find next nonce without delivery confirmation. // Scan until we find next nonce without delivery confirmation.
if let Some(msg) = self.try_get_unprocessed_message()? { if let Some(msg) = self.try_get_unprocessed_message()? {
debug!(?msg, "Processor working on message"); debug!(?msg, "Processor working on message");
@ -147,13 +92,6 @@ impl MessageProcessor {
return Ok(()); return Ok(());
} }
// Feed the message to the prover sync
self.prover_sync
.write()
.await
.update_to_index(msg.nonce)
.await?;
debug!(%msg, "Sending message to submitter"); debug!(%msg, "Sending message to submitter");
// Finally, build the submit arg and dispatch it to the submitter. // Finally, build the submit arg and dispatch it to the submitter.
@ -170,6 +108,38 @@ impl MessageProcessor {
} }
} }
impl MessageProcessor {
fn try_get_unprocessed_message(&mut self) -> Result<Option<HyperlaneMessage>> {
loop {
// First, see if we can find the message so we can update the gauge.
if let Some(message) = self.db.retrieve_message_by_nonce(self.message_nonce)? {
// Update the latest nonce gauges
self.metrics
.max_last_known_message_nonce_gauge
.set(message.nonce as i64);
if let Some(metrics) = self.metrics.get(message.destination) {
metrics.set(message.nonce as i64);
}
// If this message has already been processed, on to the next one.
if !self
.db
.retrieve_processed_by_nonce(&self.message_nonce)?
.unwrap_or(false)
{
return Ok(Some(message));
} else {
debug!(nonce=?self.message_nonce, "Message already marked as processed in DB");
self.message_nonce += 1;
}
} else {
trace!(nonce=?self.message_nonce, "No message found in DB for nonce");
return Ok(None);
}
}
}
}
#[derive(Debug)] #[derive(Debug)]
pub struct MessageProcessorMetrics { pub struct MessageProcessorMetrics {
max_last_known_message_nonce_gauge: IntGauge, max_last_known_message_nonce_gauge: IntGauge,
@ -210,6 +180,16 @@ impl MessageProcessorMetrics {
mod test { mod test {
use std::time::Instant; use std::time::Instant;
use crate::{
merkle_tree::builder::MerkleTreeBuilder,
msg::{
gas_payment::GasPaymentEnforcer, metadata::BaseMetadataBuilder,
pending_operation::PendingOperation,
},
processor::Processor,
};
use super::*;
use hyperlane_base::{ use hyperlane_base::{
db::{test_utils, HyperlaneRocksDB}, db::{test_utils, HyperlaneRocksDB},
settings::{ChainConf, ChainConnectionConf, Settings}, settings::{ChainConf, ChainConnectionConf, Settings},
@ -217,16 +197,13 @@ mod test {
use hyperlane_test::mocks::{MockMailboxContract, MockValidatorAnnounceContract}; use hyperlane_test::mocks::{MockMailboxContract, MockValidatorAnnounceContract};
use prometheus::{IntCounter, Registry}; use prometheus::{IntCounter, Registry};
use tokio::{ use tokio::{
sync::mpsc::{self, UnboundedReceiver}, sync::{
mpsc::{self, UnboundedReceiver},
RwLock,
},
time::sleep, time::sleep,
}; };
use super::*;
use crate::msg::{
gas_payment::GasPaymentEnforcer, metadata::BaseMetadataBuilder,
pending_operation::PendingOperation,
};
fn dummy_processor_metrics(domain_id: u32) -> MessageProcessorMetrics { fn dummy_processor_metrics(domain_id: u32) -> MessageProcessorMetrics {
MessageProcessorMetrics { MessageProcessorMetrics {
max_last_known_message_nonce_gauge: IntGauge::new( max_last_known_message_nonce_gauge: IntGauge::new(
@ -278,6 +255,7 @@ mod test {
Arc::new(MockValidatorAnnounceContract::default()), Arc::new(MockValidatorAnnounceContract::default()),
false, false,
Arc::new(core_metrics), Arc::new(core_metrics),
db.clone(),
5, 5,
) )
} }
@ -307,7 +285,6 @@ mod test {
Default::default(), Default::default(),
Default::default(), Default::default(),
dummy_processor_metrics(origin_domain.id()), dummy_processor_metrics(origin_domain.id()),
Arc::new(RwLock::new(MerkleTreeBuilder::new(db.clone()))),
HashMap::from([(destination_domain.id(), send_channel)]), HashMap::from([(destination_domain.id(), send_channel)]),
HashMap::from([(destination_domain.id(), message_context)]), HashMap::from([(destination_domain.id(), message_context)]),
), ),
@ -373,7 +350,8 @@ mod test {
let (message_processor, mut receive_channel) = let (message_processor, mut receive_channel) =
dummy_message_processor(origin_domain, destination_domain, db); dummy_message_processor(origin_domain, destination_domain, db);
let process_fut = message_processor.spawn(); let processor = Processor::new(Box::new(message_processor));
let process_fut = processor.spawn();
let mut pending_messages = vec![]; let mut pending_messages = vec![];
let pending_message_accumulator = async { let pending_message_accumulator = async {
while let Some(pm) = receive_channel.recv().await { while let Some(pm) = receive_channel.recv().await {

@ -0,0 +1,37 @@
use std::fmt::Debug;
use async_trait::async_trait;
use derive_new::new;
use eyre::Result;
use hyperlane_core::HyperlaneDomain;
use tokio::task::JoinHandle;
use tracing::{info_span, instrument, instrument::Instrumented, Instrument};
#[async_trait]
pub trait ProcessorExt: Send + Debug {
/// The domain this processor is getting messages from.
fn domain(&self) -> &HyperlaneDomain;
/// One round of processing, extracted from infinite work loop for
/// testing purposes.
async fn tick(&mut self) -> Result<()>;
}
#[derive(new)]
pub struct Processor {
ticker: Box<dyn ProcessorExt>,
}
impl Processor {
pub fn spawn(self) -> Instrumented<JoinHandle<Result<()>>> {
let span = info_span!("MessageProcessor");
tokio::spawn(async move { self.main_loop().await }).instrument(span)
}
#[instrument(ret, err, skip(self), level = "info", fields(domain=%self.ticker.domain()))]
async fn main_loop(mut self) -> Result<()> {
loop {
self.ticker.tick().await?;
}
}
}

@ -12,7 +12,7 @@ use hyperlane_base::{
run_all, BaseAgent, ContractSyncMetrics, CoreMetrics, HyperlaneAgentCore, MessageContractSync, run_all, BaseAgent, ContractSyncMetrics, CoreMetrics, HyperlaneAgentCore, MessageContractSync,
WatermarkContractSync, WatermarkContractSync,
}; };
use hyperlane_core::{HyperlaneDomain, InterchainGasPayment, U256}; use hyperlane_core::{HyperlaneDomain, InterchainGasPayment, MerkleTreeInsertion, U256};
use tokio::{ use tokio::{
sync::{ sync::{
mpsc::{self, UnboundedReceiver, UnboundedSender}, mpsc::{self, UnboundedReceiver, UnboundedSender},
@ -22,8 +22,10 @@ use tokio::{
}; };
use tracing::{info, info_span, instrument::Instrumented, Instrument}; use tracing::{info, info_span, instrument::Instrumented, Instrument};
use crate::merkle_tree::processor::{MerkleTreeProcessor, MerkleTreeProcessorMetrics};
use crate::processor::{Processor, ProcessorExt};
use crate::{ use crate::{
merkle_tree_builder::MerkleTreeBuilder, merkle_tree::builder::MerkleTreeBuilder,
msg::{ msg::{
gas_payment::GasPaymentEnforcer, gas_payment::GasPaymentEnforcer,
metadata::BaseMetadataBuilder, metadata::BaseMetadataBuilder,
@ -55,6 +57,8 @@ pub struct Relayer {
/// sent between /// sent between
msg_ctxs: HashMap<ContextKey, Arc<MessageContext>>, msg_ctxs: HashMap<ContextKey, Arc<MessageContext>>,
prover_syncs: HashMap<HyperlaneDomain, Arc<RwLock<MerkleTreeBuilder>>>, prover_syncs: HashMap<HyperlaneDomain, Arc<RwLock<MerkleTreeBuilder>>>,
merkle_tree_hook_syncs:
HashMap<HyperlaneDomain, Arc<WatermarkContractSync<MerkleTreeInsertion>>>,
dbs: HashMap<HyperlaneDomain, HyperlaneRocksDB>, dbs: HashMap<HyperlaneDomain, HyperlaneRocksDB>,
whitelist: Arc<MatchingList>, whitelist: Arc<MatchingList>,
blacklist: Arc<MatchingList>, blacklist: Arc<MatchingList>,
@ -127,6 +131,16 @@ impl BaseAgent for Relayer {
.collect(), .collect(),
) )
.await?; .await?;
let merkle_tree_hook_syncs = settings
.build_merkle_tree_hook_indexers(
settings.origin_chains.iter(),
&metrics,
&contract_sync_metrics,
dbs.iter()
.map(|(d, db)| (d.clone(), Arc::new(db.clone()) as _))
.collect(),
)
.await?;
let whitelist = Arc::new(settings.whitelist); let whitelist = Arc::new(settings.whitelist);
let blacklist = Arc::new(settings.blacklist); let blacklist = Arc::new(settings.blacklist);
@ -184,12 +198,14 @@ impl BaseAgent for Relayer {
}; };
for origin in &settings.origin_chains { for origin in &settings.origin_chains {
let db = dbs.get(origin).unwrap().clone();
let metadata_builder = BaseMetadataBuilder::new( let metadata_builder = BaseMetadataBuilder::new(
destination_chain_setup.clone(), destination_chain_setup.clone(),
prover_syncs[origin].clone(), prover_syncs[origin].clone(),
validator_announces[origin].clone(), validator_announces[origin].clone(),
settings.allow_local_checkpoint_syncers, settings.allow_local_checkpoint_syncers,
core.metrics.clone(), core.metrics.clone(),
db,
5, 5,
); );
@ -219,6 +235,7 @@ impl BaseAgent for Relayer {
message_syncs, message_syncs,
interchain_gas_payment_syncs, interchain_gas_payment_syncs,
prover_syncs, prover_syncs,
merkle_tree_hook_syncs,
whitelist, whitelist,
blacklist, blacklist,
transaction_gas_limit, transaction_gas_limit,
@ -244,11 +261,13 @@ impl BaseAgent for Relayer {
for origin in &self.origin_chains { for origin in &self.origin_chains {
tasks.push(self.run_message_sync(origin).await); tasks.push(self.run_message_sync(origin).await);
tasks.push(self.run_interchain_gas_payment_sync(origin).await); tasks.push(self.run_interchain_gas_payment_sync(origin).await);
tasks.push(self.run_merkle_tree_hook_syncs(origin).await);
} }
// each message process attempts to send messages from a chain // each message process attempts to send messages from a chain
for origin in &self.origin_chains { for origin in &self.origin_chains {
tasks.push(self.run_message_processor(origin, send_channels.clone())); tasks.push(self.run_message_processor(origin, send_channels.clone()));
tasks.push(self.run_merkle_tree_processor(origin));
} }
run_all(tasks) run_all(tasks)
@ -289,6 +308,17 @@ impl Relayer {
.instrument(info_span!("ContractSync")) .instrument(info_span!("ContractSync"))
} }
async fn run_merkle_tree_hook_syncs(
&self,
origin: &HyperlaneDomain,
) -> Instrumented<JoinHandle<eyre::Result<()>>> {
let index_settings = self.as_ref().settings.chains[origin.name()].index.clone();
let contract_sync = self.merkle_tree_hook_syncs.get(origin).unwrap().clone();
let cursor = contract_sync.rate_limited_cursor(index_settings).await;
tokio::spawn(async move { contract_sync.clone().sync("merkle_tree_hook", cursor).await })
.instrument(info_span!("ContractSync"))
}
fn run_message_processor( fn run_message_processor(
&self, &self,
origin: &HyperlaneDomain, origin: &HyperlaneDomain,
@ -319,21 +349,41 @@ impl Relayer {
self.whitelist.clone(), self.whitelist.clone(),
self.blacklist.clone(), self.blacklist.clone(),
metrics, metrics,
self.prover_syncs[origin].clone(),
send_channels, send_channels,
destination_ctxs, destination_ctxs,
); );
let span = info_span!("MessageProcessor", origin=%message_processor.domain()); let span = info_span!("MessageProcessor", origin=%message_processor.domain());
let process_fut = message_processor.spawn(); let processor = Processor::new(Box::new(message_processor));
tokio::spawn(async move { tokio::spawn(async move {
let res = tokio::try_join!(process_fut)?; let res = tokio::try_join!(processor.spawn())?;
info!(?res, "try_join finished for message processor"); info!(?res, "try_join finished for message processor");
Ok(()) Ok(())
}) })
.instrument(span) .instrument(span)
} }
fn run_merkle_tree_processor(
&self,
origin: &HyperlaneDomain,
) -> Instrumented<JoinHandle<Result<()>>> {
let metrics = MerkleTreeProcessorMetrics::new();
let merkle_tree_processor = MerkleTreeProcessor::new(
self.dbs.get(origin).unwrap().clone(),
metrics,
self.prover_syncs[origin].clone(),
);
let span = info_span!("MerkleTreeProcessor", origin=%merkle_tree_processor.domain());
let processor = Processor::new(Box::new(merkle_tree_processor));
tokio::spawn(async move {
let res = tokio::try_join!(processor.spawn())?;
info!(?res, "try_join finished for merkle tree processor");
Ok(())
})
.instrument(span)
}
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
#[tracing::instrument(skip(self, receiver))] #[tracing::instrument(skip(self, receiver))]
fn run_destination_submitter( fn run_destination_submitter(

@ -22,8 +22,7 @@ use serde::{
/// - wildcard "*" /// - wildcard "*"
/// - single value in decimal or hex (must start with `0x`) format /// - single value in decimal or hex (must start with `0x`) format
/// - list of values in decimal or hex format /// - list of values in decimal or hex format
#[derive(Debug, Deserialize, Default, Clone)] #[derive(Debug, Default, Clone)]
#[serde(transparent)]
pub struct MatchingList(Option<Vec<ListElement>>); pub struct MatchingList(Option<Vec<ListElement>>);
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
@ -63,6 +62,55 @@ impl<T: Debug> Display for Filter<T> {
} }
} }
struct MatchingListVisitor;
impl<'de> Visitor<'de> for MatchingListVisitor {
type Value = MatchingList;
fn expecting(&self, fmt: &mut Formatter) -> fmt::Result {
write!(fmt, "an optional list of matching rules")
}
fn visit_none<E>(self) -> Result<Self::Value, E>
where
E: Error,
{
Ok(MatchingList(None))
}
fn visit_some<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
where
D: Deserializer<'de>,
{
let list: Vec<ListElement> = deserializer.deserialize_seq(MatchingListArrayVisitor)?;
Ok(if list.is_empty() {
// this allows for empty matching lists to be treated as if no matching list was set
MatchingList(None)
} else {
MatchingList(Some(list))
})
}
}
struct MatchingListArrayVisitor;
impl<'de> Visitor<'de> for MatchingListArrayVisitor {
type Value = Vec<ListElement>;
fn expecting(&self, fmt: &mut Formatter) -> fmt::Result {
write!(fmt, "a list of matching rules")
}
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
where
A: SeqAccess<'de>,
{
let mut rules = seq.size_hint().map(Vec::with_capacity).unwrap_or_default();
while let Some(rule) = seq.next_element::<ListElement>()? {
rules.push(rule);
}
Ok(rules)
}
}
struct FilterVisitor<T>(PhantomData<T>); struct FilterVisitor<T>(PhantomData<T>);
impl<'de> Visitor<'de> for FilterVisitor<u32> { impl<'de> Visitor<'de> for FilterVisitor<u32> {
type Value = Filter<u32>; type Value = Filter<u32>;
@ -145,6 +193,15 @@ impl<'de> Visitor<'de> for FilterVisitor<H256> {
} }
} }
impl<'de> Deserialize<'de> for MatchingList {
fn deserialize<D>(d: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
d.deserialize_option(MatchingListVisitor)
}
}
impl<'de> Deserialize<'de> for Filter<u32> { impl<'de> Deserialize<'de> for Filter<u32> {
fn deserialize<D>(d: D) -> Result<Self, D::Error> fn deserialize<D>(d: D) -> Result<Self, D::Error>
where where
@ -166,13 +223,13 @@ impl<'de> Deserialize<'de> for Filter<H256> {
#[derive(Debug, Deserialize, Clone)] #[derive(Debug, Deserialize, Clone)]
#[serde(tag = "type")] #[serde(tag = "type")]
struct ListElement { struct ListElement {
#[serde(default, rename = "originDomain")] #[serde(default, rename = "origindomain")]
origin_domain: Filter<u32>, origin_domain: Filter<u32>,
#[serde(default, rename = "senderAddress")] #[serde(default, rename = "senderaddress")]
sender_address: Filter<H256>, sender_address: Filter<H256>,
#[serde(default, rename = "destinationDomain")] #[serde(default, rename = "destinationdomain")]
destination_domain: Filter<u32>, destination_domain: Filter<u32>,
#[serde(default, rename = "recipientAddress")] #[serde(default, rename = "recipientaddress")]
recipient_address: Filter<H256>, recipient_address: Filter<H256>,
} }
@ -266,7 +323,7 @@ mod test {
#[test] #[test]
fn basic_config() { fn basic_config() {
let list: MatchingList = serde_json::from_str(r#"[{"originDomain": "*", "senderAddress": "*", "destinationDomain": "*", "recipientAddress": "*"}, {}]"#).unwrap(); let list: MatchingList = serde_json::from_str(r#"[{"origindomain": "*", "senderaddress": "*", "destinationdomain": "*", "recipientaddress": "*"}, {}]"#).unwrap();
assert!(list.0.is_some()); assert!(list.0.is_some());
assert_eq!(list.0.as_ref().unwrap().len(), 2); assert_eq!(list.0.as_ref().unwrap().len(), 2);
let elem = &list.0.as_ref().unwrap()[0]; let elem = &list.0.as_ref().unwrap()[0];
@ -307,7 +364,7 @@ mod test {
#[test] #[test]
fn config_with_address() { fn config_with_address() {
let list: MatchingList = serde_json::from_str(r#"[{"senderAddress": "0x9d4454B023096f34B160D6B654540c56A1F81688", "recipientAddress": "0x9d4454B023096f34B160D6B654540c56A1F81688"}]"#).unwrap(); let list: MatchingList = serde_json::from_str(r#"[{"senderaddress": "0x9d4454B023096f34B160D6B654540c56A1F81688", "recipientaddress": "0x9d4454B023096f34B160D6B654540c56A1F81688"}]"#).unwrap();
assert!(list.0.is_some()); assert!(list.0.is_some());
assert_eq!(list.0.as_ref().unwrap().len(), 1); assert_eq!(list.0.as_ref().unwrap().len(), 1);
let elem = &list.0.as_ref().unwrap()[0]; let elem = &list.0.as_ref().unwrap()[0];
@ -361,7 +418,7 @@ mod test {
#[test] #[test]
fn config_with_multiple_domains() { fn config_with_multiple_domains() {
let whitelist: MatchingList = let whitelist: MatchingList =
serde_json::from_str(r#"[{"destinationDomain": ["13372", "13373"]}]"#).unwrap(); serde_json::from_str(r#"[{"destinationdomain": ["13372", "13373"]}]"#).unwrap();
assert!(whitelist.0.is_some()); assert!(whitelist.0.is_some());
assert_eq!(whitelist.0.as_ref().unwrap().len(), 1); assert_eq!(whitelist.0.as_ref().unwrap().len(), 1);
let elem = &whitelist.0.as_ref().unwrap()[0]; let elem = &whitelist.0.as_ref().unwrap()[0];
@ -371,6 +428,12 @@ mod test {
assert_eq!(elem.sender_address, Wildcard); assert_eq!(elem.sender_address, Wildcard);
} }
#[test]
fn config_with_empty_list_is_none() {
let whitelist: MatchingList = serde_json::from_str(r#"[]"#).unwrap();
assert!(whitelist.0.is_none());
}
#[test] #[test]
fn matches_empty_list() { fn matches_empty_list() {
let info = MatchInfo { let info = MatchInfo {
@ -388,7 +451,7 @@ mod test {
#[test] #[test]
fn supports_base58() { fn supports_base58() {
serde_json::from_str::<MatchingList>( serde_json::from_str::<MatchingList>(
r#"[{"originDomain":1399811151,"senderAddress":"DdTMkk9nuqH5LnD56HLkPiKMV3yB3BNEYSQfgmJHa5i7","destinationDomain":11155111,"recipientAddress":"0x6AD4DEBA8A147d000C09de6465267a9047d1c217"}]"#, r#"[{"origindomain":1399811151,"senderaddress":"DdTMkk9nuqH5LnD56HLkPiKMV3yB3BNEYSQfgmJHa5i7","destinationdomain":11155111,"recipientaddress":"0x6AD4DEBA8A147d000C09de6465267a9047d1c217"}]"#,
).unwrap(); ).unwrap();
} }
} }

@ -6,13 +6,13 @@
use std::{collections::HashSet, path::PathBuf}; use std::{collections::HashSet, path::PathBuf};
use convert_case::Case;
use derive_more::{AsMut, AsRef, Deref, DerefMut}; use derive_more::{AsMut, AsRef, Deref, DerefMut};
use eyre::{eyre, Context}; use eyre::{eyre, Context};
use hyperlane_base::{ use hyperlane_base::{
impl_loadable_from_settings, impl_loadable_from_settings,
settings::{ settings::{
deprecated_parser::DeprecatedRawSettings, parser::{recase_json_value, RawAgentConf, ValueParser},
parser::{RawAgentConf, ValueParser},
Settings, Settings,
}, },
}; };
@ -20,128 +20,11 @@ use hyperlane_core::{cfg_unwrap_all, config::*, HyperlaneDomain, U256};
use itertools::Itertools; use itertools::Itertools;
use serde::Deserialize; use serde::Deserialize;
use serde_json::Value; use serde_json::Value;
use tracing::warn;
use crate::settings::matching_list::MatchingList; use crate::settings::matching_list::MatchingList;
pub mod matching_list; pub mod matching_list;
/// Config for a GasPaymentEnforcementPolicy
#[derive(Debug, Clone, Default)]
pub enum GasPaymentEnforcementPolicy {
/// No requirement - all messages are processed regardless of gas payment
#[default]
None,
/// Messages that have paid a minimum amount will be processed
Minimum { payment: U256 },
/// The required amount of gas on the foreign chain has been paid according
/// to on-chain fee quoting.
OnChainFeeQuoting {
gas_fraction_numerator: u64,
gas_fraction_denominator: u64,
},
}
#[derive(Debug, Deserialize)]
#[serde(tag = "type", rename_all = "camelCase")]
enum RawGasPaymentEnforcementPolicy {
None,
Minimum {
payment: Option<StrOrInt>,
},
OnChainFeeQuoting {
/// Optional fraction of gas which must be paid before attempting to run
/// the transaction. Must be written as `"numerator /
/// denominator"` where both are integers.
#[serde(default = "default_gasfraction")]
gasfraction: String,
},
#[serde(other)]
Unknown,
}
impl FromRawConf<RawGasPaymentEnforcementPolicy> for GasPaymentEnforcementPolicy {
fn from_config_filtered(
raw: RawGasPaymentEnforcementPolicy,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
use RawGasPaymentEnforcementPolicy::*;
match raw {
None => Ok(Self::None),
Minimum { payment } => Ok(Self::Minimum {
payment: payment
.ok_or_else(|| {
eyre!("Missing `payment` for Minimum gas payment enforcement policy")
})
.into_config_result(|| cwp + "payment")?
.try_into()
.into_config_result(|| cwp + "payment")?,
}),
OnChainFeeQuoting { gasfraction } => {
let (numerator, denominator) =
gasfraction
.replace(' ', "")
.split_once('/')
.map(|(a, b)| (a.to_owned(), b.to_owned()))
.ok_or_else(|| eyre!("Invalid `gasfraction` for OnChainFeeQuoting gas payment enforcement policy; expected `numerator / denominator`"))
.into_config_result(|| cwp + "gasfraction")?;
Ok(Self::OnChainFeeQuoting {
gas_fraction_numerator: numerator
.parse()
.into_config_result(|| cwp + "gasfraction")?,
gas_fraction_denominator: denominator
.parse()
.into_config_result(|| cwp + "gasfraction")?,
})
}
Unknown => Err(eyre!("Unknown gas payment enforcement policy"))
.into_config_result(|| cwp.clone()),
}
}
}
/// Config for gas payment enforcement
#[derive(Debug, Clone, Default)]
pub struct GasPaymentEnforcementConf {
/// The gas payment enforcement policy
pub policy: GasPaymentEnforcementPolicy,
/// An optional matching list, any message that matches will use this
/// policy. By default all messages will match.
pub matching_list: MatchingList,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct RawGasPaymentEnforcementConf {
#[serde(flatten)]
policy: Option<RawGasPaymentEnforcementPolicy>,
#[serde(default)]
matching_list: Option<MatchingList>,
}
impl FromRawConf<RawGasPaymentEnforcementConf> for GasPaymentEnforcementConf {
fn from_config_filtered(
raw: RawGasPaymentEnforcementConf,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
let mut err = ConfigParsingError::default();
let policy = raw.policy
.ok_or_else(|| eyre!("Missing policy for gas payment enforcement config; required if a matching list is provided"))
.take_err(&mut err, || cwp.clone()).and_then(|r| {
r.parse_config(cwp).take_config_err(&mut err)
});
let matching_list = raw.matching_list.unwrap_or_default();
err.into_result(Self {
policy: policy.unwrap(),
matching_list,
})
}
}
/// Settings for `Relayer` /// Settings for `Relayer`
#[derive(Debug, AsRef, AsMut, Deref, DerefMut)] #[derive(Debug, AsRef, AsMut, Deref, DerefMut)]
pub struct RelayerSettings { pub struct RelayerSettings {
@ -173,48 +56,38 @@ pub struct RelayerSettings {
pub allow_local_checkpoint_syncers: bool, pub allow_local_checkpoint_syncers: bool,
} }
#[derive(Debug, Deserialize, AsMut)] /// Config for gas payment enforcement
#[serde(rename_all = "camelCase")] #[derive(Debug, Clone, Default)]
pub struct DeprecatedRawRelayerSettings { pub struct GasPaymentEnforcementConf {
#[serde(flatten)] /// The gas payment enforcement policy
#[as_mut] pub policy: GasPaymentEnforcementPolicy,
base: DeprecatedRawSettings, /// An optional matching list, any message that matches will use this
/// Database path (path on the fs) /// policy. By default all messages will match.
db: Option<String>, pub matching_list: MatchingList,
// Comma separated list of chains to relay between.
relaychains: Option<String>,
// Comma separated list of origin chains.
#[deprecated(note = "Use `relaychains` instead")]
originchainname: Option<String>,
// Comma separated list of destination chains.
#[deprecated(note = "Use `relaychains` instead")]
destinationchainnames: Option<String>,
/// The gas payment enforcement configuration as JSON. Expects an ordered array of `GasPaymentEnforcementConfig`.
gaspaymentenforcement: Option<String>,
/// This is optional. If no whitelist is provided ALL messages will be considered on the
/// whitelist.
whitelist: Option<String>,
/// This is optional. If no blacklist is provided ALL will be considered to not be on
/// the blacklist.
blacklist: Option<String>,
/// This is optional. If not specified, any amount of gas will be valid, otherwise this
/// is the max allowed gas in wei to relay a transaction.
transactiongaslimit: Option<StrOrInt>,
// TODO: this should be a list of chain names to be consistent
/// Comma separated List of domain ids to skip applying the transaction gas limit to.
skiptransactiongaslimitfor: Option<String>,
/// If true, allows local storage based checkpoint syncers.
/// Not intended for production use. Defaults to false.
#[serde(default)]
allowlocalcheckpointsyncers: bool,
} }
impl_loadable_from_settings!(Relayer, DeprecatedRawRelayerSettings -> RelayerSettings); /// Config for a GasPaymentEnforcementPolicy
#[derive(Debug, Clone, Default)]
pub enum GasPaymentEnforcementPolicy {
/// No requirement - all messages are processed regardless of gas payment
#[default]
None,
/// Messages that have paid a minimum amount will be processed
Minimum { payment: U256 },
/// The required amount of gas on the foreign chain has been paid according
/// to on-chain fee quoting.
OnChainFeeQuoting {
gas_fraction_numerator: u64,
gas_fraction_denominator: u64,
},
}
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
#[serde(transparent)] #[serde(transparent)]
struct RawRelayerSettings(Value); struct RawRelayerSettings(Value);
impl_loadable_from_settings!(Relayer, RawRelayerSettings -> RelayerSettings);
impl FromRawConf<RawRelayerSettings> for RelayerSettings { impl FromRawConf<RawRelayerSettings> for RelayerSettings {
fn from_config_filtered( fn from_config_filtered(
raw: RawRelayerSettings, raw: RawRelayerSettings,
@ -256,7 +129,7 @@ impl FromRawConf<RawRelayerSettings> for RelayerSettings {
}) => serde_json::from_str::<Value>(policy_str) }) => serde_json::from_str::<Value>(policy_str)
.context("Expected JSON string") .context("Expected JSON string")
.take_err(&mut err, || cwp.clone()) .take_err(&mut err, || cwp.clone())
.map(|v| (cwp, v)), .map(|v| (cwp, recase_json_value(v, Case::Flat))),
Some(ValueParser { Some(ValueParser {
val: value @ Value::Array(_), val: value @ Value::Array(_),
cwp, cwp,
@ -287,7 +160,7 @@ impl FromRawConf<RawRelayerSettings> for RelayerSettings {
.get_opt_key("gasFraction") .get_opt_key("gasFraction")
.parse_string() .parse_string()
.map(|v| v.replace(' ', "")) .map(|v| v.replace(' ', ""))
.unwrap_or_else(|| default_gasfraction().to_owned()); .unwrap_or_else(|| "1/2".to_owned());
let (numerator, denominator) = gas_fraction let (numerator, denominator) = gas_fraction
.split_once('/') .split_once('/')
.ok_or_else(|| eyre!("Invalid `gas_fraction` for OnChainFeeQuoting gas payment enforcement policy; expected `numerator / denominator`")) .ok_or_else(|| eyre!("Invalid `gas_fraction` for OnChainFeeQuoting gas payment enforcement policy; expected `numerator / denominator`"))
@ -394,7 +267,8 @@ fn parse_matching_list(p: ValueParser) -> ConfigResult<MatchingList> {
cwp, cwp,
} => serde_json::from_str::<Value>(matching_list_str) } => serde_json::from_str::<Value>(matching_list_str)
.context("Expected JSON string") .context("Expected JSON string")
.take_err(&mut err, || cwp.clone()), .take_err(&mut err, || cwp.clone())
.map(|v| recase_json_value(v, Case::Flat)),
ValueParser { ValueParser {
val: value @ Value::Array(_), val: value @ Value::Array(_),
.. ..
@ -413,210 +287,3 @@ fn parse_matching_list(p: ValueParser) -> ConfigResult<MatchingList> {
err.into_result(ml) err.into_result(ml)
} }
impl FromRawConf<DeprecatedRawRelayerSettings> for RelayerSettings {
fn from_config_filtered(
raw: DeprecatedRawRelayerSettings,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
let mut err = ConfigParsingError::default();
let gas_payment_enforcement = raw
.gaspaymentenforcement
.and_then(|j| {
serde_json::from_str::<Vec<RawGasPaymentEnforcementConf>>(&j)
.take_err(&mut err, || cwp + "gaspaymentenforcement")
})
.map(|rv| {
let cwp = cwp + "gaspaymentenforcement";
rv.into_iter()
.enumerate()
.filter_map(|(i, r)| {
r.parse_config(&cwp.join(i.to_string()))
.take_config_err(&mut err)
})
.collect()
})
.unwrap_or_else(|| vec![Default::default()]);
let whitelist = raw
.whitelist
.and_then(|j| {
serde_json::from_str::<MatchingList>(&j).take_err(&mut err, || cwp + "whitelist")
})
.unwrap_or_default();
let blacklist = raw
.blacklist
.and_then(|j| {
serde_json::from_str::<MatchingList>(&j).take_err(&mut err, || cwp + "blacklist")
})
.unwrap_or_default();
let transaction_gas_limit = raw.transactiongaslimit.and_then(|r| {
r.try_into()
.take_err(&mut err, || cwp + "transactiongaslimit")
});
let skip_transaction_gas_limit_for = raw
.skiptransactiongaslimitfor
.and_then(|r| {
r.split(',')
.map(str::parse)
.collect::<Result<_, _>>()
.context("Error parsing domain id")
.take_err(&mut err, || cwp + "skiptransactiongaslimitfor")
})
.unwrap_or_default();
let mut origin_chain_names = {
#[allow(deprecated)]
raw.originchainname
}
.map(parse_chains);
if origin_chain_names.is_some() {
warn!(
path = (cwp + "originchainname").json_name(),
"`originchainname` is deprecated, use `relaychains` instead"
);
}
let mut destination_chain_names = {
#[allow(deprecated)]
raw.destinationchainnames
}
.map(parse_chains);
if destination_chain_names.is_some() {
warn!(
path = (cwp + "destinationchainnames").json_name(),
"`destinationchainnames` is deprecated, use `relaychains` instead"
);
}
if let Some(relay_chain_names) = raw.relaychains.map(parse_chains) {
if origin_chain_names.is_some() {
err.push(
cwp + "originchainname",
eyre!("Cannot use `relaychains` and `originchainname` at the same time"),
);
}
if destination_chain_names.is_some() {
err.push(
cwp + "destinationchainnames",
eyre!("Cannot use `relaychains` and `destinationchainnames` at the same time"),
);
}
if relay_chain_names.len() < 2 {
err.push(
cwp + "relaychains",
eyre!(
"The relayer must be configured with at least two chains to relay between"
),
)
}
origin_chain_names = Some(relay_chain_names.clone());
destination_chain_names = Some(relay_chain_names);
} else if origin_chain_names.is_none() && destination_chain_names.is_none() {
err.push(
cwp + "relaychains",
eyre!("The relayer must be configured with at least two chains to relay between"),
);
} else if origin_chain_names.is_none() {
err.push(
cwp + "originchainname",
eyre!("The relayer must be configured with an origin chain (alternatively use `relaychains`)"),
);
} else if destination_chain_names.is_none() {
err.push(
cwp + "destinationchainnames",
eyre!("The relayer must be configured with at least one destination chain (alternatively use `relaychains`)"),
);
}
let db = raw
.db
.and_then(|r| r.parse().take_err(&mut err, || cwp + "db"))
.unwrap_or_else(|| std::env::current_dir().unwrap().join("hyperlane_db"));
let (Some(origin_chain_names), Some(destination_chain_names)) =
(origin_chain_names, destination_chain_names)
else { return Err(err) };
let chain_filter = origin_chain_names
.iter()
.chain(&destination_chain_names)
.map(String::as_str)
.collect();
let base = raw
.base
.parse_config_with_filter::<Settings>(cwp, Some(&chain_filter))
.take_config_err(&mut err);
let origin_chains = base
.as_ref()
.map(|base| {
origin_chain_names
.iter()
.filter_map(|origin| {
base.lookup_domain(origin)
.context("Missing configuration for an origin chain")
.take_err(&mut err, || cwp + "chains" + origin)
})
.collect()
})
.unwrap_or_default();
// validate all destination chains are present and get their HyperlaneDomain.
let destination_chains: HashSet<_> = base
.as_ref()
.map(|base| {
destination_chain_names
.iter()
.filter_map(|destination| {
base.lookup_domain(destination)
.context("Missing configuration for a destination chain")
.take_err(&mut err, || cwp + "chains" + destination)
})
.collect()
})
.unwrap_or_default();
if let Some(base) = &base {
for domain in &destination_chains {
base.chain_setup(domain)
.unwrap()
.signer
.as_ref()
.ok_or_else(|| eyre!("Signer is required for destination chains"))
.take_err(&mut err, || cwp + "chains" + domain.name() + "signer");
}
}
cfg_unwrap_all!(cwp, err: [base]);
err.into_result(Self {
base,
db,
origin_chains,
destination_chains,
gas_payment_enforcement,
whitelist,
blacklist,
transaction_gas_limit,
skip_transaction_gas_limit_for,
allow_local_checkpoint_syncers: raw.allowlocalcheckpointsyncers,
})
}
}
fn default_gasfraction() -> String {
"1/2".into()
}
fn parse_chains(chains_str: String) -> Vec<String> {
chains_str.split(',').map(str::to_ascii_lowercase).collect()
}

@ -7,17 +7,15 @@
use std::{collections::HashSet, default::Default}; use std::{collections::HashSet, default::Default};
use derive_more::{AsMut, AsRef, Deref, DerefMut}; use derive_more::{AsMut, AsRef, Deref, DerefMut};
use eyre::{eyre, Context}; use eyre::Context;
use hyperlane_base::{ use hyperlane_base::{
impl_loadable_from_settings, impl_loadable_from_settings,
settings::{ settings::{
deprecated_parser::DeprecatedRawSettings,
parser::{RawAgentConf, ValueParser}, parser::{RawAgentConf, ValueParser},
Settings, Settings,
}, },
}; };
use hyperlane_core::{cfg_unwrap_all, config::*, HyperlaneDomain}; use hyperlane_core::{cfg_unwrap_all, config::*, HyperlaneDomain};
use itertools::Itertools;
use serde::Deserialize; use serde::Deserialize;
use serde_json::Value; use serde_json::Value;
@ -34,25 +32,12 @@ pub struct ScraperSettings {
pub chains_to_scrape: Vec<HyperlaneDomain>, pub chains_to_scrape: Vec<HyperlaneDomain>,
} }
/// Raw settings for `Scraper`
#[derive(Debug, Deserialize, AsMut)]
#[serde(rename_all = "camelCase")]
pub struct DeprecatedRawScraperSettings {
#[serde(flatten, default)]
#[as_mut]
base: DeprecatedRawSettings,
/// Database connection string
db: Option<String>,
/// Comma separated list of chains to scrape
chainstoscrape: Option<String>,
}
impl_loadable_from_settings!(Scraper, DeprecatedRawScraperSettings -> ScraperSettings);
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
#[serde(transparent)] #[serde(transparent)]
struct RawScraperSettings(Value); struct RawScraperSettings(Value);
impl_loadable_from_settings!(Scraper, RawScraperSettings -> ScraperSettings);
impl FromRawConf<RawScraperSettings> for ScraperSettings { impl FromRawConf<RawScraperSettings> for ScraperSettings {
fn from_config_filtered( fn from_config_filtered(
raw: RawScraperSettings, raw: RawScraperSettings,
@ -107,53 +92,3 @@ impl FromRawConf<RawScraperSettings> for ScraperSettings {
}) })
} }
} }
impl FromRawConf<DeprecatedRawScraperSettings> for ScraperSettings {
fn from_config_filtered(
raw: DeprecatedRawScraperSettings,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
let mut err = ConfigParsingError::default();
let db = raw
.db
.ok_or_else(|| eyre!("Missing `db` connection string"))
.take_err(&mut err, || cwp + "db");
let Some(chains_to_scrape) = raw
.chainstoscrape
.ok_or_else(|| eyre!("Missing `chainstoscrape` list"))
.take_err(&mut err, || cwp + "chainstoscrape")
.map(|s| s.split(',').map(str::to_ascii_lowercase).collect::<Vec<_>>())
else { return Err(err) };
let base = raw
.base
.parse_config_with_filter::<Settings>(
cwp,
Some(&chains_to_scrape.iter().map(String::as_str).collect()),
)
.take_config_err(&mut err);
let chains_to_scrape = base
.as_ref()
.map(|base| {
chains_to_scrape
.iter()
.filter_map(|chain| {
base.lookup_domain(chain)
.context("Missing configuration for a chain in `chainstoscrape`")
.take_err(&mut err, || cwp + "chains" + chain)
})
.collect_vec()
})
.unwrap_or_default();
err.into_result(Self {
base: base.unwrap(),
db: db.unwrap(),
chains_to_scrape,
})
}
}

@ -11,9 +11,6 @@ use eyre::{eyre, Context};
use hyperlane_base::{ use hyperlane_base::{
impl_loadable_from_settings, impl_loadable_from_settings,
settings::{ settings::{
deprecated_parser::{
DeprecatedRawCheckpointSyncerConf, DeprecatedRawSettings, DeprecatedRawSignerConf,
},
parser::{RawAgentConf, RawAgentSignerConf, ValueParser}, parser::{RawAgentConf, RawAgentSignerConf, ValueParser},
CheckpointSyncerConf, Settings, SignerConf, CheckpointSyncerConf, Settings, SignerConf,
}, },
@ -45,34 +42,12 @@ pub struct ValidatorSettings {
pub interval: Duration, pub interval: Duration,
} }
/// Raw settings for `Validator`
#[derive(Debug, Deserialize, AsMut)]
#[serde(rename_all = "camelCase")]
pub struct DeprecatedRawValidatorSettings {
#[serde(flatten, default)]
#[as_mut]
base: DeprecatedRawSettings,
/// Database path (path on the fs)
db: Option<String>,
// Name of the chain to validate message on
originchainname: Option<String>,
/// The validator attestation signer
#[serde(default)]
validator: DeprecatedRawSignerConf,
/// The checkpoint syncer configuration
checkpointsyncer: Option<DeprecatedRawCheckpointSyncerConf>,
/// The reorg_period in blocks
reorgperiod: Option<StrOrInt>,
/// How frequently to check for new checkpoints
interval: Option<StrOrInt>,
}
impl_loadable_from_settings!(Validator, DeprecatedRawValidatorSettings -> ValidatorSettings);
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
#[serde(transparent)] #[serde(transparent)]
struct RawValidatorSettings(Value); struct RawValidatorSettings(Value);
impl_loadable_from_settings!(Validator, RawValidatorSettings -> ValidatorSettings);
impl FromRawConf<RawValidatorSettings> for ValidatorSettings { impl FromRawConf<RawValidatorSettings> for ValidatorSettings {
fn from_config_filtered( fn from_config_filtered(
raw: RawValidatorSettings, raw: RawValidatorSettings,
@ -151,6 +126,14 @@ impl FromRawConf<RawValidatorSettings> for ValidatorSettings {
cfg_unwrap_all!(cwp, err: [base, origin_chain, validator, checkpoint_syncer]); cfg_unwrap_all!(cwp, err: [base, origin_chain, validator, checkpoint_syncer]);
let mut base: Settings = base;
// If the origin chain is an EVM chain, then we can use the validator as the signer if needed.
if origin_chain.domain_protocol() == HyperlaneDomainProtocol::Ethereum {
if let Some(origin) = base.chains.get_mut(origin_chain.name()) {
origin.signer.get_or_insert_with(|| validator.clone());
}
}
err.into_result(Self { err.into_result(Self {
base, base,
db, db,
@ -210,93 +193,3 @@ fn parse_checkpoint_syncer(syncer: ValueParser) -> ConfigResult<CheckpointSyncer
None => Err(err), None => Err(err),
} }
} }
impl FromRawConf<DeprecatedRawValidatorSettings> for ValidatorSettings {
fn from_config_filtered(
raw: DeprecatedRawValidatorSettings,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
let mut err = ConfigParsingError::default();
let validator = raw
.validator
.parse_config::<SignerConf>(&cwp.join("validator"))
.take_config_err(&mut err);
let checkpoint_syncer = raw
.checkpointsyncer
.ok_or_else(|| eyre!("Missing `checkpointsyncer`"))
.take_err(&mut err, || cwp + "checkpointsyncer")
.and_then(|r| {
r.parse_config(&cwp.join("checkpointsyncer"))
.take_config_err(&mut err)
});
let reorg_period = raw
.reorgperiod
.ok_or_else(|| eyre!("Missing `reorgperiod`"))
.take_err(&mut err, || cwp + "reorgperiod")
.and_then(|r| r.try_into().take_err(&mut err, || cwp + "reorgperiod"));
let interval = raw
.interval
.and_then(|r| {
r.try_into()
.map(Duration::from_secs)
.take_err(&mut err, || cwp + "interval")
})
.unwrap_or(Duration::from_secs(5));
let Some(origin_chain_name) = raw
.originchainname
.ok_or_else(|| eyre!("Missing `originchainname`"))
.take_err(&mut err, || cwp + "originchainname")
.map(|s| s.to_ascii_lowercase())
else { return Err(err) };
let db = raw
.db
.and_then(|r| r.parse().take_err(&mut err, || cwp + "db"))
.unwrap_or_else(|| {
std::env::current_dir()
.unwrap()
.join(format!("validator_db_{origin_chain_name}"))
});
let base = raw
.base
.parse_config_with_filter::<Settings>(
cwp,
Some(&[origin_chain_name.as_ref()].into_iter().collect()),
)
.take_config_err(&mut err);
let origin_chain = base.as_ref().and_then(|base| {
base.lookup_domain(&origin_chain_name)
.context("Missing configuration for the origin chain")
.take_err(&mut err, || cwp + "chains" + &origin_chain_name)
});
cfg_unwrap_all!(cwp, err: [base, origin_chain, validator, checkpoint_syncer, reorg_period]);
let mut base = base;
if origin_chain.domain_protocol() == HyperlaneDomainProtocol::Ethereum {
// if an EVM chain we can assume the chain signer is the validator signer when not
// specified
if let Some(chain) = base.chains.get_mut(origin_chain.name()) {
chain.signer.get_or_insert_with(|| validator.clone());
}
}
err.into_result(Self {
base,
db,
origin_chain,
validator,
checkpoint_syncer,
reorg_period,
interval,
})
}
}

@ -4,6 +4,7 @@ use std::time::{Duration, Instant};
use std::vec; use std::vec;
use eyre::Result; use eyre::Result;
use hyperlane_core::MerkleTreeHook;
use prometheus::IntGauge; use prometheus::IntGauge;
use tokio::time::sleep; use tokio::time::sleep;
use tracing::instrument; use tracing::instrument;
@ -12,7 +13,7 @@ use tracing::{debug, info};
use hyperlane_base::{db::HyperlaneRocksDB, CheckpointSyncer, CoreMetrics}; use hyperlane_base::{db::HyperlaneRocksDB, CheckpointSyncer, CoreMetrics};
use hyperlane_core::{ use hyperlane_core::{
accumulator::incremental::IncrementalMerkle, Checkpoint, CheckpointWithMessageId, accumulator::incremental::IncrementalMerkle, Checkpoint, CheckpointWithMessageId,
HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneSignerExt, Mailbox, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneSignerExt,
}; };
use hyperlane_ethereum::SingletonSignerHandle; use hyperlane_ethereum::SingletonSignerHandle;
@ -21,7 +22,7 @@ pub(crate) struct ValidatorSubmitter {
interval: Duration, interval: Duration,
reorg_period: Option<NonZeroU64>, reorg_period: Option<NonZeroU64>,
signer: SingletonSignerHandle, signer: SingletonSignerHandle,
mailbox: Arc<dyn Mailbox>, merkle_tree_hook: Arc<dyn MerkleTreeHook>,
checkpoint_syncer: Arc<dyn CheckpointSyncer>, checkpoint_syncer: Arc<dyn CheckpointSyncer>,
message_db: HyperlaneRocksDB, message_db: HyperlaneRocksDB,
metrics: ValidatorSubmitterMetrics, metrics: ValidatorSubmitterMetrics,
@ -31,7 +32,7 @@ impl ValidatorSubmitter {
pub(crate) fn new( pub(crate) fn new(
interval: Duration, interval: Duration,
reorg_period: u64, reorg_period: u64,
mailbox: Arc<dyn Mailbox>, merkle_tree_hook: Arc<dyn MerkleTreeHook>,
signer: SingletonSignerHandle, signer: SingletonSignerHandle,
checkpoint_syncer: Arc<dyn CheckpointSyncer>, checkpoint_syncer: Arc<dyn CheckpointSyncer>,
message_db: HyperlaneRocksDB, message_db: HyperlaneRocksDB,
@ -40,7 +41,7 @@ impl ValidatorSubmitter {
Self { Self {
reorg_period: NonZeroU64::new(reorg_period), reorg_period: NonZeroU64::new(reorg_period),
interval, interval,
mailbox, merkle_tree_hook,
signer, signer,
checkpoint_syncer, checkpoint_syncer,
message_db, message_db,
@ -52,12 +53,12 @@ impl ValidatorSubmitter {
Checkpoint { Checkpoint {
root: tree.root(), root: tree.root(),
index: tree.index(), index: tree.index(),
mailbox_address: self.mailbox.address(), merkle_tree_hook_address: self.merkle_tree_hook.address(),
mailbox_domain: self.mailbox.domain().id(), mailbox_domain: self.merkle_tree_hook.domain().id(),
} }
} }
#[instrument(err, skip(self, tree), fields(domain=%self.mailbox.domain()))] #[instrument(err, skip(self, tree), fields(domain=%self.merkle_tree_hook.domain()))]
pub(crate) async fn checkpoint_submitter( pub(crate) async fn checkpoint_submitter(
self, self,
mut tree: IncrementalMerkle, mut tree: IncrementalMerkle,
@ -72,7 +73,10 @@ impl ValidatorSubmitter {
c c
} else { } else {
// lag by reorg period to match message indexing // lag by reorg period to match message indexing
let latest_checkpoint = self.mailbox.latest_checkpoint(self.reorg_period).await?; let latest_checkpoint = self
.merkle_tree_hook
.latest_checkpoint(self.reorg_period)
.await?;
self.metrics self.metrics
.latest_checkpoint_observed .latest_checkpoint_observed
.set(latest_checkpoint.index as i64); .set(latest_checkpoint.index as i64);
@ -177,7 +181,10 @@ impl ValidatorSubmitter {
loop { loop {
// Check the latest checkpoint // Check the latest checkpoint
let latest_checkpoint = self.mailbox.latest_checkpoint(self.reorg_period).await?; let latest_checkpoint = self
.merkle_tree_hook
.latest_checkpoint(self.reorg_period)
.await?;
self.metrics self.metrics
.legacy_latest_checkpoint_observed .legacy_latest_checkpoint_observed

@ -3,6 +3,7 @@ use std::{num::NonZeroU64, sync::Arc, time::Duration};
use async_trait::async_trait; use async_trait::async_trait;
use derive_more::AsRef; use derive_more::AsRef;
use eyre::Result; use eyre::Result;
use futures_util::future::ready;
use hyperlane_base::{ use hyperlane_base::{
db::{HyperlaneRocksDB, DB}, db::{HyperlaneRocksDB, DB},
run_all, BaseAgent, CheckpointSyncer, ContractSyncMetrics, CoreMetrics, HyperlaneAgentCore, run_all, BaseAgent, CheckpointSyncer, ContractSyncMetrics, CoreMetrics, HyperlaneAgentCore,
@ -10,8 +11,8 @@ use hyperlane_base::{
}; };
use hyperlane_core::{ use hyperlane_core::{
accumulator::incremental::IncrementalMerkle, Announcement, ChainResult, HyperlaneChain, accumulator::incremental::IncrementalMerkle, Announcement, ChainResult, HyperlaneChain,
HyperlaneContract, HyperlaneDomain, HyperlaneSigner, HyperlaneSignerExt, Mailbox, TxOutcome, HyperlaneContract, HyperlaneDomain, HyperlaneSigner, HyperlaneSignerExt, Mailbox,
ValidatorAnnounce, H256, U256, MerkleTreeHook, TxOutcome, ValidatorAnnounce, H256, U256,
}; };
use hyperlane_ethereum::{SingletonSigner, SingletonSignerHandle}; use hyperlane_ethereum::{SingletonSigner, SingletonSignerHandle};
use tokio::{task::JoinHandle, time::sleep}; use tokio::{task::JoinHandle, time::sleep};
@ -31,6 +32,7 @@ pub struct Validator {
db: HyperlaneRocksDB, db: HyperlaneRocksDB,
message_sync: Arc<MessageContractSync>, message_sync: Arc<MessageContractSync>,
mailbox: Arc<dyn Mailbox>, mailbox: Arc<dyn Mailbox>,
merkle_tree_hook: Arc<dyn MerkleTreeHook>,
validator_announce: Arc<dyn ValidatorAnnounce>, validator_announce: Arc<dyn ValidatorAnnounce>,
signer: SingletonSignerHandle, signer: SingletonSignerHandle,
// temporary holder until `run` is called // temporary holder until `run` is called
@ -39,6 +41,7 @@ pub struct Validator {
interval: Duration, interval: Duration,
checkpoint_syncer: Arc<dyn CheckpointSyncer>, checkpoint_syncer: Arc<dyn CheckpointSyncer>,
} }
#[async_trait] #[async_trait]
impl BaseAgent for Validator { impl BaseAgent for Validator {
const AGENT_NAME: &'static str = "validator"; const AGENT_NAME: &'static str = "validator";
@ -62,6 +65,10 @@ impl BaseAgent for Validator {
.build_mailbox(&settings.origin_chain, &metrics) .build_mailbox(&settings.origin_chain, &metrics)
.await?; .await?;
let merkle_tree_hook = settings
.build_merkle_tree_hook(&settings.origin_chain, &metrics)
.await?;
let validator_announce = settings let validator_announce = settings
.build_validator_announce(&settings.origin_chain, &metrics) .build_validator_announce(&settings.origin_chain, &metrics)
.await?; .await?;
@ -83,6 +90,7 @@ impl BaseAgent for Validator {
core, core,
db: msg_db, db: msg_db,
mailbox: mailbox.into(), mailbox: mailbox.into(),
merkle_tree_hook: merkle_tree_hook.into(),
message_sync, message_sync,
validator_announce: validator_announce.into(), validator_announce: validator_announce.into(),
signer, signer,
@ -112,22 +120,26 @@ impl BaseAgent for Validator {
let reorg_period = NonZeroU64::new(self.reorg_period); let reorg_period = NonZeroU64::new(self.reorg_period);
// Ensure that the mailbox has count > 0 before we begin indexing // Ensure that the merkle tree hook has count > 0 before we begin indexing
// messages or submitting checkpoints. // messages or submitting checkpoints.
while self loop {
.mailbox match self.merkle_tree_hook.count(reorg_period).await {
.count(reorg_period) Ok(0) => {
.await info!("Waiting for first message in merkle tree hook");
.expect("Failed to get count of mailbox") sleep(self.interval).await;
== 0 }
{ Ok(_) => {
info!("Waiting for first message to mailbox"); tasks.push(self.run_message_sync().await);
sleep(self.interval).await; for checkpoint_sync_task in self.run_checkpoint_submitters().await {
} tasks.push(checkpoint_sync_task);
}
tasks.push(self.run_message_sync().await); break;
for checkpoint_sync_task in self.run_checkpoint_submitters().await { }
tasks.push(checkpoint_sync_task); _ => {
// Future that immediately resolves
return tokio::spawn(ready(Ok(()))).instrument(info_span!("Validator"));
}
}
} }
run_all(tasks) run_all(tasks)
@ -155,7 +167,7 @@ impl Validator {
let submitter = ValidatorSubmitter::new( let submitter = ValidatorSubmitter::new(
self.interval, self.interval,
self.reorg_period, self.reorg_period,
self.mailbox.clone(), self.merkle_tree_hook.clone(),
self.signer.clone(), self.signer.clone(),
self.checkpoint_syncer.clone(), self.checkpoint_syncer.clone(),
self.db.clone(), self.db.clone(),
@ -165,11 +177,11 @@ impl Validator {
let empty_tree = IncrementalMerkle::default(); let empty_tree = IncrementalMerkle::default();
let reorg_period = NonZeroU64::new(self.reorg_period); let reorg_period = NonZeroU64::new(self.reorg_period);
let tip_tree = self let tip_tree = self
.mailbox .merkle_tree_hook
.tree(reorg_period) .tree(reorg_period)
.await .await
.expect("failed to get mailbox tree"); .expect("failed to get merkle tree");
assert!(tip_tree.count() > 0, "mailbox tree is empty"); assert!(tip_tree.count() > 0, "merkle tree is empty");
let backfill_target = submitter.checkpoint(&tip_tree); let backfill_target = submitter.checkpoint(&tip_tree);
let legacy_submitter = submitter.clone(); let legacy_submitter = submitter.clone();

@ -1,36 +1,4 @@
[ [
{
"anonymous": false,
"inputs": [
{
"indexed": false,
"internalType": "address",
"name": "beneficiary",
"type": "address"
}
],
"name": "BeneficiarySet",
"type": "event"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"internalType": "uint32",
"name": "remoteDomain",
"type": "uint32"
},
{
"indexed": false,
"internalType": "address",
"name": "gasOracle",
"type": "address"
}
],
"name": "GasOracleSet",
"type": "event"
},
{ {
"anonymous": false, "anonymous": false,
"inputs": [ "inputs": [
@ -62,145 +30,6 @@
"name": "GasPayment", "name": "GasPayment",
"type": "event" "type": "event"
}, },
{
"anonymous": false,
"inputs": [
{
"indexed": false,
"internalType": "uint8",
"name": "version",
"type": "uint8"
}
],
"name": "Initialized",
"type": "event"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"internalType": "address",
"name": "previousOwner",
"type": "address"
},
{
"indexed": true,
"internalType": "address",
"name": "newOwner",
"type": "address"
}
],
"name": "OwnershipTransferred",
"type": "event"
},
{
"inputs": [],
"name": "beneficiary",
"outputs": [
{
"internalType": "address",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "claim",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [],
"name": "deployedBlock",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "",
"type": "uint32"
}
],
"name": "gasOracles",
"outputs": [
{
"internalType": "contract IGasOracle",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "_destinationDomain",
"type": "uint32"
}
],
"name": "getExchangeRateAndGasPrice",
"outputs": [
{
"internalType": "uint128",
"name": "tokenExchangeRate",
"type": "uint128"
},
{
"internalType": "uint128",
"name": "gasPrice",
"type": "uint128"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "_owner",
"type": "address"
},
{
"internalType": "address",
"name": "_beneficiary",
"type": "address"
}
],
"name": "initialize",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [],
"name": "owner",
"outputs": [
{
"internalType": "address",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{ {
"inputs": [ "inputs": [
{ {
@ -229,48 +58,6 @@
"stateMutability": "payable", "stateMutability": "payable",
"type": "function" "type": "function"
}, },
{
"inputs": [
{
"internalType": "bytes",
"name": "metadata",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "message",
"type": "bytes"
}
],
"name": "postDispatch",
"outputs": [],
"stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
"internalType": "bytes",
"name": "metadata",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "message",
"type": "bytes"
}
],
"name": "quoteDispatch",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{ {
"inputs": [ "inputs": [
{ {
@ -294,63 +81,5 @@
], ],
"stateMutability": "view", "stateMutability": "view",
"type": "function" "type": "function"
},
{
"inputs": [],
"name": "renounceOwnership",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "_beneficiary",
"type": "address"
}
],
"name": "setBeneficiary",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"components": [
{
"internalType": "uint32",
"name": "remoteDomain",
"type": "uint32"
},
{
"internalType": "address",
"name": "gasOracle",
"type": "address"
}
],
"internalType": "struct InterchainGasPaymaster.GasOracleConfig[]",
"name": "_configs",
"type": "tuple[]"
}
],
"name": "setGasOracles",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "newOwner",
"type": "address"
}
],
"name": "transferOwnership",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
} }
] ]

@ -83,12 +83,12 @@
}, },
{ {
"inputs": [], "inputs": [],
"name": "count", "name": "defaultHook",
"outputs": [ "outputs": [
{ {
"internalType": "uint32", "internalType": "contract IPostDispatchHook",
"name": "", "name": "",
"type": "uint32" "type": "address"
} }
], ],
"stateMutability": "view", "stateMutability": "view",
@ -130,17 +130,22 @@
"inputs": [ "inputs": [
{ {
"internalType": "uint32", "internalType": "uint32",
"name": "_destinationDomain", "name": "destinationDomain",
"type": "uint32" "type": "uint32"
}, },
{ {
"internalType": "bytes32", "internalType": "bytes32",
"name": "_recipientAddress", "name": "recipientAddress",
"type": "bytes32" "type": "bytes32"
}, },
{ {
"internalType": "bytes", "internalType": "bytes",
"name": "_messageBody", "name": "body",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "defaultHookMetadata",
"type": "bytes" "type": "bytes"
} }
], ],
@ -148,22 +153,98 @@
"outputs": [ "outputs": [
{ {
"internalType": "bytes32", "internalType": "bytes32",
"name": "", "name": "messageId",
"type": "bytes32"
}
],
"stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "body",
"type": "bytes"
},
{
"internalType": "contract IPostDispatchHook",
"name": "customHook",
"type": "address"
},
{
"internalType": "bytes",
"name": "customHookMetadata",
"type": "bytes"
}
],
"name": "dispatch",
"outputs": [
{
"internalType": "bytes32",
"name": "messageId",
"type": "bytes32"
}
],
"stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
}
],
"name": "dispatch",
"outputs": [
{
"internalType": "bytes32",
"name": "messageId",
"type": "bytes32" "type": "bytes32"
} }
], ],
"stateMutability": "nonpayable", "stateMutability": "payable",
"type": "function" "type": "function"
}, },
{ {
"inputs": [], "inputs": [],
"name": "latestCheckpoint", "name": "latestDispatchedId",
"outputs": [ "outputs": [
{ {
"internalType": "bytes32", "internalType": "bytes32",
"name": "", "name": "",
"type": "bytes32" "type": "bytes32"
}, }
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "localDomain",
"outputs": [
{ {
"internalType": "uint32", "internalType": "uint32",
"name": "", "name": "",
@ -175,7 +256,7 @@
}, },
{ {
"inputs": [], "inputs": [],
"name": "localDomain", "name": "nonce",
"outputs": [ "outputs": [
{ {
"internalType": "uint32", "internalType": "uint32",
@ -190,25 +271,88 @@
"inputs": [ "inputs": [
{ {
"internalType": "bytes", "internalType": "bytes",
"name": "_metadata", "name": "metadata",
"type": "bytes" "type": "bytes"
}, },
{ {
"internalType": "bytes", "internalType": "bytes",
"name": "_message", "name": "message",
"type": "bytes" "type": "bytes"
} }
], ],
"name": "process", "name": "process",
"outputs": [], "outputs": [],
"stateMutability": "nonpayable", "stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
}
],
"name": "quoteDispatch",
"outputs": [
{
"internalType": "uint256",
"name": "fee",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "defaultHookMetadata",
"type": "bytes"
}
],
"name": "quoteDispatch",
"outputs": [
{
"internalType": "uint256",
"name": "fee",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function" "type": "function"
}, },
{ {
"inputs": [ "inputs": [
{ {
"internalType": "address", "internalType": "address",
"name": "_recipient", "name": "recipient",
"type": "address" "type": "address"
} }
], ],
@ -216,7 +360,7 @@
"outputs": [ "outputs": [
{ {
"internalType": "contract IInterchainSecurityModule", "internalType": "contract IInterchainSecurityModule",
"name": "", "name": "module",
"type": "address" "type": "address"
} }
], ],
@ -225,12 +369,12 @@
}, },
{ {
"inputs": [], "inputs": [],
"name": "root", "name": "requiredHook",
"outputs": [ "outputs": [
{ {
"internalType": "bytes32", "internalType": "contract IPostDispatchHook",
"name": "", "name": "",
"type": "bytes32" "type": "address"
} }
], ],
"stateMutability": "view", "stateMutability": "view",

@ -10,6 +10,19 @@
"stateMutability": "nonpayable", "stateMutability": "nonpayable",
"type": "constructor" "type": "constructor"
}, },
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"internalType": "address",
"name": "hook",
"type": "address"
}
],
"name": "DefaultHookSet",
"type": "event"
},
{ {
"anonymous": false, "anonymous": false,
"inputs": [ "inputs": [
@ -99,12 +112,6 @@
"name": "OwnershipTransferred", "name": "OwnershipTransferred",
"type": "event" "type": "event"
}, },
{
"anonymous": false,
"inputs": [],
"name": "Paused",
"type": "event"
},
{ {
"anonymous": false, "anonymous": false,
"inputs": [ "inputs": [
@ -145,22 +152,16 @@
}, },
{ {
"anonymous": false, "anonymous": false,
"inputs": [], "inputs": [
"name": "Unpaused",
"type": "event"
},
{
"inputs": [],
"name": "MAX_MESSAGE_BODY_BYTES",
"outputs": [
{ {
"internalType": "uint256", "indexed": true,
"name": "", "internalType": "address",
"type": "uint256" "name": "hook",
"type": "address"
} }
], ],
"stateMutability": "view", "name": "RequiredHookSet",
"type": "function" "type": "event"
}, },
{ {
"inputs": [], "inputs": [],
@ -177,12 +178,12 @@
}, },
{ {
"inputs": [], "inputs": [],
"name": "count", "name": "defaultHook",
"outputs": [ "outputs": [
{ {
"internalType": "uint32", "internalType": "contract IPostDispatchHook",
"name": "", "name": "",
"type": "uint32" "type": "address"
} }
], ],
"stateMutability": "view", "stateMutability": "view",
@ -205,7 +206,7 @@
"inputs": [ "inputs": [
{ {
"internalType": "bytes32", "internalType": "bytes32",
"name": "", "name": "_id",
"type": "bytes32" "type": "bytes32"
} }
], ],
@ -220,6 +221,92 @@
"stateMutability": "view", "stateMutability": "view",
"type": "function" "type": "function"
}, },
{
"inputs": [],
"name": "deployedBlock",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "metadata",
"type": "bytes"
},
{
"internalType": "contract IPostDispatchHook",
"name": "hook",
"type": "address"
}
],
"name": "dispatch",
"outputs": [
{
"internalType": "bytes32",
"name": "",
"type": "bytes32"
}
],
"stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "hookMetadata",
"type": "bytes"
}
],
"name": "dispatch",
"outputs": [
{
"internalType": "bytes32",
"name": "",
"type": "bytes32"
}
],
"stateMutability": "payable",
"type": "function"
},
{ {
"inputs": [ "inputs": [
{ {
@ -246,7 +333,7 @@
"type": "bytes32" "type": "bytes32"
} }
], ],
"stateMutability": "nonpayable", "stateMutability": "payable",
"type": "function" "type": "function"
}, },
{ {
@ -260,6 +347,16 @@
"internalType": "address", "internalType": "address",
"name": "_defaultIsm", "name": "_defaultIsm",
"type": "address" "type": "address"
},
{
"internalType": "address",
"name": "_defaultHook",
"type": "address"
},
{
"internalType": "address",
"name": "_requiredHook",
"type": "address"
} }
], ],
"name": "initialize", "name": "initialize",
@ -269,12 +366,12 @@
}, },
{ {
"inputs": [], "inputs": [],
"name": "isPaused", "name": "latestDispatchedId",
"outputs": [ "outputs": [
{ {
"internalType": "bool", "internalType": "bytes32",
"name": "", "name": "",
"type": "bool" "type": "bytes32"
} }
], ],
"stateMutability": "view", "stateMutability": "view",
@ -282,13 +379,8 @@
}, },
{ {
"inputs": [], "inputs": [],
"name": "latestCheckpoint", "name": "localDomain",
"outputs": [ "outputs": [
{
"internalType": "bytes32",
"name": "",
"type": "bytes32"
},
{ {
"internalType": "uint32", "internalType": "uint32",
"name": "", "name": "",
@ -300,7 +392,7 @@
}, },
{ {
"inputs": [], "inputs": [],
"name": "localDomain", "name": "nonce",
"outputs": [ "outputs": [
{ {
"internalType": "uint32", "internalType": "uint32",
@ -324,13 +416,6 @@
"stateMutability": "view", "stateMutability": "view",
"type": "function" "type": "function"
}, },
{
"inputs": [],
"name": "pause",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{ {
"inputs": [ "inputs": [
{ {
@ -346,7 +431,147 @@
], ],
"name": "process", "name": "process",
"outputs": [], "outputs": [],
"stateMutability": "nonpayable", "stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
"internalType": "bytes32",
"name": "_id",
"type": "bytes32"
}
],
"name": "processedAt",
"outputs": [
{
"internalType": "uint48",
"name": "",
"type": "uint48"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "bytes32",
"name": "_id",
"type": "bytes32"
}
],
"name": "processor",
"outputs": [
{
"internalType": "address",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "metadata",
"type": "bytes"
},
{
"internalType": "contract IPostDispatchHook",
"name": "hook",
"type": "address"
}
],
"name": "quoteDispatch",
"outputs": [
{
"internalType": "uint256",
"name": "fee",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
}
],
"name": "quoteDispatch",
"outputs": [
{
"internalType": "uint256",
"name": "fee",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "defaultHookMetadata",
"type": "bytes"
}
],
"name": "quoteDispatch",
"outputs": [
{
"internalType": "uint256",
"name": "fee",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function" "type": "function"
}, },
{ {
@ -377,12 +602,12 @@
}, },
{ {
"inputs": [], "inputs": [],
"name": "root", "name": "requiredHook",
"outputs": [ "outputs": [
{ {
"internalType": "bytes32", "internalType": "contract IPostDispatchHook",
"name": "", "name": "",
"type": "bytes32" "type": "address"
} }
], ],
"stateMutability": "view", "stateMutability": "view",
@ -392,11 +617,11 @@
"inputs": [ "inputs": [
{ {
"internalType": "address", "internalType": "address",
"name": "_module", "name": "_hook",
"type": "address" "type": "address"
} }
], ],
"name": "setDefaultIsm", "name": "setDefaultHook",
"outputs": [], "outputs": [],
"stateMutability": "nonpayable", "stateMutability": "nonpayable",
"type": "function" "type": "function"
@ -405,31 +630,37 @@
"inputs": [ "inputs": [
{ {
"internalType": "address", "internalType": "address",
"name": "newOwner", "name": "_module",
"type": "address" "type": "address"
} }
], ],
"name": "transferOwnership", "name": "setDefaultIsm",
"outputs": [], "outputs": [],
"stateMutability": "nonpayable", "stateMutability": "nonpayable",
"type": "function" "type": "function"
}, },
{ {
"inputs": [], "inputs": [
"name": "tree",
"outputs": [
{ {
"internalType": "uint256", "internalType": "address",
"name": "count", "name": "_hook",
"type": "uint256" "type": "address"
} }
], ],
"stateMutability": "view", "name": "setRequiredHook",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function" "type": "function"
}, },
{ {
"inputs": [], "inputs": [
"name": "unpause", {
"internalType": "address",
"name": "newOwner",
"type": "address"
}
],
"name": "transferOwnership",
"outputs": [], "outputs": [],
"stateMutability": "nonpayable", "stateMutability": "nonpayable",
"type": "function" "type": "function"

@ -0,0 +1,156 @@
[
{
"inputs": [
{
"internalType": "address",
"name": "_mailbox",
"type": "address"
}
],
"stateMutability": "nonpayable",
"type": "constructor"
},
{
"anonymous": false,
"inputs": [
{
"indexed": false,
"internalType": "bytes32",
"name": "messageId",
"type": "bytes32"
},
{
"indexed": false,
"internalType": "uint32",
"name": "index",
"type": "uint32"
}
],
"name": "InsertedIntoTree",
"type": "event"
},
{
"inputs": [],
"name": "count",
"outputs": [
{
"internalType": "uint32",
"name": "",
"type": "uint32"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "deployedBlock",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "latestCheckpoint",
"outputs": [
{
"internalType": "bytes32",
"name": "",
"type": "bytes32"
},
{
"internalType": "uint32",
"name": "",
"type": "uint32"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "bytes",
"name": "",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "message",
"type": "bytes"
}
],
"name": "postDispatch",
"outputs": [],
"stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
"internalType": "bytes",
"name": "",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "",
"type": "bytes"
}
],
"name": "quoteDispatch",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "pure",
"type": "function"
},
{
"inputs": [],
"name": "root",
"outputs": [
{
"internalType": "bytes32",
"name": "",
"type": "bytes32"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "tree",
"outputs": [
{
"components": [
{
"internalType": "bytes32[32]",
"name": "branch",
"type": "bytes32[32]"
},
{
"internalType": "uint256",
"name": "count",
"type": "uint256"
}
],
"internalType": "struct MerkleLib.Tree",
"name": "",
"type": "tuple"
}
],
"stateMutability": "view",
"type": "function"
}
]

@ -1,5 +1,3 @@
use hyperlane_core::config::*;
use serde::Deserialize;
use url::Url; use url::Url;
/// Ethereum connection configuration /// Ethereum connection configuration
@ -26,96 +24,3 @@ pub enum ConnectionConf {
url: Url, url: Url,
}, },
} }
/// Ethereum connection configuration
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RawConnectionConf {
/// The type of connection to use
#[serde(rename = "type")]
connection_type: Option<String>,
/// A single url to connect to
url: Option<String>,
/// A comma separated list of urls to connect to
urls: Option<String>,
}
/// Error type when parsing a connection configuration.
#[derive(Debug, thiserror::Error)]
pub enum ConnectionConfError {
/// Unknown connection type was specified
#[error("Unsupported connection type '{0}'")]
UnsupportedConnectionType(String),
/// The url was not specified
#[error("Missing `url` for connection configuration")]
MissingConnectionUrl,
/// The urls were not specified
#[error("Missing `urls` for connection configuration")]
MissingConnectionUrls,
/// The could not be parsed
#[error("Invalid `url` for connection configuration: `{0}` ({1})")]
InvalidConnectionUrl(String, url::ParseError),
/// One of the urls could not be parsed
#[error("Invalid `urls` list for connection configuration: `{0}` ({1})")]
InvalidConnectionUrls(String, url::ParseError),
/// The url was empty
#[error("The `url` value is empty")]
EmptyUrl,
/// The urls were empty
#[error("The `urls` value is empty")]
EmptyUrls,
}
impl FromRawConf<RawConnectionConf> for ConnectionConf {
fn from_config_filtered(
raw: RawConnectionConf,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
use ConnectionConfError::*;
let connection_type = raw.connection_type.as_deref().unwrap_or("http");
let urls = (|| -> ConfigResult<Vec<Url>> {
raw.urls
.as_ref()
.ok_or(MissingConnectionUrls)
.into_config_result(|| cwp + "urls")?
.split(',')
.map(|s| s.parse())
.collect::<Result<Vec<_>, _>>()
.map_err(|e| InvalidConnectionUrls(raw.urls.clone().unwrap(), e))
.into_config_result(|| cwp + "urls")
})();
let url = (|| -> ConfigResult<Url> {
raw.url
.as_ref()
.ok_or(MissingConnectionUrl)
.into_config_result(|| cwp + "url")?
.parse()
.map_err(|e| InvalidConnectionUrl(raw.url.clone().unwrap(), e))
.into_config_result(|| cwp + "url")
})();
macro_rules! make_with_urls {
($variant:ident) => {
if let Ok(urls) = urls {
Ok(Self::$variant { urls })
} else if let Ok(url) = url {
Ok(Self::$variant { urls: vec![url] })
} else {
Err(urls.unwrap_err())
}
};
}
match connection_type {
"httpQuorum" => make_with_urls!(HttpQuorum),
"httpFallback" => make_with_urls!(HttpFallback),
"http" => Ok(Self::Http { url: url? }),
"ws" => Ok(Self::Ws { url: url? }),
t => Err(UnsupportedConnectionType(t.into())).into_config_result(|| cwp.join("type")),
}
}
}

@ -97,6 +97,8 @@ where
.query_with_meta() .query_with_meta()
.await?; .await?;
println!("found gas payment events: {:?}", events);
Ok(events Ok(events
.into_iter() .into_iter()
.map(|(log, log_meta)| { .map(|(log, log_meta)| {

@ -12,8 +12,8 @@ use ethers::prelude::{abi, Lazy, Middleware};
pub use self::{ pub use self::{
aggregation_ism::*, ccip_read_ism::*, config::*, config::*, interchain_gas::*, aggregation_ism::*, ccip_read_ism::*, config::*, config::*, interchain_gas::*,
interchain_gas::*, interchain_security_module::*, interchain_security_module::*, mailbox::*, interchain_gas::*, interchain_security_module::*, interchain_security_module::*, mailbox::*,
mailbox::*, multisig_ism::*, provider::*, routing_ism::*, rpc_clients::*, signers::*, mailbox::*, merkle_tree_hook::*, multisig_ism::*, provider::*, routing_ism::*, rpc_clients::*,
singleton_signer::*, trait_builder::*, validator_announce::*, signers::*, singleton_signer::*, trait_builder::*, validator_announce::*,
}; };
#[cfg(not(doctest))] #[cfg(not(doctest))]
@ -38,6 +38,10 @@ mod interchain_gas;
#[cfg(not(doctest))] #[cfg(not(doctest))]
mod interchain_security_module; mod interchain_security_module;
/// Merkle tree hook abi
#[cfg(not(doctest))]
mod merkle_tree_hook;
/// MultisigIsm abi /// MultisigIsm abi
#[cfg(not(doctest))] #[cfg(not(doctest))]
mod multisig_ism; mod multisig_ism;

@ -10,15 +10,14 @@ use async_trait::async_trait;
use ethers::abi::AbiEncode; use ethers::abi::AbiEncode;
use ethers::prelude::Middleware; use ethers::prelude::Middleware;
use ethers_contract::builders::ContractCall; use ethers_contract::builders::ContractCall;
use ethers_core::types::BlockNumber;
use tracing::instrument; use tracing::instrument;
use hyperlane_core::accumulator::incremental::IncrementalMerkle;
use hyperlane_core::accumulator::TREE_DEPTH;
use hyperlane_core::{ use hyperlane_core::{
utils::fmt_bytes, ChainCommunicationError, ChainResult, Checkpoint, ContractLocator, utils::fmt_bytes, ChainCommunicationError, ChainResult, ContractLocator, HyperlaneAbi,
HyperlaneAbi, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneMessage, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneMessage, HyperlaneProtocolError,
HyperlaneProtocolError, HyperlaneProvider, Indexer, LogMeta, Mailbox, RawHyperlaneMessage, HyperlaneProvider, Indexer, LogMeta, Mailbox, RawHyperlaneMessage, SequenceIndexer,
SequenceIndexer, TxCostEstimate, TxOutcome, H160, H256, U256, TxCostEstimate, TxOutcome, H160, H256, U256,
}; };
use crate::contracts::arbitrum_node_interface::ArbitrumNodeInterface; use crate::contracts::arbitrum_node_interface::ArbitrumNodeInterface;
@ -27,9 +26,6 @@ use crate::trait_builder::BuildableWithProvider;
use crate::tx::{fill_tx_gas_params, report_tx}; use crate::tx::{fill_tx_gas_params, report_tx};
use crate::EthereumProvider; use crate::EthereumProvider;
/// derived from `forge inspect Mailbox storage --pretty`
const MERKLE_TREE_CONTRACT_SLOT: u32 = 152;
impl<M> std::fmt::Display for EthereumMailboxInternal<M> impl<M> std::fmt::Display for EthereumMailboxInternal<M>
where where
M: Middleware, M: Middleware,
@ -159,9 +155,7 @@ where
#[instrument(err, skip(self))] #[instrument(err, skip(self))]
async fn sequence_and_tip(&self) -> ChainResult<(Option<u32>, u32)> { async fn sequence_and_tip(&self) -> ChainResult<(Option<u32>, u32)> {
let tip = Indexer::<HyperlaneMessage>::get_finalized_block_number(self).await?; let tip = Indexer::<HyperlaneMessage>::get_finalized_block_number(self).await?;
let base_call = self.contract.count(); let sequence = self.contract.nonce().block(u64::from(tip)).call().await?;
let call_at_tip = base_call.block(u64::from(tip));
let sequence = call_at_tip.call().await?;
Ok((Some(sequence), tip)) Ok((Some(sequence), tip))
} }
} }
@ -307,59 +301,8 @@ where
{ {
#[instrument(skip(self))] #[instrument(skip(self))]
async fn count(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<u32> { async fn count(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<u32> {
let base_call = self.contract.count(); let lag = maybe_lag.map(|v| v.get()).unwrap_or(0).into();
let call_with_lag = if let Some(lag) = maybe_lag { let fixed_block_number: BlockNumber = self
let tip = self
.provider
.get_block_number()
.await
.map_err(ChainCommunicationError::from_other)?
.as_u64();
base_call.block(tip.saturating_sub(lag.get()))
} else {
base_call
};
let count = call_with_lag.call().await?;
Ok(count)
}
#[instrument(skip(self))]
async fn delivered(&self, id: H256) -> ChainResult<bool> {
Ok(self.contract.delivered(id.into()).call().await?)
}
#[instrument(skip(self))]
async fn latest_checkpoint(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<Checkpoint> {
let base_call = self.contract.latest_checkpoint();
let call_with_lag = match maybe_lag {
Some(lag) => {
let tip = self
.provider
.get_block_number()
.await
.map_err(ChainCommunicationError::from_other)?
.as_u64();
base_call.block(tip.saturating_sub(lag.get()))
}
None => base_call,
};
let (root, index) = call_with_lag.call().await?;
Ok(Checkpoint {
mailbox_address: self.address(),
mailbox_domain: self.domain.id(),
root: root.into(),
index,
})
}
#[instrument(skip(self))]
#[allow(clippy::needless_range_loop)]
async fn tree(&self, lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle> {
let lag = lag.map(|v| v.get()).unwrap_or(0).into();
// use consistent block for all storage slot or view calls to prevent
// race conditions where tree contents change between calls
let fixed_block_number = self
.provider .provider
.get_block_number() .get_block_number()
.await .await
@ -367,51 +310,18 @@ where
.saturating_sub(lag) .saturating_sub(lag)
.into(); .into();
let expected_root = self let nonce = self
.contract
.root()
.block(fixed_block_number)
.call()
.await?
.into();
// TODO: migrate to single contract view call once mailbox is upgraded
// see https://github.com/hyperlane-xyz/hyperlane-monorepo/issues/2250
// let branch = self.contract.branch().block(block_number).call().await;
let mut branch = [H256::zero(); TREE_DEPTH];
for index in 0..TREE_DEPTH {
let slot = U256::from(MERKLE_TREE_CONTRACT_SLOT) + index;
let mut location = [0u8; 32];
slot.to_big_endian(&mut location);
branch[index] = self
.provider
.get_storage_at(
self.contract.address(),
location.into(),
Some(fixed_block_number),
)
.await
.map(Into::into)
.map_err(ChainCommunicationError::from_other)?;
}
let count = self
.contract .contract
.count() .nonce()
.block(fixed_block_number) .block(fixed_block_number)
.call() .call()
.await? as usize; .await?;
Ok(nonce)
let tree = IncrementalMerkle::new(branch, count); }
// validate tree built from storage slot lookups matches expected
// result from root() view call at consistent block
assert_eq!(tree.root(), expected_root);
Ok(tree) #[instrument(skip(self))]
async fn delivered(&self, id: H256) -> ChainResult<bool> {
Ok(self.contract.delivered(id.into()).call().await?)
} }
#[instrument(skip(self))] #[instrument(skip(self))]

@ -0,0 +1,282 @@
#![allow(missing_docs)]
use std::num::NonZeroU64;
use std::ops::RangeInclusive;
use std::sync::Arc;
use async_trait::async_trait;
use ethers::prelude::Middleware;
use ethers_core::types::BlockNumber;
use hyperlane_core::accumulator::incremental::IncrementalMerkle;
use tracing::instrument;
use hyperlane_core::{
ChainCommunicationError, ChainResult, Checkpoint, ContractLocator, HyperlaneChain,
HyperlaneContract, HyperlaneDomain, HyperlaneProvider, Indexer, LogMeta, MerkleTreeHook,
MerkleTreeInsertion, SequenceIndexer, H256,
};
use crate::contracts::merkle_tree_hook::MerkleTreeHook as MerkleTreeHookContract;
use crate::trait_builder::BuildableWithProvider;
use crate::EthereumProvider;
pub struct MerkleTreeHookBuilder {}
#[async_trait]
impl BuildableWithProvider for MerkleTreeHookBuilder {
type Output = Box<dyn MerkleTreeHook>;
async fn build_with_provider<M: Middleware + 'static>(
&self,
provider: M,
locator: &ContractLocator,
) -> Self::Output {
Box::new(EthereumMerkleTreeHook::new(Arc::new(provider), locator))
}
}
pub struct MerkleTreeHookIndexerBuilder {
pub finality_blocks: u32,
}
#[async_trait]
impl BuildableWithProvider for MerkleTreeHookIndexerBuilder {
type Output = Box<dyn SequenceIndexer<MerkleTreeInsertion>>;
async fn build_with_provider<M: Middleware + 'static>(
&self,
provider: M,
locator: &ContractLocator,
) -> Self::Output {
Box::new(EthereumMerkleTreeHookIndexer::new(
Arc::new(provider),
locator,
self.finality_blocks,
))
}
}
#[derive(Debug)]
/// Struct that retrieves event data for an Ethereum MerkleTreeHook
pub struct EthereumMerkleTreeHookIndexer<M>
where
M: Middleware,
{
contract: Arc<MerkleTreeHookContract<M>>,
provider: Arc<M>,
finality_blocks: u32,
}
impl<M> EthereumMerkleTreeHookIndexer<M>
where
M: Middleware + 'static,
{
/// Create new EthereumMerkleTreeHookIndexer
pub fn new(provider: Arc<M>, locator: &ContractLocator, finality_blocks: u32) -> Self {
Self {
contract: Arc::new(MerkleTreeHookContract::new(
locator.address,
provider.clone(),
)),
provider,
finality_blocks,
}
}
}
#[async_trait]
impl<M> Indexer<MerkleTreeInsertion> for EthereumMerkleTreeHookIndexer<M>
where
M: Middleware + 'static,
{
#[instrument(err, skip(self))]
async fn fetch_logs(
&self,
range: RangeInclusive<u32>,
) -> ChainResult<Vec<(MerkleTreeInsertion, LogMeta)>> {
let events = self
.contract
.inserted_into_tree_filter()
.from_block(*range.start())
.to_block(*range.end())
.query_with_meta()
.await?;
let logs = events
.into_iter()
.map(|(log, log_meta)| {
(
MerkleTreeInsertion::new(log.index, H256::from(log.message_id)),
log_meta.into(),
)
})
.collect();
Ok(logs)
}
#[instrument(level = "debug", err, ret, skip(self))]
async fn get_finalized_block_number(&self) -> ChainResult<u32> {
Ok(self
.provider
.get_block_number()
.await
.map_err(ChainCommunicationError::from_other)?
.as_u32()
.saturating_sub(self.finality_blocks))
}
}
#[async_trait]
impl<M> SequenceIndexer<MerkleTreeInsertion> for EthereumMerkleTreeHookIndexer<M>
where
M: Middleware + 'static,
{
async fn sequence_and_tip(&self) -> ChainResult<(Option<u32>, u32)> {
// The InterchainGasPaymasterIndexerBuilder must return a `SequenceIndexer` type.
// It's fine if only a blanket implementation is provided for EVM chains, since their
// indexing only uses the `Index` trait, which is a supertrait of `SequenceIndexer`.
// TODO: if `SequenceIndexer` turns out to not depend on `Indexer` at all, then the supertrait
// dependency could be removed, even if the builder would still need to return a type that is both
// ``SequenceIndexer` and `Indexer`.
let tip = self.get_finalized_block_number().await?;
Ok((None, tip))
}
}
/// A reference to a Mailbox contract on some Ethereum chain
#[derive(Debug)]
pub struct EthereumMerkleTreeHook<M>
where
M: Middleware,
{
contract: Arc<MerkleTreeHookContract<M>>,
domain: HyperlaneDomain,
provider: Arc<M>,
}
impl<M> EthereumMerkleTreeHook<M>
where
M: Middleware,
{
/// Create a reference to a mailbox at a specific Ethereum address on some
/// chain
pub fn new(provider: Arc<M>, locator: &ContractLocator) -> Self {
Self {
contract: Arc::new(MerkleTreeHookContract::new(
locator.address,
provider.clone(),
)),
domain: locator.domain.clone(),
provider,
}
}
}
impl<M> HyperlaneChain for EthereumMerkleTreeHook<M>
where
M: Middleware + 'static,
{
fn domain(&self) -> &HyperlaneDomain {
&self.domain
}
fn provider(&self) -> Box<dyn HyperlaneProvider> {
Box::new(EthereumProvider::new(
self.provider.clone(),
self.domain.clone(),
))
}
}
impl<M> HyperlaneContract for EthereumMerkleTreeHook<M>
where
M: Middleware + 'static,
{
fn address(&self) -> H256 {
self.contract.address().into()
}
}
#[async_trait]
impl<M> MerkleTreeHook for EthereumMerkleTreeHook<M>
where
M: Middleware + 'static,
{
#[instrument(skip(self))]
async fn latest_checkpoint(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<Checkpoint> {
let lag = maybe_lag.map(|v| v.get()).unwrap_or(0).into();
let fixed_block_number: BlockNumber = self
.provider
.get_block_number()
.await
.map_err(ChainCommunicationError::from_other)?
.saturating_sub(lag)
.into();
let (root, index) = self
.contract
.latest_checkpoint()
.block(fixed_block_number)
.call()
.await?;
Ok(Checkpoint {
merkle_tree_hook_address: self.address(),
mailbox_domain: self.domain.id(),
root: root.into(),
index,
})
}
#[instrument(skip(self))]
#[allow(clippy::needless_range_loop)]
async fn tree(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle> {
let lag = maybe_lag.map(|v| v.get()).unwrap_or(0).into();
let fixed_block_number: BlockNumber = self
.provider
.get_block_number()
.await
.map_err(ChainCommunicationError::from_other)?
.saturating_sub(lag)
.into();
// TODO: implement From<Tree> for IncrementalMerkle
let raw_tree = self
.contract
.tree()
.block(fixed_block_number)
.call()
.await?;
let branch = raw_tree
.branch
.iter()
.map(|v| v.into())
.collect::<Vec<_>>()
.try_into()
.unwrap();
let tree = IncrementalMerkle::new(branch, raw_tree.count.as_usize());
Ok(tree)
}
#[instrument(skip(self))]
async fn count(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<u32> {
let lag = maybe_lag.map(|v| v.get()).unwrap_or(0).into();
let fixed_block_number: BlockNumber = self
.provider
.get_block_number()
.await
.map_err(ChainCommunicationError::from_other)?
.saturating_sub(lag)
.into();
let count = self
.contract
.count()
.block(fixed_block_number)
.call()
.await?;
Ok(count)
}
}

@ -129,7 +129,7 @@ mod test {
.unwrap() .unwrap()
.into(); .into();
let message = Checkpoint { let message = Checkpoint {
mailbox_address: H256::repeat_byte(2), merkle_tree_hook_address: H256::repeat_byte(2),
mailbox_domain: 5, mailbox_domain: 5,
root: H256::repeat_byte(1), root: H256::repeat_byte(1),
index: 123, index: 123,

@ -125,7 +125,8 @@ pub fn output_domain_hashes() {
/// Outputs signed checkpoint test cases in /vector/signedCheckpoint.json /// Outputs signed checkpoint test cases in /vector/signedCheckpoint.json
#[test] #[test]
pub fn output_signed_checkpoints() { pub fn output_signed_checkpoints() {
let mailbox = H256::from(H160::from_str("0x2222222222222222222222222222222222222222").unwrap()); let merkle_tree_hook_address =
H256::from(H160::from_str("0x2222222222222222222222222222222222222222").unwrap());
let t = async { let t = async {
let signer: Signers = "1111111111111111111111111111111111111111111111111111111111111111" let signer: Signers = "1111111111111111111111111111111111111111111111111111111111111111"
.parse::<ethers::signers::LocalWallet>() .parse::<ethers::signers::LocalWallet>()
@ -138,7 +139,7 @@ pub fn output_signed_checkpoints() {
for i in 1..=3 { for i in 1..=3 {
let signed_checkpoint = signer let signed_checkpoint = signer
.sign(Checkpoint { .sign(Checkpoint {
mailbox_address: mailbox, merkle_tree_hook_address,
mailbox_domain: 1000, mailbox_domain: 1000,
root: H256::repeat_byte(i + 1), root: H256::repeat_byte(i + 1),
index: i as u32, index: i as u32,
@ -147,7 +148,7 @@ pub fn output_signed_checkpoints() {
.expect("!sign_with"); .expect("!sign_with");
test_cases.push(json!({ test_cases.push(json!({
"mailbox": signed_checkpoint.value.mailbox_address, "merkle_tree_hook": signed_checkpoint.value.merkle_tree_hook_address,
"domain": signed_checkpoint.value.mailbox_domain, "domain": signed_checkpoint.value.mailbox_domain,
"root": signed_checkpoint.value.root, "root": signed_checkpoint.value.root,
"index": signed_checkpoint.value.index, "index": signed_checkpoint.value.index,

@ -5,13 +5,12 @@ use std::ops::RangeInclusive;
use async_trait::async_trait; use async_trait::async_trait;
use fuels::prelude::{Bech32ContractId, WalletUnlocked}; use fuels::prelude::{Bech32ContractId, WalletUnlocked};
use hyperlane_core::accumulator::incremental::IncrementalMerkle;
use tracing::instrument; use tracing::instrument;
use hyperlane_core::{ use hyperlane_core::{
utils::fmt_bytes, ChainCommunicationError, ChainResult, Checkpoint, ContractLocator, utils::fmt_bytes, ChainCommunicationError, ChainResult, ContractLocator, HyperlaneAbi,
HyperlaneAbi, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneMessage, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneMessage, HyperlaneProvider,
HyperlaneProvider, Indexer, LogMeta, Mailbox, TxCostEstimate, TxOutcome, H256, U256, Indexer, LogMeta, Mailbox, TxCostEstimate, TxOutcome, H256, U256,
}; };
use crate::{ use crate::{
@ -81,39 +80,11 @@ impl Mailbox for FuelMailbox {
.map_err(ChainCommunicationError::from_other) .map_err(ChainCommunicationError::from_other)
} }
#[instrument(level = "debug", err, ret, skip(self))]
async fn tree(&self, lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle> {
todo!()
}
#[instrument(level = "debug", err, ret, skip(self))] #[instrument(level = "debug", err, ret, skip(self))]
async fn delivered(&self, id: H256) -> ChainResult<bool> { async fn delivered(&self, id: H256) -> ChainResult<bool> {
todo!() todo!()
} }
#[instrument(level = "debug", err, ret, skip(self))]
async fn latest_checkpoint(&self, lag: Option<NonZeroU64>) -> ChainResult<Checkpoint> {
assert!(
lag.is_none(),
"Fuel does not support querying point-in-time"
);
let (root, index) = self
.contract
.methods()
.latest_checkpoint()
.simulate()
.await
.map_err(ChainCommunicationError::from_other)?
.value;
Ok(Checkpoint {
mailbox_address: self.address(),
mailbox_domain: self.domain.id(),
root: root.into_h256(),
index,
})
}
#[instrument(err, ret, skip(self))] #[instrument(err, ret, skip(self))]
async fn default_ism(&self) -> ChainResult<H256> { async fn default_ism(&self) -> ChainResult<H256> {
todo!() todo!()

@ -1,5 +1,5 @@
use fuels::{client::FuelClient, prelude::Provider}; use fuels::{client::FuelClient, prelude::Provider};
use hyperlane_core::{config::*, ChainCommunicationError, ChainResult}; use hyperlane_core::{ChainCommunicationError, ChainResult};
use url::Url; use url::Url;
/// Fuel connection configuration /// Fuel connection configuration
@ -9,12 +9,6 @@ pub struct ConnectionConf {
pub url: Url, pub url: Url,
} }
/// Raw fuel connection configuration used for better deserialization errors.
#[derive(Debug, serde::Deserialize)]
pub struct DeprecatedRawConnectionConf {
url: Option<String>,
}
/// An error type when parsing a connection configuration. /// An error type when parsing a connection configuration.
#[derive(thiserror::Error, Debug)] #[derive(thiserror::Error, Debug)]
pub enum ConnectionConfError { pub enum ConnectionConfError {
@ -26,27 +20,6 @@ pub enum ConnectionConfError {
InvalidConnectionUrl(String, url::ParseError), InvalidConnectionUrl(String, url::ParseError),
} }
impl FromRawConf<DeprecatedRawConnectionConf> for ConnectionConf {
fn from_config_filtered(
raw: DeprecatedRawConnectionConf,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
use ConnectionConfError::*;
match raw {
DeprecatedRawConnectionConf { url: Some(url) } => Ok(Self {
url: url
.parse()
.map_err(|e| InvalidConnectionUrl(url, e))
.into_config_result(|| cwp.join("url"))?,
}),
DeprecatedRawConnectionConf { url: None } => {
Err(MissingConnectionUrl).into_config_result(|| cwp.join("url"))
}
}
}
}
#[derive(thiserror::Error, Debug)] #[derive(thiserror::Error, Debug)]
#[error(transparent)] #[error(transparent)]
struct FuelNewConnectionError(#[from] anyhow::Error); struct FuelNewConnectionError(#[from] anyhow::Error);

@ -9,6 +9,7 @@ pub(crate) use client::RpcClientWithDebug;
pub use interchain_gas::*; pub use interchain_gas::*;
pub use interchain_security_module::*; pub use interchain_security_module::*;
pub use mailbox::*; pub use mailbox::*;
pub use merkle_tree_hook::*;
pub use provider::*; pub use provider::*;
pub use solana_sdk::signer::keypair::Keypair; pub use solana_sdk::signer::keypair::Keypair;
pub use trait_builder::*; pub use trait_builder::*;
@ -17,6 +18,7 @@ pub use validator_announce::*;
mod interchain_gas; mod interchain_gas;
mod interchain_security_module; mod interchain_security_module;
mod mailbox; mod mailbox;
mod merkle_tree_hook;
mod multisig_ism; mod multisig_ism;
mod provider; mod provider;
mod trait_builder; mod trait_builder;

@ -11,7 +11,7 @@ use hyperlane_core::{
accumulator::incremental::IncrementalMerkle, ChainCommunicationError, ChainResult, Checkpoint, accumulator::incremental::IncrementalMerkle, ChainCommunicationError, ChainResult, Checkpoint,
ContractLocator, Decode as _, Encode as _, HyperlaneAbi, HyperlaneChain, HyperlaneContract, ContractLocator, Decode as _, Encode as _, HyperlaneAbi, HyperlaneChain, HyperlaneContract,
HyperlaneDomain, HyperlaneMessage, HyperlaneProvider, Indexer, LogMeta, Mailbox, HyperlaneDomain, HyperlaneMessage, HyperlaneProvider, Indexer, LogMeta, Mailbox,
SequenceIndexer, TxCostEstimate, TxOutcome, H256, H512, U256, MerkleTreeHook, SequenceIndexer, TxCostEstimate, TxOutcome, H256, H512, U256,
}; };
use hyperlane_sealevel_interchain_security_module_interface::{ use hyperlane_sealevel_interchain_security_module_interface::{
InterchainSecurityModuleInstruction, VerifyInstruction, InterchainSecurityModuleInstruction, VerifyInstruction,
@ -66,11 +66,11 @@ const PROCESS_COMPUTE_UNITS: u32 = 1_400_000;
/// A reference to a Mailbox contract on some Sealevel chain /// A reference to a Mailbox contract on some Sealevel chain
pub struct SealevelMailbox { pub struct SealevelMailbox {
program_id: Pubkey, pub(crate) program_id: Pubkey,
inbox: (Pubkey, u8), inbox: (Pubkey, u8),
outbox: (Pubkey, u8), pub(crate) outbox: (Pubkey, u8),
rpc_client: RpcClient, pub(crate) rpc_client: RpcClient,
domain: HyperlaneDomain, pub(crate) domain: HyperlaneDomain,
payer: Option<Keypair>, payer: Option<Keypair>,
} }
@ -283,11 +283,7 @@ impl std::fmt::Debug for SealevelMailbox {
impl Mailbox for SealevelMailbox { impl Mailbox for SealevelMailbox {
#[instrument(err, ret, skip(self))] #[instrument(err, ret, skip(self))]
async fn count(&self, _maybe_lag: Option<NonZeroU64>) -> ChainResult<u32> { async fn count(&self, _maybe_lag: Option<NonZeroU64>) -> ChainResult<u32> {
let tree = self.tree(_maybe_lag).await?; <Self as MerkleTreeHook>::count(self, _maybe_lag).await
tree.count()
.try_into()
.map_err(ChainCommunicationError::from_other)
} }
#[instrument(err, ret, skip(self))] #[instrument(err, ret, skip(self))]
@ -310,57 +306,6 @@ impl Mailbox for SealevelMailbox {
Ok(account.value.is_some()) Ok(account.value.is_some())
} }
#[instrument(err, ret, skip(self))]
async fn tree(&self, lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle> {
assert!(
lag.is_none(),
"Sealevel does not support querying point-in-time"
);
let outbox_account = self
.rpc_client
.get_account_with_commitment(&self.outbox.0, CommitmentConfig::finalized())
.await
.map_err(ChainCommunicationError::from_other)?
.value
.ok_or_else(|| {
ChainCommunicationError::from_other_str("Could not find account data")
})?;
let outbox = OutboxAccount::fetch(&mut outbox_account.data.as_ref())
.map_err(ChainCommunicationError::from_other)?
.into_inner();
Ok(outbox.tree)
}
#[instrument(err, ret, skip(self))]
async fn latest_checkpoint(&self, lag: Option<NonZeroU64>) -> ChainResult<Checkpoint> {
assert!(
lag.is_none(),
"Sealevel does not support querying point-in-time"
);
let tree = self.tree(lag).await?;
let root = tree.root();
let count: u32 = tree
.count()
.try_into()
.map_err(ChainCommunicationError::from_other)?;
let index = count.checked_sub(1).ok_or_else(|| {
ChainCommunicationError::from_contract_error_str(
"Outbox is empty, cannot compute checkpoint",
)
})?;
let checkpoint = Checkpoint {
mailbox_address: self.program_id.to_bytes().into(),
mailbox_domain: self.domain.id(),
root,
index,
};
Ok(checkpoint)
}
#[instrument(err, ret, skip(self))] #[instrument(err, ret, skip(self))]
async fn default_ism(&self) -> ChainResult<H256> { async fn default_ism(&self) -> ChainResult<H256> {
let inbox_account = self let inbox_account = self
@ -690,7 +635,7 @@ impl SequenceIndexer<HyperlaneMessage> for SealevelMailboxIndexer {
async fn sequence_and_tip(&self) -> ChainResult<(Option<u32>, u32)> { async fn sequence_and_tip(&self) -> ChainResult<(Option<u32>, u32)> {
let tip = Indexer::<HyperlaneMessage>::get_finalized_block_number(self as _).await?; let tip = Indexer::<HyperlaneMessage>::get_finalized_block_number(self as _).await?;
// TODO: need to make sure the call and tip are at the same height? // TODO: need to make sure the call and tip are at the same height?
let count = self.mailbox.count(None).await?; let count = Mailbox::count(&self.mailbox, None).await?;
Ok((Some(count), tip)) Ok((Some(count), tip))
} }
} }

@ -0,0 +1,101 @@
use std::{num::NonZeroU64, ops::RangeInclusive};
use async_trait::async_trait;
use derive_new::new;
use hyperlane_core::{
accumulator::incremental::IncrementalMerkle, ChainCommunicationError, ChainResult, Checkpoint,
Indexer, LogMeta, MerkleTreeHook, MerkleTreeInsertion, SequenceIndexer,
};
use hyperlane_sealevel_mailbox::accounts::OutboxAccount;
use solana_sdk::commitment_config::CommitmentConfig;
use tracing::instrument;
use crate::SealevelMailbox;
#[async_trait]
impl MerkleTreeHook for SealevelMailbox {
#[instrument(err, ret, skip(self))]
async fn tree(&self, lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle> {
assert!(
lag.is_none(),
"Sealevel does not support querying point-in-time"
);
let outbox_account = self
.rpc_client
.get_account_with_commitment(&self.outbox.0, CommitmentConfig::finalized())
.await
.map_err(ChainCommunicationError::from_other)?
.value
.ok_or_else(|| {
ChainCommunicationError::from_other_str("Could not find account data")
})?;
let outbox = OutboxAccount::fetch(&mut outbox_account.data.as_ref())
.map_err(ChainCommunicationError::from_other)?
.into_inner();
Ok(outbox.tree)
}
#[instrument(err, ret, skip(self))]
async fn latest_checkpoint(&self, lag: Option<NonZeroU64>) -> ChainResult<Checkpoint> {
assert!(
lag.is_none(),
"Sealevel does not support querying point-in-time"
);
let tree = self.tree(lag).await?;
let root = tree.root();
let count: u32 = tree
.count()
.try_into()
.map_err(ChainCommunicationError::from_other)?;
let index = count.checked_sub(1).ok_or_else(|| {
ChainCommunicationError::from_contract_error_str(
"Outbox is empty, cannot compute checkpoint",
)
})?;
let checkpoint = Checkpoint {
merkle_tree_hook_address: self.program_id.to_bytes().into(),
mailbox_domain: self.domain.id(),
root,
index,
};
Ok(checkpoint)
}
#[instrument(err, ret, skip(self))]
async fn count(&self, _maybe_lag: Option<NonZeroU64>) -> ChainResult<u32> {
let tree = self.tree(_maybe_lag).await?;
tree.count()
.try_into()
.map_err(ChainCommunicationError::from_other)
}
}
/// Struct that retrieves event data for a Sealevel merkle tree hook contract
#[derive(Debug, new)]
pub struct SealevelMerkleTreeHookIndexer {}
#[async_trait]
impl Indexer<MerkleTreeInsertion> for SealevelMerkleTreeHookIndexer {
async fn fetch_logs(
&self,
_range: RangeInclusive<u32>,
) -> ChainResult<Vec<(MerkleTreeInsertion, LogMeta)>> {
Ok(vec![])
}
async fn get_finalized_block_number(&self) -> ChainResult<u32> {
Ok(0)
}
}
#[async_trait]
impl SequenceIndexer<MerkleTreeInsertion> for SealevelMerkleTreeHookIndexer {
async fn sequence_and_tip(&self) -> ChainResult<(Option<u32>, u32)> {
Ok((None, 0))
}
}

@ -1,7 +1,4 @@
use hyperlane_core::{ use hyperlane_core::ChainCommunicationError;
config::{ConfigErrResultExt, ConfigPath, ConfigResult, FromRawConf},
ChainCommunicationError,
};
use url::Url; use url::Url;
/// Sealevel connection configuration /// Sealevel connection configuration
@ -11,12 +8,6 @@ pub struct ConnectionConf {
pub url: Url, pub url: Url,
} }
/// Raw Sealevel connection configuration used for better deserialization errors.
#[derive(Debug, serde::Deserialize)]
pub struct DeprecatedRawConnectionConf {
url: Option<String>,
}
/// An error type when parsing a connection configuration. /// An error type when parsing a connection configuration.
#[derive(thiserror::Error, Debug)] #[derive(thiserror::Error, Debug)]
pub enum ConnectionConfError { pub enum ConnectionConfError {
@ -28,27 +19,6 @@ pub enum ConnectionConfError {
InvalidConnectionUrl(String, url::ParseError), InvalidConnectionUrl(String, url::ParseError),
} }
impl FromRawConf<DeprecatedRawConnectionConf> for ConnectionConf {
fn from_config_filtered(
raw: DeprecatedRawConnectionConf,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
use ConnectionConfError::*;
match raw {
DeprecatedRawConnectionConf { url: Some(url) } => Ok(Self {
url: url
.parse()
.map_err(|e| InvalidConnectionUrl(url, e))
.into_config_result(|| cwp.join("url"))?,
}),
DeprecatedRawConnectionConf { url: None } => {
Err(MissingConnectionUrl).into_config_result(|| cwp.join("url"))
}
}
}
}
#[derive(thiserror::Error, Debug)] #[derive(thiserror::Error, Debug)]
#[error(transparent)] #[error(transparent)]
struct SealevelNewConnectionError(#[from] anyhow::Error); struct SealevelNewConnectionError(#[from] anyhow::Error);

@ -2,18 +2,21 @@
"chains": { "chains": {
"sealeveltest1": { "sealeveltest1": {
"name": "sealeveltest1", "name": "sealeveltest1",
"domain": 13375, "chainId": 13375,
"addresses": { "domainId": 13375,
"mailbox": "692KZJaoe2KRcD6uhCQDLLXnLNA5ZLnfvdqjE4aX9iu1", "mailbox": "692KZJaoe2KRcD6uhCQDLLXnLNA5ZLnfvdqjE4aX9iu1",
"interchainGasPaymaster": "DrFtxirPPsfdY4HQiNZj2A9o4Ux7JaL3gELANgAoihhp", "interchainGasPaymaster": "DrFtxirPPsfdY4HQiNZj2A9o4Ux7JaL3gELANgAoihhp",
"validatorAnnounce": "DH43ae1LwemXAboWwSh8zc9pG8j72gKUEXNi57w8fEnn" "validatorAnnounce": "DH43ae1LwemXAboWwSh8zc9pG8j72gKUEXNi57w8fEnn",
},
"protocol": "sealevel", "protocol": "sealevel",
"finalityBlocks": 0, "blocks": {
"connection": { "reorgPeriod": 0,
"type": "http", "confirmations": 0
"url": "http://localhost:8899"
}, },
"rpcUrls": [
{
"http": "http://localhost:8899"
}
],
"index": { "index": {
"from": 1, "from": 1,
"mode": "sequence" "mode": "sequence"
@ -21,18 +24,21 @@
}, },
"sealeveltest2": { "sealeveltest2": {
"name": "sealeveltest2", "name": "sealeveltest2",
"domain": 13376, "chainId": 13376,
"addresses": { "domainId": 13376,
"mailbox": "9tCUWNjpqcf3NUSrtp7vquYVCwbEByvLjZUrhG5dgvhj", "mailbox": "9tCUWNjpqcf3NUSrtp7vquYVCwbEByvLjZUrhG5dgvhj",
"interchainGasPaymaster": "G5rGigZBL8NmxCaukK2CAKr9Jq4SUfAhsjzeri7GUraK", "interchainGasPaymaster": "G5rGigZBL8NmxCaukK2CAKr9Jq4SUfAhsjzeri7GUraK",
"validatorAnnounce": "3Uo5j2Bti9aZtrDqJmAyuwiFaJFPFoNL5yxTpVCNcUhb" "validatorAnnounce": "3Uo5j2Bti9aZtrDqJmAyuwiFaJFPFoNL5yxTpVCNcUhb",
},
"protocol": "sealevel", "protocol": "sealevel",
"finalityBlocks": 0, "blocks": {
"connection": { "reorgPeriod": 0,
"type": "http", "confirmations": 0
"url": "http://localhost:8899"
}, },
"rpcUrls": [
{
"http": "http://localhost:8899"
}
],
"index": { "index": {
"from": 1, "from": 1,
"mode": "sequence" "mode": "sequence"

@ -73,30 +73,30 @@ The name of the ClusterSecretStore/SecretStore
{{/* {{/*
Recursively converts a config object into environment variables than can Recursively converts a config object into environment variables than can
be parsed by rust. For example, a config of { foo: { bar: { baz: 420 }, boo: 421 } } will be parsed by rust. For example, a config of { foo: { bar: { baz: 420 }, booGo: 421 } } will
be: HYP_FOO_BAR_BAZ=420 and HYP_FOO_BOO=421 be: HYP_FOO_BAR_BAZ=420 and HYP_FOO_BOOGO=421
Env vars can be formatted in FOO="BAR" format if .format is "dot_env", Env vars can be formatted in FOO="BAR" format if .format is "dot_env",
FOO: "BAR" format if .format is "config_map", or otherwise FOO: "BAR" format if .format is "config_map", or otherwise
they will be formatted as spec YAML-friendly environment variables they will be formatted as spec YAML-friendly environment variables
*/}} */}}
{{- define "agent-common.config-env-vars" -}} {{- define "agent-common.config-env-vars" -}}
{{- range $key, $value := .config }} {{- range $key_or_idx, $value := .config }}
{{- $key_name := printf "%s%s" (default "" $.key_name_prefix) $key }} {{- $key_name := printf "%s%v" (default "" $.key_name_prefix) $key_or_idx }}
{{- if typeIs "map[string]interface {}" $value }} {{- if or (typeIs "map[string]interface {}" $value) (typeIs "[]interface {}" $value) }}
{{- include "agent-common.config-env-vars" (dict "config" $value "agent_name" $.agent_name "format" $.format "key_name_prefix" (printf "%s_" $key_name)) }} {{- include "agent-common.config-env-vars" (dict "config" $value "format" $.format "key_name_prefix" (printf "%s_" $key_name)) }}
{{- else }} {{- else }}
{{- include "agent-common.config-env-var" (dict "agent_name" $.agent_name "key" $key_name "value" $value "format" $.format ) }} {{- include "agent-common.config-env-var" (dict "key" $key_name "value" $value "format" $.format ) }}
{{- end }} {{- end }}
{{- end }} {{- end }}
{{- end }} {{- end }}
{{- define "agent-common.config-env-var" }} {{- define "agent-common.config-env-var" }}
{{- if (eq .format "dot_env") }} {{- if (eq .format "dot_env") }}
HYP_{{ .agent_name | upper }}_{{ .key | upper }}={{ .value | quote }} HYP_{{ .key | upper }}={{ .value | quote }}
{{- else if (eq .format "config_map") }} {{- else if (eq .format "config_map") }}
HYP_{{ .agent_name | upper }}_{{ .key | upper }}: {{ .value | quote }} HYP_{{ .key | upper }}: {{ .value | quote }}
{{- else }} {{- else }}
- name: HYP_{{ .agent_name | upper }}_{{ .key | upper }} - name: HYP_{{ .key | upper }}
value: {{ .value | quote }} value: {{ .value | quote }}
{{- end }} {{- end }}
{{- end }} {{- end }}

@ -1,44 +0,0 @@
# Hyperlane-Agent Helm Chart
![Version: 0.1.0](https://img.shields.io/badge/Version-0.1.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 0.1.0](https://img.shields.io/badge/AppVersion-0.1.0-informational?style=flat-square)
A Helm Chart that encapsulates the deployment of the Hyperlane Rust Agent(s). It is currently designed to be deployed against a Google Kubernetes Engine cluster, but specification of another PVC Storage Class should be sufficient to make it compatible with other cloud providers.
Additional documentation is present in comments in `yalues.yaml`.
## Values
| Key | Type | Default | Description |
| -------------------------------------- | ------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| affinity | object | `{}` | |
| fullnameOverride | string | `""` | |
| image.pullPolicy | string | `"Always"` | |
| image.repository | string | `"gcr.io/clabs-optics/optics-agent"` | Main repository for Hyperlane Agent binaries, provided by cLabs |
| image.tag | string | `"latest"` | Overrides the image tag whose default is the chart appVersion. |
| imagePullSecrets | list | `[]` | |
| nameOverride | string | `""` | |
| nodeSelector | object | `{}` | |
| hyperlane | object | `{"outboxChain":{"address":null,"connectionType":null,"connectionUrl":null,"domain":null,"name":"goerli","protocol":null},enabled":false,"messageInterval":null,"signers":[{"key":"","name":"goerli"},{"key":"","name":"alfajores"}]},"processor":{"enabled":false,"pollingInterval":null,"signers":[{"key":"","name":"goerli"},{"key":"","name":"alfajores"}]},"relayer":{"enabled":false,"pollingInterval":null,"signers":[{"key":"","name":"goerli"},{"key":"","name":"alfajores"}]},"inboxChains":[{"address":null,"connectionType":null,"connectionUrl":null,"domain":null,"name":"alfajores","protocol":null}],"runEnv":"default","validator":{"signer":"","enabled":false,"pollingInterval":null,"signers":[{"key":"","name":"goerli"},{"key":"","name":"alfajores"}],"updatePause":null}}` | Hyperlane Overrides By Default, Hyperlane Agents load the config baked into the Docker Image Pass values here in order to override the values in the config Note: For successful operation, one _must_ pass signer keys as they are not baked into the image for security reasons. |
| hyperlane.outboxChain.address | string | `nil` | The contract address for the home contract |
| hyperlane.outboxChain.connectionUrl | string | `nil` | Connection string pointing to an RPC endpoint for the home chain |
| hyperlane.outboxChain.domain | string | `nil` | The hard-coded domain corresponding to this blockchain |
| hyperlane.outboxChain.protocol | string | `nil` | RPC Style |
| hyperlane.relayer.enabled | bool | `false` | Enables or disables the relayer |
| hyperlane.inboxChains | list | `[{"address":null,"connectionType":null,"connectionUrl":null,"domain":null,"name":"alfajores","protocol":null}]` | Replica chain overrides, a sequence |
| hyperlane.inboxChains[0].address | string | `nil` | The contract address for the replica contract |
| hyperlane.inboxChains[0].connectionUrl | string | `nil` | Connection string pointing to an RPC endpoint for the replica chain |
| hyperlane.validator.signer | string | `""` | Specialized key used by validator and watcher used to sign attestations, separate from validator.keys |
| hyperlane.validator.enabled | bool | `false` | Enables or disables the validator |
| hyperlane.validator.pollingInterval | string | `nil` | How long to wait between checking for updates |
| hyperlane.validator.signers | list | `[{"key":"","name":"goerli"},{"key":"","name":"alfajores"}]` | Trnsaction Signing keys for home and replica(s) |
| podAnnotations | object | `{}` | |
| podSecurityContext | object | `{}` | |
| replicaCount | int | `1` | |
| resources | object | `{}` | |
| securityContext | object | `{}` | |
| tolerations | list | `[]` | |
| volumeStorageClass | string | `"standard"` | Default to standard storageclass provided by GKE |
---
Autogenerated from chart metadata using [helm-docs v1.5.0](https://github.com/norwoodj/helm-docs/releases/v1.5.0)

@ -7,10 +7,10 @@ metadata:
data: data:
ONELINE_BACKTRACES: "true" ONELINE_BACKTRACES: "true"
RUST_BACKTRACE: {{ .Values.hyperlane.rustBacktrace }} RUST_BACKTRACE: {{ .Values.hyperlane.rustBacktrace }}
HYP_BASE_DB: {{ .Values.hyperlane.dbPath }} HYP_DB: {{ .Values.hyperlane.dbPath }}
HYP_BASE_TRACING_FMT: {{ .Values.hyperlane.tracing.format }} HYP_LOG_FORMAT: {{ .Values.hyperlane.tracing.format }}
HYP_BASE_TRACING_LEVEL: {{ .Values.hyperlane.tracing.level }} HYP_LOG_LEVEL: {{ .Values.hyperlane.tracing.level }}
{{- range .Values.hyperlane.chains }} {{- range .Values.hyperlane.chains }}
{{- include "agent-common.config-env-vars" (dict "config" . "agent_name" "base" "key_name_prefix" (printf "CHAINS_%s_" (.name | upper)) "format" "config_map") | indent 2 }} {{- include "agent-common.config-env-vars" (dict "config" . "key_name_prefix" (printf "chains_%s_" .name) "format" "config_map") | indent 2 }}
{{- end }} {{- end }}
HYP_BASE_METRICS: {{ .Values.hyperlane.metrics.port | quote }} HYP_METRICSPORT: {{ .Values.hyperlane.metrics.port | quote }}

@ -27,11 +27,7 @@ spec:
*/}} */}}
{{- range .Values.hyperlane.chains }} {{- range .Values.hyperlane.chains }}
{{- if not .disabled }} {{- if not .disabled }}
{{- if or (eq .connection.type "httpQuorum") (eq .connection.type "httpFallback") }} HYP_BASE_CHAINS_{{ .name | upper }}_CUSTOMRPCURLS: {{ printf "'{{ .%s_rpcs | fromJson | join \",\" }}'" .name }}
HYP_BASE_CHAINS_{{ .name | upper }}_CONNECTION_URLS: {{ printf "'{{ .%s_rpcs | fromJson | join \",\" }}'" .name }}
{{- else }}
HYP_BASE_CHAINS_{{ .name | upper }}_CONNECTION_URL: {{ printf "'{{ .%s_rpc | toString }}'" .name }}
{{- end }}
{{- end }} {{- end }}
{{- end }} {{- end }}
data: data:
@ -41,14 +37,8 @@ spec:
*/}} */}}
{{- range .Values.hyperlane.chains }} {{- range .Values.hyperlane.chains }}
{{- if not .disabled }} {{- if not .disabled }}
{{- if or (eq .connection.type "httpQuorum") (eq .connection.type "httpFallback") }}
- secretKey: {{ printf "%s_rpcs" .name }} - secretKey: {{ printf "%s_rpcs" .name }}
remoteRef: remoteRef:
key: {{ printf "%s-rpc-endpoints-%s" $.Values.hyperlane.runEnv .name }} key: {{ printf "%s-rpc-endpoints-%s" $.Values.hyperlane.runEnv .name }}
{{- else }}
- secretKey: {{ printf "%s_rpc" .name }}
remoteRef:
key: {{ printf "%s-rpc-endpoint-%s" $.Values.hyperlane.runEnv .name }}
{{- end }}
{{- end }} {{- end }}
{{- end }} {{- end }}

@ -23,13 +23,13 @@ spec:
data: data:
{{- range .Values.hyperlane.relayerChains }} {{- range .Values.hyperlane.relayerChains }}
{{- if eq .signer.type "hexKey" }} {{- if eq .signer.type "hexKey" }}
HYP_BASE_CHAINS_{{ .name | upper }}_SIGNER_KEY: {{ printf "'{{ .%s_signer_key | toString }}'" .name }} HYP_CHAINS_{{ .name | upper }}_SIGNER_KEY: {{ printf "'{{ .%s_signer_key | toString }}'" .name }}
{{- end }} {{- end }}
{{- end }} {{- if and (eq .signer.type "aws") $.Values.hyperlane.relayer.aws }}
{{- if .Values.hyperlane.relayer.aws }}
AWS_ACCESS_KEY_ID: {{ print "'{{ .aws_access_key_id | toString }}'" }} AWS_ACCESS_KEY_ID: {{ print "'{{ .aws_access_key_id | toString }}'" }}
AWS_SECRET_ACCESS_KEY: {{ print "'{{ .aws_secret_access_key | toString }}'" }} AWS_SECRET_ACCESS_KEY: {{ print "'{{ .aws_secret_access_key | toString }}'" }}
{{- end }} {{- end }}
{{- end }}
data: data:
{{- range .Values.hyperlane.relayerChains }} {{- range .Values.hyperlane.relayerChains }}
{{- if eq .signer.type "hexKey" }} {{- if eq .signer.type "hexKey" }}

@ -55,14 +55,7 @@ spec:
- secretRef: - secretRef:
name: {{ include "agent-common.fullname" . }}-relayer-secret name: {{ include "agent-common.fullname" . }}-relayer-secret
env: env:
{{- include "agent-common.config-env-vars" (dict "config" .Values.hyperlane.relayer.config "agent_name" "relayer") | indent 10 }} {{- include "agent-common.config-env-vars" (dict "config" .Values.hyperlane.relayer.config) | nindent 10 }}
{{- $relayerChainNames := list }}
{{- range .Values.hyperlane.relayerChains }}
{{- include "agent-common.config-env-vars" (dict "config" .signer "agent_name" "base" "key_name_prefix" (printf "CHAINS_%s_SIGNER_" (.name | upper))) | indent 10 }}
{{- $relayerChainNames = append $relayerChainNames .name }}
{{- end }}
- name: HYP_BASE_RELAYCHAINS
value: {{ $relayerChainNames | join "," }}
resources: resources:
{{- toYaml .Values.hyperlane.relayer.resources | nindent 10 }} {{- toYaml .Values.hyperlane.relayer.resources | nindent 10 }}
volumeMounts: volumeMounts:

@ -21,7 +21,7 @@ spec:
labels: labels:
{{- include "agent-common.labels" . | nindent 10 }} {{- include "agent-common.labels" . | nindent 10 }}
data: data:
HYP_BASE_DB: {{ print "'{{ .db | toString }}'" }} HYP_DB: {{ print "'{{ .db | toString }}'" }}
data: data:
- secretKey: db - secretKey: db
remoteRef: remoteRef:

@ -55,14 +55,7 @@ spec:
- secretRef: - secretRef:
name: {{ include "agent-common.fullname" . }}-scraper3-secret name: {{ include "agent-common.fullname" . }}-scraper3-secret
env: env:
{{- $scraperChainNames := list }} {{- include "agent-common.config-env-vars" (dict "config" .Values.hyperlane.scraper.config) | nindent 8 }}
{{- range .Values.hyperlane.chains }}
{{- if not .disabled }}
{{- $scraperChainNames = append $scraperChainNames .name }}
{{- end }}
{{- end }}
- name: HYP_SCRAPER_CHAINSTOSCRAPE
value: {{ $scraperChainNames | join "," }}
resources: resources:
{{- toYaml .Values.hyperlane.scraper.resources | nindent 10 }} {{- toYaml .Values.hyperlane.scraper.resources | nindent 10 }}
ports: ports:

@ -6,10 +6,8 @@ metadata:
labels: labels:
{{- include "agent-common.labels" . | nindent 4 }} {{- include "agent-common.labels" . | nindent 4 }}
data: data:
{{ $index := 0 }} {{- range $index, $config := .Values.hyperlane.validator.configs }}
{{- range .Values.hyperlane.validator.configs }}
validator-{{ $index }}.env: | validator-{{ $index }}.env: |
{{- include "agent-common.config-env-vars" (dict "config" . "agent_name" "validator" "format" "dot_env") | indent 4 }} {{- include "agent-common.config-env-vars" (dict "config" $config "format" "dot_env") | nindent 4 }}
{{ $index = add1 $index }}
{{- end }} {{- end }}
{{- end }} {{- end }}

@ -24,18 +24,18 @@ spec:
{{ $index := 0 }} {{ $index := 0 }}
{{- range .Values.hyperlane.validator.configs }} {{- range .Values.hyperlane.validator.configs }}
validator-{{ $index }}.env: | validator-{{ $index }}.env: |
{{- if eq .validator.type "hexKey" }} {{- if eq .validator.type "hexKey" }}
HYP_VALIDATOR_VALIDATOR_KEY={{ printf "'{{ .signer_key_%d | toString }}'" $index }} HYP_VALIDATOR_KEY={{ printf "'{{ .signer_key_%d | toString }}'" $index }}
HYP_BASE_CHAINS_{{ .originChainName | upper }}_SIGNER_KEY={{ printf "'{{ .signer_key_%d | toString }}'" $index }} HYP_CHAINS_{{ .originChainName | upper }}_SIGNER_KEY={{ printf "'{{ .signer_key_%d | toString }}'" $index }}
{{- end }} {{- end }}
{{- if or (eq .checkpointSyncer.type "s3") $.Values.hyperlane.aws }} {{- if or (eq .checkpointSyncer.type "s3") $.Values.hyperlane.aws }}
AWS_ACCESS_KEY_ID={{ printf "'{{ .aws_access_key_id_%d | toString }}'" $index }} AWS_ACCESS_KEY_ID={{ printf "'{{ .aws_access_key_id_%d | toString }}'" $index }}
AWS_SECRET_ACCESS_KEY={{ printf "'{{ .aws_secret_access_key_%d | toString }}'" $index }} AWS_SECRET_ACCESS_KEY={{ printf "'{{ .aws_secret_access_key_%d | toString }}'" $index }}
{{- end }} {{- end }}
{{ $index = add1 $index }} {{ $index = add1 $index }}
{{- end }} {{- end }}
data: data:
{{ $index := 0 }} {{ $index = 0 }}
{{- range .Values.hyperlane.validator.configs }} {{- range .Values.hyperlane.validator.configs }}
{{- if eq .validator.type "hexKey" }} {{- if eq .validator.type "hexKey" }}
- secretKey: signer_key_{{ $index }} - secretKey: signer_key_{{ $index }}

@ -47,19 +47,33 @@ hyperlane:
aws: # true | false aws: # true | false
# -- Chain overrides, a sequence # -- Chain overrides, a sequence
# This should mirror @hyperlane-xyz/sdk AgentChainMetadata
chains: chains:
- name: 'alfajores' - name: examplechain
disabled: false disabled: false
rpcConsensusType: fallback
signer: signer:
# aws: type: # aws
addresses: index:
mailbox: from:
multisigIsm: chunk:
interchainGasPaymaster: mode:
domain: mailbox:
protocol: # "ethereum" multisigIsm:
connection: interchainGasPaymaster:
type: # "http" interchainSecurityModule:
protocol: ethereum
chainId:
domainId:
customRpcUrls:
- example:
url: https://example.com
priority: 1
blocks:
confirmations:
reorgPeriod:
estimatedBlockTime:
isTestnet: false
# Hyperlane Agent Roles # Hyperlane Agent Roles
# Individually Switchable via <role>.enabled # Individually Switchable via <role>.enabled
@ -81,7 +95,6 @@ hyperlane:
# -- How long to wait between checking for updates # -- How long to wait between checking for updates
configs: [] configs: []
# - interval: # - interval:
# reorgPeriod:
# checkpointSyncers: # checkpointSyncers:
# originChainName: # originChainName:
# type: # "hexKey" # type: # "hexKey"
@ -103,6 +116,7 @@ hyperlane:
cpu: 500m cpu: 500m
memory: 256Mi memory: 256Mi
config: config:
relayChains: ''
multisigCheckpointSyncer: multisigCheckpointSyncer:
checkpointSyncers: checkpointSyncers:
# -- Specify whether a default signer key is used for all chains in Values.hyperlane.relayerChains list. # -- Specify whether a default signer key is used for all chains in Values.hyperlane.relayerChains list.
@ -130,6 +144,7 @@ hyperlane:
cpu: 250m cpu: 250m
memory: 256Mi memory: 256Mi
config: config:
chainsToScrape: ''
kathy: kathy:
enabled: false enabled: false

@ -10,7 +10,7 @@ use async_trait::async_trait;
use derive_new::new; use derive_new::new;
use eyre::Result; use eyre::Result;
use tokio::time::sleep; use tokio::time::sleep;
use tracing::{debug, warn}; use tracing::{debug, info, warn};
use hyperlane_core::{ use hyperlane_core::{
ChainCommunicationError, ChainResult, ContractSyncCursor, CursorAction, HyperlaneMessage, ChainCommunicationError, ChainResult, ContractSyncCursor, CursorAction, HyperlaneMessage,
@ -225,11 +225,11 @@ impl ForwardMessageSyncCursor {
.retrieve_dispatched_block_number(self.cursor.sync_state.next_sequence) .retrieve_dispatched_block_number(self.cursor.sync_state.next_sequence)
.await .await
{ {
debug!(next_block = block_number, "Fast forwarding next block"); info!(next_block = block_number, "Fast forwarding next block");
// It's possible that eth_getLogs dropped logs from this block, therefore we cannot do block_number + 1. // It's possible that eth_getLogs dropped logs from this block, therefore we cannot do block_number + 1.
self.cursor.sync_state.next_block = block_number; self.cursor.sync_state.next_block = block_number;
} }
debug!( info!(
next_nonce = self.cursor.sync_state.next_sequence + 1, next_nonce = self.cursor.sync_state.next_sequence + 1,
"Fast forwarding next nonce" "Fast forwarding next nonce"
); );

@ -10,7 +10,7 @@ use tracing::{debug, instrument, trace};
use hyperlane_core::{ use hyperlane_core::{
GasPaymentKey, HyperlaneDomain, HyperlaneLogStore, HyperlaneMessage, HyperlaneMessageStore, GasPaymentKey, HyperlaneDomain, HyperlaneLogStore, HyperlaneMessage, HyperlaneMessageStore,
HyperlaneWatermarkedLogStore, InterchainGasExpenditure, InterchainGasPayment, HyperlaneWatermarkedLogStore, InterchainGasExpenditure, InterchainGasPayment,
InterchainGasPaymentMeta, LogMeta, H256, InterchainGasPaymentMeta, LogMeta, MerkleTreeInsertion, H256,
}; };
use super::{ use super::{
@ -30,6 +30,8 @@ const GAS_PAYMENT_META_PROCESSED: &str = "gas_payment_meta_processed_v3_";
const GAS_EXPENDITURE_FOR_MESSAGE_ID: &str = "gas_expenditure_for_message_id_v2_"; const GAS_EXPENDITURE_FOR_MESSAGE_ID: &str = "gas_expenditure_for_message_id_v2_";
const PENDING_MESSAGE_RETRY_COUNT_FOR_MESSAGE_ID: &str = const PENDING_MESSAGE_RETRY_COUNT_FOR_MESSAGE_ID: &str =
"pending_message_retry_count_for_message_id_"; "pending_message_retry_count_for_message_id_";
const MERKLE_TREE_INSERTION: &str = "merkle_tree_insertion_";
const MERKLE_LEAF_INDEX_BY_MESSAGE_ID: &str = "merkle_leaf_index_by_message_id_";
const LATEST_INDEXED_GAS_PAYMENT_BLOCK: &str = "latest_indexed_gas_payment_block"; const LATEST_INDEXED_GAS_PAYMENT_BLOCK: &str = "latest_indexed_gas_payment_block";
type DbResult<T> = std::result::Result<T, DbError>; type DbResult<T> = std::result::Result<T, DbError>;
@ -152,6 +154,22 @@ impl HyperlaneRocksDB {
Ok(true) Ok(true)
} }
/// Store the merkle tree insertion event, and also store a mapping from message_id to leaf_index
pub fn process_tree_insertion(&self, insertion: &MerkleTreeInsertion) -> DbResult<bool> {
if let Ok(Some(_)) = self.retrieve_merkle_tree_insertion_by_leaf_index(&insertion.index()) {
debug!(insertion=?insertion, "Tree insertion already stored in db");
return Ok(false);
}
// even if double insertions are ok, store the leaf by `leaf_index` (guaranteed to be unique)
// rather than by `message_id` (not guaranteed to be recurring), so that leaves can be retrieved
// based on insertion order.
self.store_merkle_tree_insertion_by_leaf_index(&insertion.index(), insertion)?;
self.store_merkle_leaf_index_by_message_id(&insertion.message_id(), &insertion.index())?;
// Return true to indicate the tree insertion was processed
Ok(true)
}
/// Processes the gas expenditure and store the total expenditure for the /// Processes the gas expenditure and store the total expenditure for the
/// message. /// message.
pub fn process_gas_expenditure(&self, expenditure: InterchainGasExpenditure) -> DbResult<()> { pub fn process_gas_expenditure(&self, expenditure: InterchainGasExpenditure) -> DbResult<()> {
@ -253,6 +271,21 @@ impl HyperlaneLogStore<InterchainGasPayment> for HyperlaneRocksDB {
} }
} }
#[async_trait]
impl HyperlaneLogStore<MerkleTreeInsertion> for HyperlaneRocksDB {
    /// Store every tree insertion event, returning how many were newly stored
    /// (previously-seen insertions are skipped and not counted).
    #[instrument(skip_all)]
    async fn store_logs(&self, leaves: &[(MerkleTreeInsertion, LogMeta)]) -> Result<u32> {
        leaves
            .iter()
            .try_fold(0u32, |new_count, (insertion, _meta)| {
                // `process_tree_insertion` returns true only for unseen insertions.
                Ok(new_count + u32::from(self.process_tree_insertion(insertion)?))
            })
    }
}
#[async_trait] #[async_trait]
impl HyperlaneMessageStore for HyperlaneRocksDB { impl HyperlaneMessageStore for HyperlaneRocksDB {
/// Gets a message by nonce. /// Gets a message by nonce.
@ -327,3 +360,17 @@ make_store_and_retrieve!(
H256, H256,
u32 u32
); );
// Generates `store_merkle_tree_insertion_by_leaf_index` / `retrieve_merkle_tree_insertion_by_leaf_index`,
// mapping a leaf index (u32) to its MerkleTreeInsertion event.
make_store_and_retrieve!(
    pub,
    merkle_tree_insertion_by_leaf_index,
    MERKLE_TREE_INSERTION,
    u32,
    MerkleTreeInsertion
);
// Generates `store_merkle_leaf_index_by_message_id` / `retrieve_merkle_leaf_index_by_message_id`,
// mapping a message id (H256) to the leaf index (u32) it was inserted at.
make_store_and_retrieve!(
    pub,
    merkle_leaf_index_by_message_id,
    MERKLE_LEAF_INDEX_BY_MESSAGE_ID,
    H256,
    u32
);

@ -5,7 +5,7 @@ use futures_util::future::try_join_all;
use hyperlane_core::{ use hyperlane_core::{
Delivery, HyperlaneChain, HyperlaneDomain, HyperlaneMessageStore, HyperlaneProvider, Delivery, HyperlaneChain, HyperlaneDomain, HyperlaneMessageStore, HyperlaneProvider,
HyperlaneWatermarkedLogStore, InterchainGasPaymaster, InterchainGasPayment, Mailbox, HyperlaneWatermarkedLogStore, InterchainGasPaymaster, InterchainGasPayment, Mailbox,
MultisigIsm, ValidatorAnnounce, H256, MerkleTreeHook, MerkleTreeInsertion, MultisigIsm, ValidatorAnnounce, H256,
}; };
use crate::{ use crate::{
@ -179,9 +179,11 @@ macro_rules! build_indexer_fns {
impl Settings { impl Settings {
build_contract_fns!(build_interchain_gas_paymaster, build_interchain_gas_paymasters -> dyn InterchainGasPaymaster); build_contract_fns!(build_interchain_gas_paymaster, build_interchain_gas_paymasters -> dyn InterchainGasPaymaster);
build_contract_fns!(build_mailbox, build_mailboxes -> dyn Mailbox); build_contract_fns!(build_mailbox, build_mailboxes -> dyn Mailbox);
build_contract_fns!(build_merkle_tree_hook, build_merkle_tree_hooks -> dyn MerkleTreeHook);
build_contract_fns!(build_validator_announce, build_validator_announces -> dyn ValidatorAnnounce); build_contract_fns!(build_validator_announce, build_validator_announces -> dyn ValidatorAnnounce);
build_contract_fns!(build_provider, build_providers -> dyn HyperlaneProvider); build_contract_fns!(build_provider, build_providers -> dyn HyperlaneProvider);
build_indexer_fns!(build_delivery_indexer, build_delivery_indexers -> dyn HyperlaneWatermarkedLogStore<Delivery>, WatermarkContractSync<Delivery>); build_indexer_fns!(build_delivery_indexer, build_delivery_indexers -> dyn HyperlaneWatermarkedLogStore<Delivery>, WatermarkContractSync<Delivery>);
build_indexer_fns!(build_message_indexer, build_message_indexers -> dyn HyperlaneMessageStore, MessageContractSync); build_indexer_fns!(build_message_indexer, build_message_indexers -> dyn HyperlaneMessageStore, MessageContractSync);
build_indexer_fns!(build_interchain_gas_payment_indexer, build_interchain_gas_payment_indexers -> dyn HyperlaneWatermarkedLogStore<InterchainGasPayment>, WatermarkContractSync<InterchainGasPayment>); build_indexer_fns!(build_interchain_gas_payment_indexer, build_interchain_gas_payment_indexers -> dyn HyperlaneWatermarkedLogStore<InterchainGasPayment>, WatermarkContractSync<InterchainGasPayment>);
build_indexer_fns!(build_merkle_tree_hook_indexer, build_merkle_tree_hook_indexers -> dyn HyperlaneWatermarkedLogStore<MerkleTreeInsertion>, WatermarkContractSync<MerkleTreeInsertion>);
} }

@ -8,8 +8,9 @@ use eyre::{eyre, Context, Result};
use hyperlane_core::{ use hyperlane_core::{
AggregationIsm, CcipReadIsm, ContractLocator, HyperlaneAbi, HyperlaneDomain, AggregationIsm, CcipReadIsm, ContractLocator, HyperlaneAbi, HyperlaneDomain,
HyperlaneDomainProtocol, HyperlaneMessage, HyperlaneProvider, HyperlaneSigner, IndexMode, HyperlaneDomainProtocol, HyperlaneMessage, HyperlaneProvider, HyperlaneSigner, IndexMode,
InterchainGasPaymaster, InterchainGasPayment, InterchainSecurityModule, Mailbox, MultisigIsm, InterchainGasPaymaster, InterchainGasPayment, InterchainSecurityModule, Mailbox,
RoutingIsm, SequenceIndexer, ValidatorAnnounce, H256, MerkleTreeHook, MerkleTreeInsertion, MultisigIsm, RoutingIsm, SequenceIndexer,
ValidatorAnnounce, H256,
}; };
use hyperlane_ethereum::{ use hyperlane_ethereum::{
self as h_eth, BuildableWithProvider, EthereumInterchainGasPaymasterAbi, EthereumMailboxAbi, self as h_eth, BuildableWithProvider, EthereumInterchainGasPaymasterAbi, EthereumMailboxAbi,
@ -76,6 +77,8 @@ pub struct CoreContractAddresses {
pub interchain_gas_paymaster: H256, pub interchain_gas_paymaster: H256,
/// Address of the ValidatorAnnounce contract /// Address of the ValidatorAnnounce contract
pub validator_announce: H256, pub validator_announce: H256,
/// Address of the MerkleTreeHook contract
pub merkle_tree_hook: Option<H256>,
} }
/// Indexing settings /// Indexing settings
@ -115,7 +118,7 @@ impl ChainConf {
/// Try to convert the chain setting into a Mailbox contract /// Try to convert the chain setting into a Mailbox contract
pub async fn build_mailbox(&self, metrics: &CoreMetrics) -> Result<Box<dyn Mailbox>> { pub async fn build_mailbox(&self, metrics: &CoreMetrics) -> Result<Box<dyn Mailbox>> {
let ctx = "Building provider"; let ctx = "Building mailbox";
let locator = self.locator(self.addresses.mailbox); let locator = self.locator(self.addresses.mailbox);
match &self.connection { match &self.connection {
@ -140,6 +143,37 @@ impl ChainConf {
.context(ctx) .context(ctx)
} }
/// Try to convert the chain setting into a Merkle Tree Hook contract
pub async fn build_merkle_tree_hook(
    &self,
    metrics: &CoreMetrics,
) -> Result<Box<dyn MerkleTreeHook>> {
    let ctx = "Building merkle tree hook";
    // TODO: if the merkle tree hook is set for sealevel, it's still a mailbox program
    // that the connection is made to using the pda seeds, which will not be usable.
    let hook_address = match self.addresses.merkle_tree_hook {
        // Prefer the dedicated hook address; fall back to the mailbox otherwise.
        Some(addr) => addr,
        None => self.addresses.mailbox,
    };
    let locator = self.locator(hook_address);
    let hook = match &self.connection {
        ChainConnectionConf::Ethereum(conf) => {
            self.build_ethereum(conf, &locator, metrics, h_eth::MerkleTreeHookBuilder {})
                .await
        }
        ChainConnectionConf::Fuel(_) => {
            todo!("Fuel does not support merkle tree hooks yet")
        }
        ChainConnectionConf::Sealevel(conf) => h_sealevel::SealevelMailbox::new(conf, locator, None)
            .map(|mailbox| Box::new(mailbox) as Box<dyn MerkleTreeHook>)
            .map_err(Into::into),
    };
    hook.context(ctx)
}
/// Try to convert the chain settings into a message indexer /// Try to convert the chain settings into a message indexer
pub async fn build_message_indexer( pub async fn build_message_indexer(
&self, &self,
@ -264,6 +298,39 @@ impl ChainConf {
.context(ctx) .context(ctx)
} }
/// Try to convert the chain settings into a merkle tree hook indexer
pub async fn build_merkle_tree_hook_indexer(
    &self,
    metrics: &CoreMetrics,
) -> Result<Box<dyn SequenceIndexer<MerkleTreeInsertion>>> {
    let ctx = "Building merkle tree hook indexer";
    // Fall back to the mailbox address when no dedicated hook is configured.
    let hook_address = self
        .addresses
        .merkle_tree_hook
        .unwrap_or(self.addresses.mailbox);
    let locator = self.locator(hook_address);
    let indexer = match &self.connection {
        ChainConnectionConf::Ethereum(conf) => {
            let builder = h_eth::MerkleTreeHookIndexerBuilder {
                finality_blocks: self.finality_blocks,
            };
            self.build_ethereum(conf, &locator, metrics, builder).await
        }
        ChainConnectionConf::Fuel(_) => todo!(),
        ChainConnectionConf::Sealevel(_) => Ok(Box::new(
            h_sealevel::SealevelMerkleTreeHookIndexer::new(),
        ) as Box<dyn SequenceIndexer<MerkleTreeInsertion>>),
    };
    indexer.context(ctx)
}
/// Try to convert the chain settings into a ValidatorAnnounce /// Try to convert the chain settings into a ValidatorAnnounce
pub async fn build_validator_announce( pub async fn build_validator_announce(
&self, &self,
@ -493,6 +560,13 @@ impl ChainConf {
self.addresses.interchain_gas_paymaster, self.addresses.interchain_gas_paymaster,
EthereumInterchainGasPaymasterAbi::fn_map_owned(), EthereumInterchainGasPaymasterAbi::fn_map_owned(),
); );
if let Some(address) = self.addresses.merkle_tree_hook {
    // NOTE(review): this registers the merkle tree hook contract with the
    // InterchainGasPaymaster ABI's function map, so function-name resolution
    // for hook calls will likely be wrong in metrics. Presumably a dedicated
    // MerkleTreeHook ABI should be used once available — TODO confirm.
    register_contract(
        "merkle_tree_hook",
        address,
        EthereumInterchainGasPaymasterAbi::fn_map_owned(),
    );
}
cfg cfg
} }

@ -1,435 +0,0 @@
//! This module is responsible for parsing the agent's settings using the old config format.
// TODO: Remove this module once we have finished migrating to the new format.
use std::{
collections::{HashMap, HashSet},
path::PathBuf,
};
use ethers_prometheus::middleware::PrometheusMiddlewareConf;
use eyre::{eyre, Context};
use hyperlane_core::{cfg_unwrap_all, config::*, utils::hex_or_base58_to_h256, HyperlaneDomain};
use serde::Deserialize;
use super::envs::*;
use crate::settings::{
chains::IndexSettings, trace::TracingConfig, ChainConf, ChainConnectionConf,
CheckpointSyncerConf, CoreContractAddresses, Settings, SignerConf,
};
/// Raw base settings.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DeprecatedRawSettings {
    /// Per-chain configuration, keyed by chain name.
    chains: Option<HashMap<String, DeprecatedRawChainConf>>,
    /// Signer applied to any chain that does not specify its own.
    defaultsigner: Option<DeprecatedRawSignerConf>,
    /// Metrics server port (string or integer form; defaults to 9090).
    metrics: Option<StrOrInt>,
    /// Tracing/logging configuration.
    tracing: Option<TracingConfig>,
}
impl FromRawConf<DeprecatedRawSettings, Option<&HashSet<&str>>> for Settings {
    /// Parse the raw settings, optionally keeping only the chains named in `filter`.
    fn from_config_filtered(
        raw: DeprecatedRawSettings,
        cwp: &ConfigPath,
        filter: Option<&HashSet<&str>>,
    ) -> Result<Self, ConfigParsingError> {
        let mut err = ConfigParsingError::default();
        let chains: HashMap<String, ChainConf> = if let Some(mut chains) = raw.chains {
            // Parse the default signer first so it can be applied to chains lacking one.
            let default_signer: Option<SignerConf> = raw.defaultsigner.and_then(|r| {
                r.parse_config(&cwp.join("defaultsigner"))
                    .take_config_err(&mut err)
            });
            if let Some(filter) = filter {
                chains.retain(|k, _| filter.contains(&k.as_str()));
            }
            let chains_path = cwp + "chains";
            chains
                .into_iter()
                .map(|(k, v)| {
                    let cwp = &chains_path + &k;
                    // Chain names are normalized to lowercase keys.
                    let k = k.to_ascii_lowercase();
                    let mut parsed: ChainConf = v.parse_config(&cwp)?;
                    if let Some(default_signer) = &default_signer {
                        parsed.signer.get_or_insert_with(|| default_signer.clone());
                    }
                    Ok((k, parsed))
                })
                // Collect successes; accumulate per-chain errors rather than failing fast.
                .filter_map(|res| match res {
                    Ok((k, v)) => Some((k, v)),
                    Err(e) => {
                        err.merge(e);
                        None
                    }
                })
                .collect()
        } else {
            Default::default()
        };
        let tracing = raw.tracing.unwrap_or_default();
        // Default metrics port if unset or unparsable.
        let metrics = raw
            .metrics
            .and_then(|port| port.try_into().take_err(&mut err, || cwp + "metrics"))
            .unwrap_or(9090);
        err.into_result(Self {
            chains,
            metrics_port: metrics,
            tracing,
        })
    }
}
/// Raw chain connection configuration, selected by the `protocol` field with
/// the protocol-specific details nested under `connection`.
#[derive(Deserialize, Debug)]
#[serde(tag = "protocol", content = "connection", rename_all = "camelCase")]
enum DeprecatedRawChainConnectionConf {
    /// Ethereum-compatible chain connection.
    Ethereum(h_eth::RawConnectionConf),
    /// Fuel chain connection.
    Fuel(h_fuel::DeprecatedRawConnectionConf),
    /// Sealevel chain connection.
    Sealevel(h_sealevel::DeprecatedRawConnectionConf),
    /// Catch-all for unrecognized protocol names; rejected at parse time.
    #[serde(other)]
    Unknown,
}
impl FromRawConf<DeprecatedRawChainConnectionConf> for ChainConnectionConf {
    fn from_config_filtered(
        raw: DeprecatedRawChainConnectionConf,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        use DeprecatedRawChainConnectionConf::*;
        // Delegate to the protocol-specific parser, attributing errors to the
        // `connection` (or, for unknown protocols, `protocol`) config path.
        match raw {
            Ethereum(r) => Ok(Self::Ethereum(r.parse_config(&cwp.join("connection"))?)),
            Fuel(r) => Ok(Self::Fuel(r.parse_config(&cwp.join("connection"))?)),
            Sealevel(r) => Ok(Self::Sealevel(r.parse_config(&cwp.join("connection"))?)),
            Unknown => {
                Err(eyre!("Unknown chain protocol")).into_config_result(|| cwp.join("protocol"))
            }
        }
    }
}
/// Raw (string-typed) core contract addresses; every field is required when parsed.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct DeprecatedRawCoreContractAddresses {
    /// Mailbox contract address (hex or base58).
    mailbox: Option<String>,
    /// InterchainGasPaymaster contract address (hex or base58).
    interchain_gas_paymaster: Option<String>,
    /// ValidatorAnnounce contract address (hex or base58).
    validator_announce: Option<String>,
}
impl FromRawConf<DeprecatedRawCoreContractAddresses> for CoreContractAddresses {
    fn from_config_filtered(
        raw: DeprecatedRawCoreContractAddresses,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        let mut err = ConfigParsingError::default();
        // Helper: require the named raw address and decode it from hex or base58,
        // recording any failure against the field's config path.
        macro_rules! parse_addr {
            ($name:ident) => {
                let $name = raw
                    .$name
                    .ok_or_else(|| {
                        eyre!(
                            "Missing {} core contract address",
                            stringify!($name).replace('_', " ")
                        )
                    })
                    .take_err(&mut err, || cwp + stringify!($name))
                    .and_then(|v| {
                        hex_or_base58_to_h256(&v).take_err(&mut err, || cwp + stringify!($name))
                    });
            };
        }
        parse_addr!(mailbox);
        parse_addr!(interchain_gas_paymaster);
        parse_addr!(validator_announce);
        // Bail out with all accumulated errors unless every address parsed.
        cfg_unwrap_all!(cwp, err: [mailbox, interchain_gas_paymaster, validator_announce]);
        err.into_result(Self {
            mailbox,
            interchain_gas_paymaster,
            validator_announce,
        })
    }
}
/// Raw indexing settings.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct DeprecatedRawIndexSettings {
    /// Starting point for indexing (defaults to 0).
    from: Option<StrOrInt>,
    /// Chunk size per indexing query (defaults to 1999).
    chunk: Option<StrOrInt>,
    /// Index mode name; deserialized through serde_json (defaults when absent).
    mode: Option<String>,
}
impl FromRawConf<DeprecatedRawIndexSettings> for IndexSettings {
    fn from_config_filtered(
        raw: DeprecatedRawIndexSettings,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        let mut err = ConfigParsingError::default();
        let from = raw
            .from
            .and_then(|v| v.try_into().take_err(&mut err, || cwp + "from"))
            .unwrap_or_default();
        let chunk_size = raw
            .chunk
            .and_then(|v| v.try_into().take_err(&mut err, || cwp + "chunk"))
            .unwrap_or(1999);
        // Route the mode string through serde_json so the same names are
        // accepted as in the JSON config format.
        let mode = raw
            .mode
            .map(serde_json::Value::from)
            .and_then(|m| {
                serde_json::from_value(m)
                    .context("Invalid mode")
                    .take_err(&mut err, || cwp + "mode")
            })
            .unwrap_or_default();
        err.into_result(Self {
            from,
            chunk_size,
            mode,
        })
    }
}
/// A raw chain setup is a domain ID, an address on that chain (where the
/// mailbox is deployed) and details for connecting to the chain API.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DeprecatedRawChainConf {
    /// Human-readable chain name.
    name: Option<String>,
    /// Numeric Hyperlane domain id.
    domain: Option<StrOrInt>,
    /// Chain-specific signer; the default signer is applied by the caller when absent.
    pub(super) signer: Option<DeprecatedRawSignerConf>,
    /// Number of blocks to wait for finality (defaults to 0).
    finality_blocks: Option<StrOrInt>,
    /// Core contract addresses for this chain.
    addresses: Option<DeprecatedRawCoreContractAddresses>,
    /// Connection details, flattened into this struct (tagged by `protocol`).
    #[serde(flatten, default)]
    connection: Option<DeprecatedRawChainConnectionConf>,
    // TODO: if people actually use the metrics conf we should also add a raw form.
    #[serde(default)]
    metrics_conf: Option<PrometheusMiddlewareConf>,
    /// Indexing settings.
    #[serde(default)]
    index: Option<DeprecatedRawIndexSettings>,
}
impl FromRawConf<DeprecatedRawChainConf> for ChainConf {
    fn from_config_filtered(
        raw: DeprecatedRawChainConf,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        let mut err = ConfigParsingError::default();
        // The connection must parse first: the protocol it names is needed to
        // construct the domain below.
        let connection = raw
            .connection
            .ok_or_else(|| eyre!("Missing `connection` configuration"))
            .take_err(&mut err, || cwp + "connection")
            .and_then(|r| r.parse_config(cwp).take_config_err(&mut err));
        let domain = connection.as_ref().and_then(|c: &ChainConnectionConf| {
            let protocol = c.protocol();
            let domain_id = raw
                .domain
                .ok_or_else(|| eyre!("Missing `domain` configuration"))
                .take_err(&mut err, || cwp + "domain")
                .and_then(|r| {
                    r.try_into()
                        .context("Invalid domain id, expected integer")
                        .take_err(&mut err, || cwp + "domain")
                });
            let name = raw
                .name
                .as_deref()
                .ok_or_else(|| eyre!("Missing domain `name` configuration"))
                .take_err(&mut err, || cwp + "name");
            HyperlaneDomain::from_config(domain_id?, name?, protocol)
                .take_err(&mut err, || cwp.clone())
        });
        let addresses = raw
            .addresses
            .ok_or_else(|| eyre!("Missing `addresses` configuration for core contracts"))
            .take_err(&mut err, || cwp + "addresses")
            .and_then(|v| {
                v.parse_config(&cwp.join("addresses"))
                    .take_config_err(&mut err)
            });
        // Signer is optional; a chain without one may still get the default
        // signer applied by the caller.
        let signer = raw.signer.and_then(|v| -> Option<SignerConf> {
            v.parse_config(&cwp.join("signer"))
                .take_config_err(&mut err)
        });
        let finality_blocks = raw
            .finality_blocks
            .and_then(|v| {
                v.try_into()
                    .context("Invalid `finalityBlocks`, expected integer")
                    .take_err(&mut err, || cwp + "finality_blocks")
            })
            .unwrap_or(0);
        let index = raw
            .index
            .and_then(|v| v.parse_config(&cwp.join("index")).take_config_err(&mut err))
            .unwrap_or_default();
        let metrics_conf = raw.metrics_conf.unwrap_or_default();
        // Fail with all accumulated errors if any required section is missing.
        cfg_unwrap_all!(cwp, err: [connection, domain, addresses]);
        err.into_result(Self {
            connection,
            domain,
            addresses,
            signer,
            finality_blocks,
            index,
            metrics_conf,
        })
    }
}
/// Raw signer types
#[derive(Debug, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct DeprecatedRawSignerConf {
    /// Signer kind: `hexKey`, `aws`, or absent (then inferred from the other fields).
    #[serde(rename = "type")]
    signer_type: Option<String>,
    /// Private key (for `hexKey` signers).
    key: Option<String>,
    /// AWS key id (for `aws` signers).
    id: Option<String>,
    /// AWS region (for `aws` signers).
    region: Option<String>,
}
/// Raw checkpoint syncer types
// The variant is selected by the `type` field of the config entry.
#[derive(Debug, Deserialize)]
#[serde(tag = "type", rename_all = "camelCase")]
pub enum DeprecatedRawCheckpointSyncerConf {
    /// A local checkpoint syncer
    LocalStorage {
        /// Path to the storage directory (created on demand if missing).
        path: Option<String>,
    },
    /// A checkpoint syncer on S3
    S3 {
        /// Bucket name
        bucket: Option<String>,
        /// S3 Region
        region: Option<String>,
        /// Folder name inside bucket - defaults to the root of the bucket
        folder: Option<String>,
    },
    /// Unknown checkpoint syncer type was specified
    #[serde(other)]
    Unknown,
}
impl FromRawConf<DeprecatedRawSignerConf> for SignerConf {
    fn from_config_filtered(
        raw: DeprecatedRawSignerConf,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        let key_path = || cwp + "key";
        let region_path = || cwp + "region";
        match raw.signer_type.as_deref() {
            // Explicit hex-key signer: `key` is required.
            Some("hexKey") => Ok(Self::HexKey {
                key: raw
                    .key
                    .ok_or_else(|| eyre!("Missing `key` for HexKey signer"))
                    .into_config_result(key_path)?
                    .parse()
                    .into_config_result(key_path)?,
            }),
            // Explicit AWS signer: `id` and `region` are required.
            Some("aws") => Ok(Self::Aws {
                id: raw
                    .id
                    .ok_or_else(|| eyre!("Missing `id` for Aws signer"))
                    .into_config_result(|| cwp + "id")?,
                region: raw
                    .region
                    .ok_or_else(|| eyre!("Missing `region` for Aws signer"))
                    .into_config_result(region_path)?
                    .parse()
                    .into_config_result(region_path)?,
            }),
            Some(t) => Err(eyre!("Unknown signer type `{t}`")).into_config_result(|| cwp + "type"),
            // No explicit type: infer a hex-key signer when `key` is present...
            None if raw.key.is_some() => Ok(Self::HexKey {
                key: raw.key.unwrap().parse().into_config_result(key_path)?,
            }),
            // ...or an AWS signer when `id` or `region` is present.
            None if raw.id.is_some() | raw.region.is_some() => Ok(Self::Aws {
                id: raw
                    .id
                    .ok_or_else(|| eyre!("Missing `id` for Aws signer"))
                    .into_config_result(|| cwp + "id")?,
                region: raw
                    .region
                    .ok_or_else(|| eyre!("Missing `region` for Aws signer"))
                    .into_config_result(region_path)?
                    .parse()
                    .into_config_result(region_path)?,
            }),
            // Nothing specified: defer signing to the node.
            None => Ok(Self::Node),
        }
    }
}
impl FromRawConf<DeprecatedRawCheckpointSyncerConf> for CheckpointSyncerConf {
    fn from_config_filtered(
        raw: DeprecatedRawCheckpointSyncerConf,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        match raw {
            DeprecatedRawCheckpointSyncerConf::LocalStorage { path } => {
                let path: PathBuf = path
                    .ok_or_else(|| eyre!("Missing `path` for LocalStorage checkpoint syncer"))
                    .into_config_result(|| cwp + "path")?
                    .parse()
                    .into_config_result(|| cwp + "path")?;
                // Create the storage directory on demand; an existing
                // non-directory path is a configuration error.
                if !path.exists() {
                    std::fs::create_dir_all(&path)
                        .with_context(|| {
                            format!(
                                "Failed to create local checkpoint syncer storage directory at {:?}",
                                path
                            )
                        })
                        .into_config_result(|| cwp + "path")?;
                } else if !path.is_dir() {
                    Err(eyre!(
                        "LocalStorage checkpoint syncer path is not a directory"
                    ))
                    .into_config_result(|| cwp + "path")?;
                }
                Ok(Self::LocalStorage { path })
            }
            DeprecatedRawCheckpointSyncerConf::S3 {
                bucket,
                folder,
                region,
            } => Ok(Self::S3 {
                bucket: bucket
                    .ok_or_else(|| eyre!("Missing `bucket` for S3 checkpoint syncer"))
                    .into_config_result(|| cwp + "bucket")?,
                folder,
                region: region
                    .ok_or_else(|| eyre!("Missing `region` for S3 checkpoint syncer"))
                    .into_config_result(|| cwp + "region")?
                    .parse()
                    .into_config_result(|| cwp + "region")?,
            }),
            DeprecatedRawCheckpointSyncerConf::Unknown => {
                Err(eyre!("Missing `type` for checkpoint syncer"))
                    .into_config_result(|| cwp + "type")
            }
        }
    }
}

@ -1,9 +1,7 @@
use std::ffi::{OsStr, OsString}; use std::ffi::{OsStr, OsString};
use config::{ConfigError, Map, Source, Value, ValueKind}; use config::{ConfigError, Map, Source, Value, ValueKind};
use convert_case::Case; use itertools::Itertools;
use crate::settings::loader::split_and_recase_key;
/// A source for loading configuration from command line arguments. /// A source for loading configuration from command line arguments.
/// ///
@ -24,10 +22,6 @@ pub struct CommandLineArguments {
/// Ignore empty env values (treat as unset). /// Ignore empty env values (treat as unset).
ignore_empty: bool, ignore_empty: bool,
/// What casing to use for the keys in the environment. By default it will not mutate the key
/// value.
casing: Option<Case>,
/// Alternate source for the environment. This can be used when you want to /// Alternate source for the environment. This can be used when you want to
/// test your own code using this source, without the need to change the /// test your own code using this source, without the need to change the
/// actual system environment variables. /// actual system environment variables.
@ -46,11 +40,6 @@ impl CommandLineArguments {
self self
} }
pub fn casing(mut self, casing: Case) -> Self {
self.casing = Some(casing);
self
}
pub fn source<I, S>(mut self, source: I) -> Self pub fn source<I, S>(mut self, source: I) -> Self
where where
I: IntoIterator<Item = S>, I: IntoIterator<Item = S>,
@ -87,7 +76,7 @@ impl Source for CommandLineArguments {
continue; continue;
} }
let key = split_and_recase_key(separator, self.casing, key); let key = key.split(separator).join(".");
m.insert(key, Value::new(Some(&uri), ValueKind::String(value))); m.insert(key, Value::new(Some(&uri), ValueKind::String(value)));
} }

@ -0,0 +1,66 @@
use std::fmt::Debug;
use config::{ConfigError, Map, Source, Value, ValueKind};
use convert_case::{Case, Casing};
use derive_new::new;
use itertools::Itertools;
/// A config `Source` wrapper that re-cases every key produced by the inner source.
#[derive(Clone, Debug, new)]
pub struct CaseAdapter<S> {
    /// The wrapped configuration source.
    inner: S,
    /// Case to convert all keys to.
    casing: Case,
}
impl<S> Source for CaseAdapter<S>
where
    S: Source + Clone + Send + Sync + 'static,
{
    fn clone_into_box(&self) -> Box<dyn Source + Send + Sync> {
        Box::new(self.clone())
    }

    /// Collect from the inner source, then re-case every key (recursively,
    /// via `recase_pair`) before handing the map back.
    fn collect(&self) -> Result<Map<String, Value>, ConfigError> {
        let collected = self.inner.collect()?;
        Ok(collected
            .into_iter()
            .map(|(key, value)| recase_pair(key, value, self.casing))
            .collect())
    }
}
/// Re-case a single key, and recursively re-case any keys nested inside the
/// value (tables and, for arrays, the element values).
fn recase_pair(key: String, mut val: Value, case: Case) -> (String, Value) {
    let key = split_and_recase_key(".", Some(case), key);
    match &mut val.kind {
        ValueKind::Table(table) => {
            // Drain into a temporary so the map is not mutated while iterating.
            let tmp = table
                .drain()
                .map(|(k, v)| recase_pair(k, v, case))
                .collect_vec();
            table.extend(tmp.into_iter());
        }
        ValueKind::Array(ary) => {
            // Array elements have no keys of their own; only their values are recased.
            let tmp = ary
                .drain(..)
                .map(|v| recase_pair(String::new(), v, case).1)
                .collect_vec();
            ary.extend(tmp.into_iter())
        }
        _ => {}
    }
    (key, val)
}
/// Split `key` on `sep` and re-join the components with the standard `config`
/// crate separator `.`, optionally re-casing each component.
fn split_and_recase_key(sep: &str, case: Option<Case>, key: String) -> String {
    if let Some(case) = case {
        // if case is given, replace case of each key component and separate them with `.`
        key.split(sep).map(|s| s.to_case(case)).join(".")
    } else if !sep.is_empty() && sep != "." {
        // Just standardize the separator to `.`
        key.replace(sep, ".")
    } else {
        // no changes needed if there was no separator defined and we are preserving case.
        key
    }
}

@ -1,343 +0,0 @@
// TODO: Remove this file after deprecated config parsing has been removed.
use std::ffi::{OsStr, OsString};
use config::{ConfigError, Map, Source, Value, ValueKind};
use convert_case::Case;
use crate::settings::loader::split_and_recase_key;
/// A source for loading configuration from command line arguments.
/// Command line argument keys are case-insensitive, and the following forms are
/// supported:
///
/// * `--key=value`
/// * `--key="value"`
/// * `--key='value'`
/// * `--key value`
/// * `--key` (value is an empty string)
#[must_use]
#[derive(Clone, Debug, Default)]
pub struct DeprecatedCommandLineArguments {
    /// Optional character sequence that separates each key segment in an
    /// argument key pattern. Consider a nested configuration such as
    /// `redis.password`, a separator of `-` would allow an argument key
    /// of `redis-password` to match. Defaults to `-` when unset.
    separator: Option<String>,

    /// Ignore empty argument values (treat as unset).
    ignore_empty: bool,

    /// Alternate source for the arguments. This can be used when you want to
    /// test your own code using this source, without the need to change the
    /// actual process arguments.
    source: Option<Vec<OsString>>,
}
#[allow(unused)]
impl DeprecatedCommandLineArguments {
    /// Set the key-segment separator (defaults to `-` when unset).
    pub fn separator(mut self, s: &str) -> Self {
        self.separator = Some(s.into());
        self
    }

    /// When true, arguments with empty values are treated as unset.
    pub fn ignore_empty(mut self, ignore: bool) -> Self {
        self.ignore_empty = ignore;
        self
    }

    /// Supply an explicit argv (e.g. for tests) instead of reading the process arguments.
    pub fn source<I, S>(mut self, source: I) -> Self
    where
        I: IntoIterator<Item = S>,
        S: AsRef<OsStr>,
    {
        self.source = Some(source.into_iter().map(|s| s.as_ref().to_owned()).collect());
        self
    }
}
impl Source for DeprecatedCommandLineArguments {
    fn clone_into_box(&self) -> Box<dyn Source + Send + Sync> {
        Box::new((*self).clone())
    }

    /// Parse all `--key[=value]` pairs into a flat-cased config map.
    fn collect(&self) -> Result<Map<String, Value>, ConfigError> {
        let mut m = Map::new();
        let uri: String = "program argument".into();

        // Default key-segment separator is `-`.
        let separator = self.separator.as_deref().unwrap_or("-");

        let mut args = if let Some(source) = &self.source {
            ArgumentParser::from_vec(source.clone())
        } else {
            ArgumentParser::from_env()
        };

        while let Some((key, value)) = args
            .next()
            .transpose()
            .map_err(|e| ConfigError::Foreign(Box::new(e)))?
        {
            if self.ignore_empty && value.is_empty() {
                continue;
            }

            let mut key = split_and_recase_key(separator, Some(Case::Flat), key);

            // Flat-casing destroys the camelCase expected for these two known
            // config keys, so restore them explicitly.
            if key.ends_with("interchaingaspaymaster") {
                key = key.replace("interchaingaspaymaster", "interchainGasPaymaster");
            } else if key.ends_with("validatorannounce") {
                key = key.replace("validatorannounce", "validatorAnnounce");
            }

            m.insert(key, Value::new(Some(&uri), ValueKind::String(value)));
        }

        // Any leftover args mean parsing failed somewhere; surface that.
        let remaining = args.finish();
        if remaining.is_empty() {
            Ok(m)
        } else {
            Err(ConfigError::Message("Could not parse all arguments".into()))
        }
    }
}
/// An ultra simple CLI arguments parser.
/// Adapted from pico-args 0.5.0.
#[derive(Clone, Debug)]
pub struct ArgumentParser(Vec<OsString>);

impl ArgumentParser {
    /// Creates a parser from a vector of arguments.
    ///
    /// The executable path **must** be removed.
    ///
    /// This can be used for supporting `--` arguments to forward to another
    /// program.
    fn from_vec(args: Vec<OsString>) -> Self {
        ArgumentParser(args)
    }

    /// Creates a parser from [`env::args_os`].
    ///
    /// The executable path will be removed.
    ///
    /// [`env::args_os`]: https://doc.rust-lang.org/stable/std/env/fn.args_os.html
    fn from_env() -> Self {
        let mut args: Vec<_> = std::env::args_os().collect();
        // Drop the executable path (argv[0]).
        args.remove(0);
        ArgumentParser(args)
    }

    /// Returns a list of remaining arguments.
    ///
    /// It's up to the caller what to do with them.
    /// One can report an error about unused arguments,
    /// other can use them for further processing.
    fn finish(self) -> Vec<OsString> {
        self.0
    }
}
impl Iterator for ArgumentParser {
    type Item = Result<(String, String), Error>;

    fn next(&mut self) -> Option<Self::Item> {
        // Find the next `--key[=value]` pair anywhere in the remaining args.
        let (k, v, kind, idx) = match self.find_next_kv_pair() {
            Ok(Some(tup)) => tup,
            Ok(None) => return None,
            Err(e) => return Some(Err(e)),
        };

        // Remove the consumed argument(s); the value (idx + 1) is removed
        // first so `idx` stays valid.
        match kind {
            PairKind::SingleArgument => {
                self.0.remove(idx);
            }
            PairKind::TwoArguments => {
                self.0.remove(idx + 1);
                self.0.remove(idx);
            }
        }

        Some(Ok((k, v)))
    }
}
// internal workings
impl ArgumentParser {
    /// Locate and parse the next `--key[=value]` pair, returning the key, the
    /// value, how many argv entries it spans, and its index in the buffer.
    #[inline(never)]
    fn find_next_kv_pair(&mut self) -> Result<Option<(String, String, PairKind, usize)>, Error> {
        let Some(idx) = self.index_of_next_key() else {
            return Ok(None);
        };
        // full term without leading '--'
        let term = &os_to_str(&self.0[idx])?[2..];
        if term.is_empty() {
            return Err(Error::EmptyKey);
        }

        if let Some((key, value)) = term.split_once('=') {
            // Parse a `--key=value` pair.
            let key = key.to_owned();

            // Check for quoted value; the surrounding quotes are stripped.
            let value = if starts_with(value, b'"') {
                if !ends_with(value, b'"') {
                    // A closing quote must be the same as an opening one.
                    return Err(Error::UnmatchedQuote(key));
                }
                &value[1..value.len() - 1]
            } else if starts_with(value, b'\'') {
                if !ends_with(value, b'\'') {
                    // A closing quote must be the same as an opening one.
                    return Err(Error::UnmatchedQuote(key));
                }
                &value[1..value.len() - 1]
            } else {
                value
            };

            Ok(Some((key, value.to_owned(), PairKind::SingleArgument, idx)))
        } else {
            // Parse a `--key value` pair.
            let key = term.to_owned();
            let value = self
                .0
                .get(idx + 1)
                .map(|v| os_to_str(v))
                .transpose()?
                .unwrap_or("");
            if value.is_empty() || value.starts_with('-') {
                // the next value is another key
                Ok(Some((key, "".to_owned(), PairKind::SingleArgument, idx)))
            } else {
                Ok(Some((key, value.to_owned(), PairKind::TwoArguments, idx)))
            }
        }
    }

    /// Index of the first remaining argument that begins with `--`, if any.
    fn index_of_next_key(&self) -> Option<usize> {
        self.0.iter().position(|v| {
            #[cfg(unix)]
            {
                use std::os::unix::ffi::OsStrExt;
                v.len() >= 2 && &v.as_bytes()[0..2] == b"--"
            }
            #[cfg(not(unix))]
            {
                v.len() >= 2 && v.to_str().map(|v| v.starts_with("--")).unwrap_or(false)
            }
        })
    }
}
/// True when the first byte of `text` equals `c` (false for empty input).
#[inline]
fn starts_with(text: &str, c: u8) -> bool {
    text.as_bytes().first() == Some(&c)
}
/// True when the last byte of `text` equals `c` (false for empty input).
#[inline]
fn ends_with(text: &str, c: u8) -> bool {
    text.as_bytes().last() == Some(&c)
}
#[inline]
fn os_to_str(text: &OsStr) -> Result<&str, Error> {
text.to_str().ok_or(Error::NonUtf8Argument)
}
/// A list of possible errors.
#[derive(Clone, Debug, thiserror::Error)]
pub enum Error {
    /// Arguments must be valid UTF-8 strings.
    #[error("argument is not a UTF-8 string")]
    NonUtf8Argument,
    /// Found `--` or a key with nothing after the prefix.
    #[error("key name is empty (possibly after removing prefix)")]
    EmptyKey,
    /// Could not find closing quote for a value.
    #[error("unmatched quote in `{0}`")]
    UnmatchedQuote(String),
}
/// How many argv entries a parsed key/value pair consumed.
#[derive(Clone, Copy, PartialEq, Eq)]
enum PairKind {
    /// `--key=value` or bare `--key`: one argv entry.
    SingleArgument,
    /// `--key value`: two argv entries.
    TwoArguments,
}
#[cfg(test)]
mod test {
    use super::*;

    // Asserts that `$config` contains `$key` with string value `$value`,
    // removing the entry so leftovers can be detected with `is_empty` later.
    macro_rules! assert_arg {
        ($config:expr, $key:literal, $value:literal) => {
            let origin = "program argument".to_owned();
            assert_eq!(
                $config.remove($key),
                Some(Value::new(
                    Some(&origin),
                    ValueKind::String($value.to_owned())
                ))
            );
        };
    }

    // Sample argv covering `--key value`, `--key=value`, double/single quoting,
    // mixed-case keys, and empty values.
    const ARGUMENTS: &[&str] = &[
        "--key-a",
        "value-a",
        "--keY-b=value-b",
        "--key-c=\"value c\"",
        "--KEY-d='valUE d'",
        "--key-e=''",
        "--key-F",
        "--key-g=value-g",
        "--key-h",
    ];

    #[test]
    fn default_case() {
        let mut config = DeprecatedCommandLineArguments::default()
            .source(ARGUMENTS)
            .collect()
            .unwrap();

        assert_arg!(config, "key.a", "value-a");
        assert_arg!(config, "key.b", "value-b");
        assert_arg!(config, "key.c", "value c");
        assert_arg!(config, "key.d", "valUE d");
        assert_arg!(config, "key.e", "");
        assert_arg!(config, "key.f", "");
        assert_arg!(config, "key.g", "value-g");
        assert_arg!(config, "key.h", "");
        assert!(config.is_empty());
    }

    #[test]
    fn ignore_empty() {
        let mut config = DeprecatedCommandLineArguments::default()
            .source(ARGUMENTS)
            .ignore_empty(true)
            .collect()
            .unwrap();

        assert_arg!(config, "key.a", "value-a");
        assert_arg!(config, "key.b", "value-b");
        assert_arg!(config, "key.c", "value c");
        assert_arg!(config, "key.d", "valUE d");
        assert_arg!(config, "key.g", "value-g");
        assert!(config.is_empty());
    }
}

@ -1,9 +1,7 @@
use std::env; use std::env;
use config::{ConfigError, Map, Source, Value, ValueKind}; use config::{ConfigError, Map, Source, Value, ValueKind};
use convert_case::Case; use itertools::Itertools;
use crate::settings::loader::split_and_recase_key;
#[must_use] #[must_use]
#[derive(Clone, Debug, Default)] #[derive(Clone, Debug, Default)]
@ -21,11 +19,6 @@ pub struct Environment {
/// an environment key of `REDIS_PASSWORD` to match. Defaults to `_`. /// an environment key of `REDIS_PASSWORD` to match. Defaults to `_`.
separator: Option<String>, separator: Option<String>,
/// What casing to use for the keys in the environment. By default it will not mutate the key
/// value. Case conversion will be performed after the prefix has been removed on each of the
/// seperated path components individually.
casing: Option<Case>,
/// Ignore empty env values (treat as unset). /// Ignore empty env values (treat as unset).
ignore_empty: bool, ignore_empty: bool,
@ -51,14 +44,9 @@ impl Environment {
self self
} }
pub fn casing(mut self, casing: Case) -> Self {
self.casing = Some(casing);
self
}
pub fn source<'a, I, S>(mut self, source: I) -> Self pub fn source<'a, I, S>(mut self, source: I) -> Self
where where
I: IntoIterator<Item = &'a (S, S)>, I: IntoIterator<Item = (S, S)>,
S: AsRef<str> + 'a, S: AsRef<str> + 'a,
{ {
self.source = Some( self.source = Some(
@ -98,7 +86,7 @@ impl Source for Environment {
return None; return None;
} }
let key = split_and_recase_key(separator, self.casing, key); let key = key.split(separator).join(".");
Some((key, Value::new(Some(&uri), ValueKind::String(value)))) Some((key, Value::new(Some(&uri), ValueKind::String(value))))
}; };
@ -138,17 +126,16 @@ mod test {
#[test] #[test]
fn default_case() { fn default_case() {
let mut config = Environment::default() let mut config = Environment::default()
.source(ENVS) .source(ENVS.iter().cloned())
.prefix("PRE__") .prefix("PRE__")
.separator("__") .separator("__")
.casing(Case::Camel)
.collect() .collect()
.unwrap(); .unwrap();
assert_env!(config, "key.a", "value-a"); assert_env!(config, "KEY.A", "value-a");
assert_env!(config, "key.b", ""); assert_env!(config, "key.b", "");
assert_env!(config, "key.c.partA", "value c a"); assert_env!(config, "KEY.C.PART_A", "value c a");
assert_env!(config, "key.cPartB", "value c b"); assert_env!(config, "KEY.C_PART_B", "value c b");
assert!(config.is_empty()); assert!(config.is_empty());
} }
@ -156,18 +143,17 @@ mod test {
#[test] #[test]
fn ignore_empty() { fn ignore_empty() {
let mut config = Environment::default() let mut config = Environment::default()
.source(ENVS) .source(ENVS.iter().cloned())
.ignore_empty(true) .ignore_empty(true)
.source(ENVS) .source(ENVS.iter().cloned())
.prefix("PRE__") .prefix("PRE__")
.separator("__") .separator("__")
.casing(Case::Snake)
.collect() .collect()
.unwrap(); .unwrap();
assert_env!(config, "key.a", "value-a"); assert_env!(config, "KEY.A", "value-a");
assert_env!(config, "key.c.part_a", "value c a"); assert_env!(config, "KEY.C.PART_A", "value c a");
assert_env!(config, "key.c_part_b", "value c b"); assert_env!(config, "KEY.C_PART_B", "value c b");
assert!(config.is_empty()); assert!(config.is_empty());
} }

@ -1,49 +1,28 @@
//! Load a settings object from the config locations. //! Load a settings object from the config locations.
use std::{collections::HashMap, env, error::Error, fmt::Debug, path::PathBuf}; use std::{env, error::Error, fmt::Debug, path::PathBuf};
use config::{Config, Environment as DeprecatedEnvironment, File}; use config::{Config, File};
use convert_case::{Case, Casing}; use convert_case::Case;
use eyre::{bail, Context, Result}; use eyre::{eyre, Context, Result};
use hyperlane_core::config::*; use hyperlane_core::config::*;
use itertools::Itertools;
use serde::de::DeserializeOwned; use serde::de::DeserializeOwned;
use crate::settings::loader::deprecated_arguments::DeprecatedCommandLineArguments; use crate::settings::loader::{
arguments::CommandLineArguments, case_adapter::CaseAdapter, environment::Environment,
};
mod arguments; mod arguments;
mod deprecated_arguments; mod case_adapter;
mod environment; mod environment;
/// Deserialize a settings object from the configs. /// Deserialize a settings object from the configs.
pub fn load_settings<T, R>(name: &str) -> ConfigResult<R> pub fn load_settings<T, R>() -> ConfigResult<R>
where where
T: DeserializeOwned + Debug, T: DeserializeOwned + Debug,
R: FromRawConf<T>, R: FromRawConf<T>,
{ {
let root_path = ConfigPath::default(); let root_path = ConfigPath::default();
let raw =
load_settings_object::<T, &str>(name, &[]).into_config_result(|| root_path.clone())?;
raw.parse_config(&root_path)
}
/// Load a settings object from the config locations.
/// Further documentation can be found in the `settings` module.
fn load_settings_object<T, S>(agent_prefix: &str, ignore_prefixes: &[S]) -> Result<T>
where
T: DeserializeOwned,
S: AsRef<str>,
{
// Derive additional prefix from agent name
let prefix = format!("HYP_{}", agent_prefix).to_ascii_uppercase();
let filtered_env: HashMap<String, String> = env::vars()
.filter(|(k, _v)| {
!ignore_prefixes
.iter()
.any(|prefix| k.starts_with(prefix.as_ref()))
})
.collect();
let mut base_config_sources = vec![]; let mut base_config_sources = vec![];
let mut builder = Config::builder(); let mut builder = Config::builder();
@ -51,7 +30,8 @@ where
// Always load the default config files (`rust/config/*.json`) // Always load the default config files (`rust/config/*.json`)
for entry in PathBuf::from("./config") for entry in PathBuf::from("./config")
.read_dir() .read_dir()
.expect("Failed to open config directory") .context("Failed to open config directory")
.into_config_result(|| root_path.clone())?
.map(Result::unwrap) .map(Result::unwrap)
{ {
if !entry.file_type().unwrap().is_file() { if !entry.file_type().unwrap().is_file() {
@ -62,7 +42,7 @@ where
let ext = fname.to_str().unwrap().split('.').last().unwrap_or(""); let ext = fname.to_str().unwrap().split('.').last().unwrap_or("");
if ext == "json" { if ext == "json" {
base_config_sources.push(format!("{:?}", entry.path())); base_config_sources.push(format!("{:?}", entry.path()));
builder = builder.add_source(File::from(entry.path())); builder = builder.add_source(CaseAdapter::new(File::from(entry.path()), Case::Flat));
} }
} }
@ -75,31 +55,41 @@ where
let p = PathBuf::from(path); let p = PathBuf::from(path);
if p.is_file() { if p.is_file() {
if p.extension() == Some("json".as_ref()) { if p.extension() == Some("json".as_ref()) {
builder = builder.add_source(File::from(p)); let config_file = File::from(p);
let re_cased_config_file = CaseAdapter::new(config_file, Case::Flat);
builder = builder.add_source(re_cased_config_file);
} else { } else {
bail!("Provided config path via CONFIG_FILES is of an unsupported type ({p:?})") return Err(eyre!(
"Provided config path via CONFIG_FILES is of an unsupported type ({p:?})"
))
.into_config_result(|| root_path.clone());
} }
} else if !p.exists() { } else if !p.exists() {
bail!("Provided config path via CONFIG_FILES does not exist ({p:?})") return Err(eyre!(
"Provided config path via CONFIG_FILES does not exist ({p:?})"
))
.into_config_result(|| root_path.clone());
} else { } else {
bail!("Provided config path via CONFIG_FILES is not a file ({p:?})") return Err(eyre!(
"Provided config path via CONFIG_FILES is not a file ({p:?})"
))
.into_config_result(|| root_path.clone());
} }
} }
let config_deserializer = builder let config_deserializer = builder
// Use a base configuration env variable prefix // Use a base configuration env variable prefix
.add_source( .add_source(CaseAdapter::new(
DeprecatedEnvironment::with_prefix("HYP_BASE") Environment::default().prefix("HYP_").separator("_"),
.separator("_") Case::Flat,
.source(Some(filtered_env.clone())), ))
) .add_source(CaseAdapter::new(
.add_source( CommandLineArguments::default().separator("."),
DeprecatedEnvironment::with_prefix(&prefix) Case::Flat,
.separator("_") ))
.source(Some(filtered_env)), .build()
) .context("Failed to load config sources")
.add_source(DeprecatedCommandLineArguments::default().separator(".")) .into_config_result(|| root_path.clone())?;
.build()?;
let formatted_config = { let formatted_config = {
let f = format!("{config_deserializer:#?}"); let f = format!("{config_deserializer:#?}");
@ -114,34 +104,26 @@ where
} }
}; };
Config::try_deserialize::<T>(config_deserializer).or_else(|err| { let raw_config = Config::try_deserialize::<T>(config_deserializer)
let mut err = if let Some(source_err) = err.source() { .or_else(|err| {
let source = format!("Config error source: {source_err}"); let mut err = if let Some(source_err) = err.source() {
Err(err).context(source) let source = format!("Config error source: {source_err}");
} else { Err(err).context(source)
Err(err.into()) } else {
}; Err(err.into())
};
for cfg_path in base_config_sources.iter().chain(config_file_paths.iter()) {
err = err.with_context(|| format!("Config loaded: {cfg_path}"));
}
println!("Error during deserialization, showing the config for debugging: {formatted_config}"); for cfg_path in base_config_sources.iter().chain(config_file_paths.iter()) {
err.context("Config deserialization error, please check the config reference (https://docs.hyperlane.xyz/docs/operators/agent-configuration/configuration-reference)") err = err.with_context(|| format!("Config loaded: {cfg_path}"));
}) }
} eprintln!("Loaded config for debugging: {formatted_config}");
err.context("Config deserialization error, please check the config reference (https://docs.hyperlane.xyz/docs/operators/agent-configuration/configuration-reference)")
})
.into_config_result(|| root_path.clone())?;
/// Load a settings object from the config locations and re-join the components with the standard let res = raw_config.parse_config(&root_path);
/// `config` crate separator `.`. if res.is_err() {
fn split_and_recase_key(sep: &str, case: Option<Case>, key: String) -> String { eprintln!("Loaded config for debugging: {formatted_config}");
if let Some(case) = case {
// if case is given, replace case of each key component and separate them with `.`
key.split(sep).map(|s| s.to_case(case)).join(".")
} else if !sep.is_empty() && sep != "." {
// Just standardize the separator to `.`
key.replace(sep, ".")
} else {
// no changes needed if there was no separator defined and we are preserving case.
key
} }
res
} }

@ -25,14 +25,7 @@
//! #### N.B.: Environment variable names correspond 1:1 with cfg file's JSON object hierarchy. //! #### N.B.: Environment variable names correspond 1:1 with cfg file's JSON object hierarchy.
//! //!
//! In particular, note that any environment variables whose names are prefixed //! In particular, note that any environment variables whose names are prefixed
//! with: //! with `HYP_` will be read as an override to be applied against the hierarchical structure
//!
//! * `HYP_BASE`
//!
//! * `HYP_[agentname]`, where `[agentmame]` is agent-specific, e.g.
//! `HYP_VALIDATOR` or `HYP_RELAYER`.
//!
//! will be read as an override to be applied against the hierarchical structure
//! of the configuration provided by the json config file at //! of the configuration provided by the json config file at
//! `./config/<env>/<config>.json`. //! `./config/<env>/<config>.json`.
//! //!
@ -40,11 +33,10 @@
//! //!
//! ```json //! ```json
//! { //! {
//! "environment": "test",
//! "signers": {}, //! "signers": {},
//! "chains": { //! "chains": {
//! "test2": { //! "test2": {
//! "domain": "13372", //! "domainId": "13372",
//! ... //! ...
//! }, //! },
//! ... //! ...
@ -53,11 +45,9 @@
//! ``` //! ```
//! //!
//! and an environment variable is supplied which defines //! and an environment variable is supplied which defines
//! `HYP_BASE_CHAINS_TEST2_DOMAIN=1`, then the `decl_settings` macro in //! `HYP_BASE_CHAINS_TEST2_DOMAINID=1`, then the config parser will directly override the value of
//! `rust/hyperlane-base/src/macros.rs` will directly override the 'domain' //! the field found in config to be `1`, since the fields in the environment variable name describe
//! field found in the json config to be `1`, since the fields in the //! the path traversal to arrive at this field in the JSON config object.
//! environment variable name describe the path traversal to arrive at this
//! field in the JSON config object.
//! //!
//! ### Configuration value precedence //! ### Configuration value precedence
//! //!
@ -69,10 +59,7 @@
//! overwriting previous ones as appropriate. //! overwriting previous ones as appropriate.
//! 3. Configuration env vars with the prefix `HYP_BASE` intended //! 3. Configuration env vars with the prefix `HYP_BASE` intended
//! to be shared by multiple agents in the same environment //! to be shared by multiple agents in the same environment
//! E.g. `export HYP_BASE_INBOXES_KOVAN_DOMAIN=3000` //! E.g. `export HYP_CHAINS_ARBITRUM_DOMAINID=3000`
//! 4. Configuration env vars with the prefix `HYP_<agent_prefix>`
//! intended to be used by a specific agent.
//! E.g. `export HYP_RELAYER_ORIGINCHAIN="ethereum"`
//! 5. Arguments passed to the agent on the command line. //! 5. Arguments passed to the agent on the command line.
//! E.g. `--originChainName ethereum` //! E.g. `--originChainName ethereum`
@ -103,7 +90,6 @@ mod signers;
mod trace; mod trace;
mod checkpoint_syncer; mod checkpoint_syncer;
pub mod deprecated_parser;
pub mod parser; pub mod parser;
/// Declare that an agent can be constructed from settings. /// Declare that an agent can be constructed from settings.
@ -117,9 +103,7 @@ macro_rules! impl_loadable_from_settings {
($agent:ident, $settingsparser:ident -> $settingsobj:ident) => { ($agent:ident, $settingsparser:ident -> $settingsobj:ident) => {
impl hyperlane_base::LoadableFromSettings for $settingsobj { impl hyperlane_base::LoadableFromSettings for $settingsobj {
fn load() -> hyperlane_core::config::ConfigResult<Self> { fn load() -> hyperlane_core::config::ConfigResult<Self> {
hyperlane_base::settings::loader::load_settings::<$settingsparser, Self>( hyperlane_base::settings::loader::load_settings::<$settingsparser, Self>()
stringify!($agent),
)
} }
} }
}; };

@ -4,6 +4,7 @@ use convert_case::{Case, Casing};
use derive_new::new; use derive_new::new;
use eyre::{eyre, Context}; use eyre::{eyre, Context};
use hyperlane_core::{config::*, utils::hex_or_base58_to_h256, H256, U256}; use hyperlane_core::{config::*, utils::hex_or_base58_to_h256, H256, U256};
use itertools::Itertools;
use serde::de::{DeserializeOwned, StdError}; use serde::de::{DeserializeOwned, StdError};
use serde_json::Value; use serde_json::Value;
@ -26,7 +27,7 @@ impl<'v> ValueParser<'v> {
/// Get a value at the given key and verify that it is present. /// Get a value at the given key and verify that it is present.
pub fn get_key(&self, key: &str) -> ConfigResult<ValueParser<'v>> { pub fn get_key(&self, key: &str) -> ConfigResult<ValueParser<'v>> {
self.get_opt_key(key)? self.get_opt_key(&key.to_case(Case::Flat))?
.ok_or_else(|| eyre!("Expected key `{key}` to be defined")) .ok_or_else(|| eyre!("Expected key `{key}` to be defined"))
.into_config_result(|| &self.cwp + key.to_case(Case::Snake)) .into_config_result(|| &self.cwp + key.to_case(Case::Snake))
} }
@ -35,7 +36,7 @@ impl<'v> ValueParser<'v> {
pub fn get_opt_key(&self, key: &str) -> ConfigResult<Option<ValueParser<'v>>> { pub fn get_opt_key(&self, key: &str) -> ConfigResult<Option<ValueParser<'v>>> {
let cwp = &self.cwp + key.to_case(Case::Snake); let cwp = &self.cwp + key.to_case(Case::Snake);
match self.val { match self.val {
Value::Object(obj) => Ok(obj.get(key).map(|val| Self { Value::Object(obj) => Ok(obj.get(&key.to_case(Case::Flat)).map(|val| Self {
val, val,
cwp: cwp.clone(), cwp: cwp.clone(),
})), })),
@ -45,6 +46,7 @@ impl<'v> ValueParser<'v> {
} }
/// Create an iterator over all (key, value) tuples. /// Create an iterator over all (key, value) tuples.
/// Be warned that keys will be in flat case.
pub fn into_obj_iter( pub fn into_obj_iter(
self, self,
) -> ConfigResult<impl Iterator<Item = (String, ValueParser<'v>)> + 'v> { ) -> ConfigResult<impl Iterator<Item = (String, ValueParser<'v>)> + 'v> {
@ -67,11 +69,40 @@ impl<'v> ValueParser<'v> {
/// Create an iterator over all array elements. /// Create an iterator over all array elements.
pub fn into_array_iter(self) -> ConfigResult<impl Iterator<Item = ValueParser<'v>>> { pub fn into_array_iter(self) -> ConfigResult<impl Iterator<Item = ValueParser<'v>>> {
let cwp = self.cwp.clone(); let cwp = self.cwp.clone();
match self.val { match self.val {
Value::Array(arr) => Ok(arr.iter().enumerate().map(move |(i, v)| Self { Value::Array(arr) => Ok(arr.iter().enumerate().map(move |(i, v)| Self {
val: v, val: v,
cwp: &cwp + i.to_string(), cwp: &cwp + i.to_string(),
})), }))
.map(|itr| Box::new(itr) as Box<dyn Iterator<Item = ValueParser<'v>>>),
Value::Object(obj) => obj
.iter()
// convert all keys to a usize index of their position in the array
.map(|(k, v)| k.parse().map(|k| (k, v)))
// handle any errors during index parsing
.collect::<Result<Vec<(usize, &'v Value)>, _>>()
.context("Expected array or array-like object where all keys are indexes; some keys are not indexes")
// sort by index
.map(|arr| arr.into_iter().sorted_unstable_by_key(|(k, _)| *k))
// check that all indexes are present
.and_then(|itr| {
itr.clone()
.enumerate()
.all(|(expected, (actual, _))| expected == actual)
.then_some(itr)
.ok_or(eyre!(
"Expected array or array-like object where all keys are indexes; some indexes are missing"
))
})
// convert to an iterator of value parsers over the values
.map(|itr| {
itr.map(move |(i, v)| Self {
val: v,
cwp: &cwp + i.to_string(),
})
})
.map(|itr| Box::new(itr) as Box<dyn Iterator<Item = ValueParser<'v>>>),
_ => Err(eyre!("Expected an array type")), _ => Err(eyre!("Expected an array type")),
} }
.into_config_result(|| self.cwp) .into_config_result(|| self.cwp)

@ -4,14 +4,12 @@
//! and validations it defines are not applied here, we should mirror them. //! and validations it defines are not applied here, we should mirror them.
//! ANY CHANGES HERE NEED TO BE REFLECTED IN THE TYPESCRIPT SDK. //! ANY CHANGES HERE NEED TO BE REFLECTED IN THE TYPESCRIPT SDK.
#![allow(dead_code)] // TODO(2214): remove before PR merge
use std::{ use std::{
cmp::Reverse,
collections::{HashMap, HashSet}, collections::{HashMap, HashSet},
default::Default, default::Default,
}; };
use convert_case::{Case, Casing};
use eyre::{eyre, Context}; use eyre::{eyre, Context};
use hyperlane_core::{ use hyperlane_core::{
cfg_unwrap_all, config::*, HyperlaneDomain, HyperlaneDomainProtocol, IndexMode, cfg_unwrap_all, config::*, HyperlaneDomain, HyperlaneDomainProtocol, IndexMode,
@ -23,8 +21,8 @@ use serde_json::Value;
pub use self::json_value_parser::ValueParser; pub use self::json_value_parser::ValueParser;
pub use super::envs::*; pub use super::envs::*;
use crate::settings::{ use crate::settings::{
chains::IndexSettings, parser::json_value_parser::ParseChain, trace::TracingConfig, ChainConf, chains::IndexSettings, trace::TracingConfig, ChainConf, ChainConnectionConf,
ChainConnectionConf, CoreContractAddresses, Settings, SignerConf, CoreContractAddresses, Settings, SignerConf,
}; };
mod json_value_parser; mod json_value_parser;
@ -83,10 +81,16 @@ impl FromRawConf<RawAgentConf, Option<&HashSet<&str>>> for Settings {
.and_then(parse_signer) .and_then(parse_signer)
.end(); .end();
let default_rpc_consensus_type = p
.chain(&mut err)
.get_opt_key("defaultRpcConsensusType")
.parse_string()
.unwrap_or("fallback");
let chains: HashMap<String, ChainConf> = raw_chains let chains: HashMap<String, ChainConf> = raw_chains
.into_iter() .into_iter()
.filter_map(|(name, chain)| { .filter_map(|(name, chain)| {
parse_chain(chain, &name) parse_chain(chain, &name, default_rpc_consensus_type)
.take_config_err(&mut err) .take_config_err(&mut err)
.map(|v| (name, v)) .map(|v| (name, v))
}) })
@ -107,7 +111,11 @@ impl FromRawConf<RawAgentConf, Option<&HashSet<&str>>> for Settings {
} }
/// The chain name and ChainMetadata /// The chain name and ChainMetadata
fn parse_chain(chain: ValueParser, name: &str) -> ConfigResult<ChainConf> { fn parse_chain(
chain: ValueParser,
name: &str,
default_rpc_consensus_type: &str,
) -> ConfigResult<ChainConf> {
let mut err = ConfigParsingError::default(); let mut err = ConfigParsingError::default();
let domain = parse_domain(chain.clone(), name).take_config_err(&mut err); let domain = parse_domain(chain.clone(), name).take_config_err(&mut err);
@ -117,8 +125,6 @@ fn parse_chain(chain: ValueParser, name: &str) -> ConfigResult<ChainConf> {
.and_then(parse_signer) .and_then(parse_signer)
.end(); .end();
// TODO(2214): is it correct to define finality blocks as `confirmations` and not `reorgPeriod`?
// TODO(2214): should we rename `finalityBlocks` in ChainConf?
let finality_blocks = chain let finality_blocks = chain
.chain(&mut err) .chain(&mut err)
.get_opt_key("blocks") .get_opt_key("blocks")
@ -126,33 +132,37 @@ fn parse_chain(chain: ValueParser, name: &str) -> ConfigResult<ChainConf> {
.parse_u32() .parse_u32()
.unwrap_or(1); .unwrap_or(1);
let rpcs: Vec<ValueParser> = let rpcs_base = chain
if let Some(custom_rpc_urls) = chain.get_opt_key("customRpcUrls").unwrap_or_default() { .chain(&mut err)
// use the custom defined urls, sorted by highest prio first .get_key("rpcUrls")
custom_rpc_urls.chain(&mut err).into_obj_iter().map(|itr| { .into_array_iter()
itr.map(|(_, url)| { .map(|urls| {
( urls.filter_map(|v| {
url.chain(&mut err) v.chain(&mut err)
.get_opt_key("priority") .get_key("http")
.parse_i32() .parse_from_str("Invalid http url")
.unwrap_or(0), .end()
url,
)
})
.sorted_unstable_by_key(|(p, _)| Reverse(*p))
.map(|(_, url)| url)
.collect()
}) })
} else { .collect_vec()
// if no custom rpc urls are set, use the default rpc urls })
chain
.chain(&mut err)
.get_key("rpcUrls")
.into_array_iter()
.map(Iterator::collect)
}
.unwrap_or_default(); .unwrap_or_default();
let rpc_overrides = chain
.chain(&mut err)
.get_opt_key("customRpcUrls")
.parse_string()
.end()
.map(|urls| {
urls.split(',')
.filter_map(|url| {
url.parse()
.take_err(&mut err, || &chain.cwp + "customRpcUrls")
})
.collect_vec()
});
let rpcs = rpc_overrides.unwrap_or(rpcs_base);
if rpcs.is_empty() { if rpcs.is_empty() {
err.push( err.push(
&chain.cwp + "rpc_urls", &chain.cwp + "rpc_urls",
@ -207,58 +217,46 @@ fn parse_chain(chain: ValueParser, name: &str) -> ConfigResult<ChainConf> {
.get_key("validatorAnnounce") .get_key("validatorAnnounce")
.parse_address_hash() .parse_address_hash()
.end(); .end();
let merkle_tree_hook = chain
.chain(&mut err)
.get_opt_key("merkleTreeHook")
.parse_address_hash()
.end();
cfg_unwrap_all!(&chain.cwp, err: [domain]); cfg_unwrap_all!(&chain.cwp, err: [domain]);
let connection: Option<ChainConnectionConf> = match domain.domain_protocol() { let connection: Option<ChainConnectionConf> = match domain.domain_protocol() {
HyperlaneDomainProtocol::Ethereum => { HyperlaneDomainProtocol::Ethereum => {
if rpcs.len() <= 1 { if rpcs.len() <= 1 {
let into_connection = rpcs.into_iter()
|url| ChainConnectionConf::Ethereum(h_eth::ConnectionConf::Http { url }); .next()
rpcs.into_iter().next().and_then(|rpc| { .map(|url| ChainConnectionConf::Ethereum(h_eth::ConnectionConf::Http { url }))
rpc.chain(&mut err)
.get_key("http")
.parse_from_str("Invalid http url")
.end()
.map(into_connection)
})
} else { } else {
let urls = rpcs
.into_iter()
.filter_map(|rpc| {
rpc.chain(&mut err)
.get_key("http")
.parse_from_str("Invalid http url")
.end()
})
.collect_vec();
let rpc_consensus_type = chain let rpc_consensus_type = chain
.chain(&mut err) .chain(&mut err)
.get_opt_key("rpcConsensusType") .get_opt_key("rpcConsensusType")
.parse_string() .parse_string()
.unwrap_or("fallback"); .unwrap_or(default_rpc_consensus_type);
match rpc_consensus_type { match rpc_consensus_type {
"fallback" => Some(h_eth::ConnectionConf::HttpFallback { urls }), "single" => Some(h_eth::ConnectionConf::Http {
"quorum" => Some(h_eth::ConnectionConf::HttpQuorum { urls }), url: rpcs.into_iter().next().unwrap(),
}),
"fallback" => Some(h_eth::ConnectionConf::HttpFallback { urls: rpcs }),
"quorum" => Some(h_eth::ConnectionConf::HttpQuorum { urls: rpcs }),
ty => Err(eyre!("unknown rpc consensus type `{ty}`")) ty => Err(eyre!("unknown rpc consensus type `{ty}`"))
.take_err(&mut err, || &chain.cwp + "rpc_consensus_type"), .take_err(&mut err, || &chain.cwp + "rpc_consensus_type"),
} }
.map(ChainConnectionConf::Ethereum) .map(ChainConnectionConf::Ethereum)
} }
} }
HyperlaneDomainProtocol::Fuel => ParseChain::from_option(rpcs.into_iter().next(), &mut err) HyperlaneDomainProtocol::Fuel => rpcs
.get_key("http") .into_iter()
.parse_from_str("Invalid http url") .next()
.end()
.map(|url| ChainConnectionConf::Fuel(h_fuel::ConnectionConf { url })), .map(|url| ChainConnectionConf::Fuel(h_fuel::ConnectionConf { url })),
HyperlaneDomainProtocol::Sealevel => { HyperlaneDomainProtocol::Sealevel => rpcs
ParseChain::from_option(rpcs.into_iter().next(), &mut err) .into_iter()
.get_key("http") .next()
.parse_from_str("Invalod http url") .map(|url| ChainConnectionConf::Sealevel(h_sealevel::ConnectionConf { url })),
.end()
.map(|url| ChainConnectionConf::Sealevel(h_sealevel::ConnectionConf { url }))
}
}; };
cfg_unwrap_all!(&chain.cwp, err: [connection, mailbox, interchain_gas_paymaster, validator_announce]); cfg_unwrap_all!(&chain.cwp, err: [connection, mailbox, interchain_gas_paymaster, validator_announce]);
@ -270,6 +268,7 @@ fn parse_chain(chain: ValueParser, name: &str) -> ConfigResult<ChainConf> {
mailbox, mailbox,
interchain_gas_paymaster, interchain_gas_paymaster,
validator_announce, validator_announce,
merkle_tree_hook,
}, },
connection, connection,
metrics_conf: Default::default(), metrics_conf: Default::default(),
@ -387,3 +386,24 @@ impl FromRawConf<RawAgentSignerConf> for SignerConf {
parse_signer(ValueParser::new(cwp.clone(), &raw.0)) parse_signer(ValueParser::new(cwp.clone(), &raw.0))
} }
} }
/// Recursively converts every object key in a JSON value tree to the given case.
///
/// Arrays are walked element-by-element and objects have each key re-cased in
/// place; scalar values (strings, numbers, bools, null) pass through untouched.
/// Values under re-cased keys are themselves recursed into, so the whole tree
/// is normalized. Note: if two distinct keys collapse to the same re-cased
/// form, the later insertion wins (same as the original implementation).
pub fn recase_json_value(mut val: Value, case: Case) -> Value {
    match &mut val {
        Value::Array(items) => {
            // Recurse into each element; `take()` swaps in Null so we can
            // move the value out and write the converted one back.
            for item in items {
                *item = recase_json_value(item.take(), case);
            }
        }
        Value::Object(map) => {
            // Snapshot the keys first: we cannot mutate the map while
            // iterating over a borrow of it.
            let original_keys: Vec<String> = map.keys().cloned().collect();
            for original_key in original_keys {
                let inner = map.remove(&original_key).unwrap();
                map.insert(original_key.to_case(case), recase_json_value(inner, case));
            }
        }
        _ => {}
    }
    val
}

@ -2,7 +2,7 @@ use std::{collections::BTreeSet, fs::read_to_string, path::Path};
use config::{Config, FileFormat}; use config::{Config, FileFormat};
use eyre::Context; use eyre::Context;
use hyperlane_base::settings::{deprecated_parser::DeprecatedRawSettings, Settings}; use hyperlane_base::settings::{parser::RawAgentConf, Settings};
use hyperlane_core::{config::*, KnownHyperlaneDomain}; use hyperlane_core::{config::*, KnownHyperlaneDomain};
use walkdir::WalkDir; use walkdir::WalkDir;
@ -71,11 +71,11 @@ fn hyperlane_settings() -> Vec<Settings> {
.zip(files.iter()) .zip(files.iter())
// Filter out config files that can't be parsed as json (e.g. env files) // Filter out config files that can't be parsed as json (e.g. env files)
.filter_map(|(p, f)| { .filter_map(|(p, f)| {
let raw: DeprecatedRawSettings = Config::builder() let raw: RawAgentConf = Config::builder()
.add_source(config::File::from_str(f.as_str(), FileFormat::Json)) .add_source(config::File::from_str(f.as_str(), FileFormat::Json))
.build() .build()
.ok()? .ok()?
.try_deserialize::<DeprecatedRawSettings>() .try_deserialize::<RawAgentConf>()
.unwrap_or_else(|e| { .unwrap_or_else(|e| {
panic!("!cfg({}): {:?}: {}", p, e, f); panic!("!cfg({}): {:?}: {}", p, e, f);
}); });

@ -76,10 +76,10 @@ impl ConfigPath {
/// Get the environment variable formatted path. /// Get the environment variable formatted path.
pub fn env_name(&self) -> String { pub fn env_name(&self) -> String {
["HYP", "BASE"] ["HYP"]
.into_iter() .into_iter()
.chain(self.0.iter().map(|s| s.as_str())) .chain(self.0.iter().map(|s| s.as_str()))
.map(|s| s.to_uppercase()) .map(|s| s.to_case(Case::UpperFlat))
.join("_") .join("_")
} }

@ -5,8 +5,8 @@ use async_trait::async_trait;
use auto_impl::auto_impl; use auto_impl::auto_impl;
use crate::{ use crate::{
accumulator::incremental::IncrementalMerkle, traits::TxOutcome, utils::domain_hash, traits::TxOutcome, utils::domain_hash, ChainResult, HyperlaneContract, HyperlaneMessage,
ChainResult, Checkpoint, HyperlaneContract, HyperlaneMessage, TxCostEstimate, H256, U256, TxCostEstimate, H256, U256,
}; };
/// Interface for the Mailbox chain contract. Allows abstraction over different /// Interface for the Mailbox chain contract. Allows abstraction over different
@ -19,12 +19,6 @@ pub trait Mailbox: HyperlaneContract + Send + Sync + Debug {
domain_hash(self.address(), self.domain().id()) domain_hash(self.address(), self.domain().id())
} }
/// Return the incremental merkle tree in storage
///
/// - `lag` is how far behind the current block to query, if not specified
/// it will query at the latest block.
async fn tree(&self, lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle>;
/// Gets the current leaf count of the merkle tree /// Gets the current leaf count of the merkle tree
/// ///
/// - `lag` is how far behind the current block to query, if not specified /// - `lag` is how far behind the current block to query, if not specified
@ -34,12 +28,6 @@ pub trait Mailbox: HyperlaneContract + Send + Sync + Debug {
/// Fetch the status of a message /// Fetch the status of a message
async fn delivered(&self, id: H256) -> ChainResult<bool>; async fn delivered(&self, id: H256) -> ChainResult<bool>;
/// Get the latest checkpoint.
///
/// - `lag` is how far behind the current block to query, if not specified
/// it will query at the latest block.
async fn latest_checkpoint(&self, lag: Option<NonZeroU64>) -> ChainResult<Checkpoint>;
/// Fetch the current default interchain security module value /// Fetch the current default interchain security module value
async fn default_ism(&self) -> ChainResult<H256>; async fn default_ism(&self) -> ChainResult<H256>;

@ -0,0 +1,33 @@
use std::fmt::Debug;
use std::num::NonZeroU64;
use async_trait::async_trait;
use auto_impl::auto_impl;
use crate::{
accumulator::incremental::IncrementalMerkle, ChainResult, Checkpoint, HyperlaneContract,
};
/// Interface for the MerkleTreeHook chain contract. Allows abstraction over
/// different chains.
///
/// The merkle tree hook stores the incremental merkle tree of dispatched
/// message IDs; implementors expose read access to that on-chain state so
/// off-chain agents can mirror the tree and produce proofs.
#[async_trait]
#[auto_impl(&, Box, Arc)]
pub trait MerkleTreeHook: HyperlaneContract + Send + Sync + Debug {
    /// Return the incremental merkle tree in storage
    ///
    /// - `lag` is how far behind the current block to query, if not specified
    ///   it will query at the latest block.
    async fn tree(&self, lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle>;

    /// Gets the current leaf count of the merkle tree
    ///
    /// - `lag` is how far behind the current block to query, if not specified
    ///   it will query at the latest block.
    async fn count(&self, lag: Option<NonZeroU64>) -> ChainResult<u32>;

    /// Get the latest checkpoint.
    ///
    /// - `lag` is how far behind the current block to query, if not specified
    ///   it will query at the latest block.
    async fn latest_checkpoint(&self, lag: Option<NonZeroU64>) -> ChainResult<Checkpoint>;
}

@ -8,6 +8,7 @@ pub use indexer::*;
pub use interchain_gas::*; pub use interchain_gas::*;
pub use interchain_security_module::*; pub use interchain_security_module::*;
pub use mailbox::*; pub use mailbox::*;
pub use merkle_tree_hook::*;
pub use multisig_ism::*; pub use multisig_ism::*;
pub use provider::*; pub use provider::*;
pub use routing_ism::*; pub use routing_ism::*;
@ -24,6 +25,7 @@ mod indexer;
mod interchain_gas; mod interchain_gas;
mod interchain_security_module; mod interchain_security_module;
mod mailbox; mod mailbox;
mod merkle_tree_hook;
mod multisig_ism; mod multisig_ism;
mod provider; mod provider;
mod routing_ism; mod routing_ism;

@ -10,7 +10,7 @@ use crate::{utils::domain_hash, Signable, Signature, SignedType, H160, H256};
#[derive(Copy, Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] #[derive(Copy, Clone, Eq, PartialEq, Serialize, Deserialize, Debug)]
pub struct Checkpoint { pub struct Checkpoint {
/// The mailbox address /// The mailbox address
pub mailbox_address: H256, pub merkle_tree_hook_address: H256,
/// The mailbox chain /// The mailbox chain
pub mailbox_domain: u32, pub mailbox_domain: u32,
/// The checkpointed root /// The checkpointed root
@ -37,7 +37,10 @@ impl Signable for Checkpoint {
// domain_hash(mailbox_address, mailbox_domain) || root || index (as u32) // domain_hash(mailbox_address, mailbox_domain) || root || index (as u32)
H256::from_slice( H256::from_slice(
Keccak256::new() Keccak256::new()
.chain(domain_hash(self.mailbox_address, self.mailbox_domain)) .chain(domain_hash(
self.merkle_tree_hook_address,
self.mailbox_domain,
))
.chain(self.root) .chain(self.root)
.chain(self.index.to_be_bytes()) .chain(self.index.to_be_bytes())
.finalize() .finalize()
@ -54,7 +57,10 @@ impl Signable for CheckpointWithMessageId {
// domain_hash(mailbox_address, mailbox_domain) || root || index (as u32) || message_id // domain_hash(mailbox_address, mailbox_domain) || root || index (as u32) || message_id
H256::from_slice( H256::from_slice(
Keccak256::new() Keccak256::new()
.chain(domain_hash(self.mailbox_address, self.mailbox_domain)) .chain(domain_hash(
self.merkle_tree_hook_address,
self.mailbox_domain,
))
.chain(self.root) .chain(self.root)
.chain(self.index.to_be_bytes()) .chain(self.index.to_be_bytes())
.chain(self.message_id) .chain(self.message_id)

@ -0,0 +1,45 @@
use derive_new::new;
use std::io::{Read, Write};
use crate::{Decode, Encode, HyperlaneProtocolError, H256};
/// Merkle Tree Hook insertion event: records that a message id was
/// appended to the merkle tree at a given leaf index.
#[derive(Debug, Copy, Clone, new)]
pub struct MerkleTreeInsertion {
    // Zero-based index of the leaf in the incremental merkle tree.
    leaf_index: u32,
    // Id of the message that was inserted at `leaf_index`.
    message_id: H256,
}
impl MerkleTreeInsertion {
    /// Id of the message that was inserted into the tree
    pub fn message_id(&self) -> H256 {
        self.message_id
    }

    /// Index of the tree leaf this insertion landed at
    pub fn index(&self) -> u32 {
        self.leaf_index
    }
}
impl Encode for MerkleTreeInsertion {
    /// Serialize as `leaf_index` followed by `message_id`, returning the
    /// total number of bytes written.
    fn write_to<W>(&self, writer: &mut W) -> std::io::Result<usize>
    where
        W: Write,
    {
        let index_len = self.leaf_index.write_to(writer)?;
        let id_len = self.message_id.write_to(writer)?;
        Ok(index_len + id_len)
    }
}
impl Decode for MerkleTreeInsertion {
    /// Deserialize in the same order `write_to` produced: `leaf_index`
    /// first, then `message_id`.
    fn read_from<R>(reader: &mut R) -> Result<Self, HyperlaneProtocolError>
    where
        R: Read,
        Self: Sized,
    {
        let leaf_index = u32::read_from(reader)?;
        let message_id = H256::read_from(reader)?;
        Ok(Self {
            leaf_index,
            message_id,
        })
    }
}

@ -10,6 +10,7 @@ pub use announcement::*;
pub use chain_data::*; pub use chain_data::*;
pub use checkpoint::*; pub use checkpoint::*;
pub use log_metadata::*; pub use log_metadata::*;
pub use merkle_tree::*;
pub use message::*; pub use message::*;
use crate::{Decode, Encode, HyperlaneProtocolError}; use crate::{Decode, Encode, HyperlaneProtocolError};
@ -18,6 +19,7 @@ mod announcement;
mod chain_data; mod chain_data;
mod checkpoint; mod checkpoint;
mod log_metadata; mod log_metadata;
mod merkle_tree;
mod message; mod message;
mod serialize; mod serialize;

@ -72,14 +72,6 @@ impl Mailbox for MockMailboxContract {
self._count(maybe_lag) self._count(maybe_lag)
} }
async fn tree(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle> {
self._tree(maybe_lag)
}
async fn latest_checkpoint(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<Checkpoint> {
self._latest_checkpoint(maybe_lag)
}
async fn default_ism(&self) -> ChainResult<H256> { async fn default_ism(&self) -> ChainResult<H256> {
self._default_ism() self._default_ism()
} }

@ -1,10 +1,10 @@
{ {
"sealeveltest2": {
"hex": "0x2317f9615d4ebc2419ad4b88580e2a80a03b2c7a60bc960de7d6934dbc37a87e",
"base58": "3MzUPjP5LEkiHH82nEAe28Xtz9ztuMqWc8UmuKxrpVQH"
},
"sealeveltest1": { "sealeveltest1": {
"hex": "0xa77b4e2ed231894cc8cb8eee21adcc705d8489bccc6b2fcf40a358de23e60b7b", "hex": "0xa77b4e2ed231894cc8cb8eee21adcc705d8489bccc6b2fcf40a358de23e60b7b",
"base58": "CGn8yNtSD3aTTqJfYhUb6s1aVTN75NzwtsFKo1e83aga" "base58": "CGn8yNtSD3aTTqJfYhUb6s1aVTN75NzwtsFKo1e83aga"
},
"sealeveltest2": {
"hex": "0x2317f9615d4ebc2419ad4b88580e2a80a03b2c7a60bc960de7d6934dbc37a87e",
"base58": "3MzUPjP5LEkiHH82nEAe28Xtz9ztuMqWc8UmuKxrpVQH"
} }
} }

@ -34,7 +34,7 @@ pub fn get_multisig_ism_test_data() -> MultisigIsmTestData {
let checkpoint = CheckpointWithMessageId { let checkpoint = CheckpointWithMessageId {
checkpoint: Checkpoint { checkpoint: Checkpoint {
mailbox_address: H256::from_str( merkle_tree_hook_address: H256::from_str(
"0xabababababababababababababababababababababababababababababababab", "0xabababababababababababababababababababababababababababababababab",
) )
.unwrap(), .unwrap(),

@ -246,7 +246,7 @@ fn verify(
let multisig_ism = MultisigIsm::new( let multisig_ism = MultisigIsm::new(
CheckpointWithMessageId { CheckpointWithMessageId {
checkpoint: Checkpoint { checkpoint: Checkpoint {
mailbox_address: metadata.origin_mailbox, merkle_tree_hook_address: metadata.origin_mailbox,
mailbox_domain: message.origin, mailbox_domain: message.origin,
root: metadata.merkle_root, root: metadata.merkle_root,
index: message.nonce, index: message.nonce,
@ -599,7 +599,7 @@ pub mod test {
// is handled in compliance with what the Mailbox expects // is handled in compliance with what the Mailbox expects
InterchainSecurityModuleInstruction::Verify(VerifyInstruction { InterchainSecurityModuleInstruction::Verify(VerifyInstruction {
metadata: MultisigIsmMessageIdMetadata { metadata: MultisigIsmMessageIdMetadata {
origin_mailbox: checkpoint.mailbox_address, origin_mailbox: checkpoint.merkle_tree_hook_address,
merkle_root: checkpoint.root, merkle_root: checkpoint.root,
validator_signatures: vec![ validator_signatures: vec![
EcdsaSignature::from_bytes(&signatures[0]).unwrap(), EcdsaSignature::from_bytes(&signatures[0]).unwrap(),
@ -624,7 +624,7 @@ pub mod test {
// is handled in compliance with what the Mailbox expects // is handled in compliance with what the Mailbox expects
InterchainSecurityModuleInstruction::Verify(VerifyInstruction { InterchainSecurityModuleInstruction::Verify(VerifyInstruction {
metadata: MultisigIsmMessageIdMetadata { metadata: MultisigIsmMessageIdMetadata {
origin_mailbox: checkpoint.mailbox_address, origin_mailbox: checkpoint.merkle_tree_hook_address,
merkle_root: checkpoint.root, merkle_root: checkpoint.root,
validator_signatures: vec![ validator_signatures: vec![
EcdsaSignature::from_bytes(&signatures[1]).unwrap(), EcdsaSignature::from_bytes(&signatures[1]).unwrap(),
@ -652,7 +652,7 @@ pub mod test {
// is handled in compliance with what the Mailbox expects // is handled in compliance with what the Mailbox expects
InterchainSecurityModuleInstruction::Verify(VerifyInstruction { InterchainSecurityModuleInstruction::Verify(VerifyInstruction {
metadata: MultisigIsmMessageIdMetadata { metadata: MultisigIsmMessageIdMetadata {
origin_mailbox: checkpoint.mailbox_address, origin_mailbox: checkpoint.merkle_tree_hook_address,
merkle_root: checkpoint.root, merkle_root: checkpoint.root,
validator_signatures: vec![ validator_signatures: vec![
EcdsaSignature::from_bytes(&signatures[0]).unwrap(), EcdsaSignature::from_bytes(&signatures[0]).unwrap(),
@ -676,7 +676,7 @@ pub mod test {
// is handled in compliance with what the Mailbox expects // is handled in compliance with what the Mailbox expects
InterchainSecurityModuleInstruction::Verify(VerifyInstruction { InterchainSecurityModuleInstruction::Verify(VerifyInstruction {
metadata: MultisigIsmMessageIdMetadata { metadata: MultisigIsmMessageIdMetadata {
origin_mailbox: checkpoint.mailbox_address, origin_mailbox: checkpoint.merkle_tree_hook_address,
merkle_root: checkpoint.root, merkle_root: checkpoint.root,
validator_signatures: vec![ validator_signatures: vec![
EcdsaSignature::from_bytes(&signatures[0]).unwrap(), EcdsaSignature::from_bytes(&signatures[0]).unwrap(),

@ -419,7 +419,7 @@ async fn test_ism_verify() {
// A valid verify instruction with a quorum // A valid verify instruction with a quorum
let verify_instruction = VerifyInstruction { let verify_instruction = VerifyInstruction {
metadata: MultisigIsmMessageIdMetadata { metadata: MultisigIsmMessageIdMetadata {
origin_mailbox: checkpoint.mailbox_address, origin_mailbox: checkpoint.merkle_tree_hook_address,
merkle_root: checkpoint.root, merkle_root: checkpoint.root,
validator_signatures: vec![ validator_signatures: vec![
EcdsaSignature::from_bytes(&signatures[0]).unwrap(), EcdsaSignature::from_bytes(&signatures[0]).unwrap(),

@ -8,7 +8,7 @@ use crate::config::Config;
use crate::logging::log; use crate::logging::log;
use crate::program::Program; use crate::program::Program;
use crate::utils::{as_task, AgentHandles, TaskHandle}; use crate::utils::{as_task, AgentHandles, TaskHandle};
use crate::{INFRA_PATH, MONOREPO_ROOT_PATH, TS_SDK_PATH}; use crate::{INFRA_PATH, MONOREPO_ROOT_PATH};
#[apply(as_task)] #[apply(as_task)]
pub fn start_anvil(config: Arc<Config>) -> AgentHandles { pub fn start_anvil(config: Arc<Config>) -> AgentHandles {
@ -35,34 +35,13 @@ pub fn start_anvil(config: Arc<Config>) -> AgentHandles {
sleep(Duration::from_secs(10)); sleep(Duration::from_secs(10));
let yarn_infra = Program::new("yarn") let yarn_infra = Program::new("yarn").working_dir(INFRA_PATH);
.working_dir(INFRA_PATH)
.env("ALLOW_LEGACY_MULTISIG_ISM", "true");
log!("Deploying hyperlane ism contracts..."); log!("Deploying hyperlane ism contracts...");
yarn_infra.clone().cmd("deploy-ism").run().join(); yarn_infra.clone().cmd("deploy-ism").run().join();
log!("Rebuilding sdk...");
let yarn_sdk = Program::new("yarn").working_dir(TS_SDK_PATH);
yarn_sdk.clone().cmd("build").run().join();
log!("Deploying hyperlane core contracts..."); log!("Deploying hyperlane core contracts...");
yarn_infra.clone().cmd("deploy-core").run().join(); yarn_infra.clone().cmd("deploy-core").run().join();
log!("Deploying hyperlane hook contracts...");
yarn_infra.clone().cmd("deploy-hook").run().join();
log!("Deploying hyperlane igp contracts...");
yarn_infra.cmd("deploy-igp").run().join();
if !config.is_ci_env {
// Follow-up 'yarn hardhat node' invocation with 'yarn prettier' to fixup
// formatting on any autogenerated json config files to avoid any diff creation.
yarn_monorepo.cmd("prettier").run().join();
}
// Rebuild the SDK to pick up the deployed contracts
log!("Rebuilding sdk...");
yarn_sdk.cmd("build").run().join();
anvil anvil
} }

@ -1,22 +1,22 @@
use std::path::Path; // use std::path::Path;
use crate::config::Config; use crate::config::Config;
use maplit::hashmap; use maplit::hashmap;
use crate::fetch_metric; use crate::fetch_metric;
use crate::logging::log; use crate::logging::log;
use crate::solana::solana_termination_invariants_met; // use crate::solana::solana_termination_invariants_met;
// This number should be even, so the messages can be split into two equal halves // This number should be even, so the messages can be split into two equal halves
// sent before and after the relayer spins up, to avoid rounding errors. // sent before and after the relayer spins up, to avoid rounding errors.
pub const SOL_MESSAGES_EXPECTED: u32 = 20; pub const SOL_MESSAGES_EXPECTED: u32 = 0;
/// Use the metrics to check if the relayer queues are empty and the expected /// Use the metrics to check if the relayer queues are empty and the expected
/// number of messages have been sent. /// number of messages have been sent.
pub fn termination_invariants_met( pub fn termination_invariants_met(
config: &Config, config: &Config,
solana_cli_tools_path: &Path, // solana_cli_tools_path: &Path,
solana_config_path: &Path, // solana_config_path: &Path,
) -> eyre::Result<bool> { ) -> eyre::Result<bool> {
let eth_messages_expected = (config.kathy_messages / 2) as u32 * 2; let eth_messages_expected = (config.kathy_messages / 2) as u32 * 2;
let total_messages_expected = eth_messages_expected + SOL_MESSAGES_EXPECTED; let total_messages_expected = eth_messages_expected + SOL_MESSAGES_EXPECTED;
@ -63,19 +63,20 @@ pub fn termination_invariants_met(
.sum::<u32>(); .sum::<u32>();
// TestSendReceiver randomly breaks gas payments up into // TestSendReceiver randomly breaks gas payments up into
// two. So we expect at least as many gas payments as messages. // two. So we expect at least as many gas payments as messages.
if gas_payment_events_count < total_messages_expected { // TODO: fix this once eth gas payments are introduced
log!( // if gas_payment_events_count < total_messages_expected {
"Relayer has {} gas payment events, expected at least {}", // log!(
gas_payment_events_count, // "Relayer has {} gas payment events, expected at least {}",
total_messages_expected // gas_payment_events_count,
); // total_messages_expected
return Ok(false); // );
} // return Ok(false);
// }
if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) { // if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) {
log!("Solana termination invariants not met"); // log!("Solana termination invariants not met");
return Ok(false); // return Ok(false);
} // }
let dispatched_messages_scraped = fetch_metric( let dispatched_messages_scraped = fetch_metric(
"9093", "9093",

@ -12,26 +12,27 @@
//! - `E2E_KATHY_MESSAGES`: Number of kathy messages to dispatch. Defaults to 16 if CI mode is enabled. //! - `E2E_KATHY_MESSAGES`: Number of kathy messages to dispatch. Defaults to 16 if CI mode is enabled.
//! else false. //! else false.
use std::path::Path;
use std::{ use std::{
fs, fs,
path::Path,
process::{Child, ExitCode}, process::{Child, ExitCode},
sync::atomic::{AtomicBool, Ordering}, sync::atomic::{AtomicBool, Ordering},
thread::sleep, thread::sleep,
time::{Duration, Instant}, time::{Duration, Instant},
}; };
use tempfile::tempdir;
use logging::log; use logging::log;
pub use metrics::fetch_metric; pub use metrics::fetch_metric;
use program::Program; use program::Program;
use tempfile::tempdir;
use crate::config::Config; use crate::{
use crate::ethereum::start_anvil; config::Config,
use crate::invariants::{termination_invariants_met, SOL_MESSAGES_EXPECTED}; ethereum::start_anvil,
use crate::solana::*; invariants::termination_invariants_met,
use crate::utils::{concat_path, make_static, stop_child, AgentHandles, ArbitraryData, TaskHandle}; solana::*,
utils::{concat_path, make_static, stop_child, AgentHandles, ArbitraryData, TaskHandle},
};
mod config; mod config;
mod ethereum; mod ethereum;
@ -70,7 +71,7 @@ const VALIDATOR_ORIGIN_CHAINS: &[&str] = &["test1", "test2", "test3", "sealevelt
const AGENT_BIN_PATH: &str = "target/debug"; const AGENT_BIN_PATH: &str = "target/debug";
const INFRA_PATH: &str = "../typescript/infra"; const INFRA_PATH: &str = "../typescript/infra";
const TS_SDK_PATH: &str = "../typescript/sdk"; // const TS_SDK_PATH: &str = "../typescript/sdk";
const MONOREPO_ROOT_PATH: &str = "../"; const MONOREPO_ROOT_PATH: &str = "../";
type DynPath = Box<dyn AsRef<Path>>; type DynPath = Box<dyn AsRef<Path>>;
@ -145,8 +146,8 @@ fn main() -> ExitCode {
let common_agent_env = Program::default() let common_agent_env = Program::default()
.env("RUST_BACKTRACE", "full") .env("RUST_BACKTRACE", "full")
.hyp_env("TRACING_FMT", "compact") .hyp_env("LOG_FORMAT", "compact")
.hyp_env("TRACING_LEVEL", "debug") .hyp_env("LOG_LEVEL", "debug")
.hyp_env("CHAINS_TEST1_INDEX_CHUNK", "1") .hyp_env("CHAINS_TEST1_INDEX_CHUNK", "1")
.hyp_env("CHAINS_TEST2_INDEX_CHUNK", "1") .hyp_env("CHAINS_TEST2_INDEX_CHUNK", "1")
.hyp_env("CHAINS_TEST3_INDEX_CHUNK", "1"); .hyp_env("CHAINS_TEST3_INDEX_CHUNK", "1");
@ -154,16 +155,16 @@ fn main() -> ExitCode {
let relayer_env = common_agent_env let relayer_env = common_agent_env
.clone() .clone()
.bin(concat_path(AGENT_BIN_PATH, "relayer")) .bin(concat_path(AGENT_BIN_PATH, "relayer"))
.hyp_env("CHAINS_TEST1_CONNECTION_TYPE", "httpFallback") .hyp_env("CHAINS_TEST1_RPCCONSENSUSTYPE", "fallback")
.hyp_env( .hyp_env(
"CHAINS_TEST2_CONNECTION_URLS", "CHAINS_TEST2_CONNECTION_URLS",
"http://127.0.0.1:8545,http://127.0.0.1:8545,http://127.0.0.1:8545", "http://127.0.0.1:8545,http://127.0.0.1:8545,http://127.0.0.1:8545",
) )
// by setting this as a quorum provider we will cause nonce errors when delivering to test2 // by setting this as a quorum provider we will cause nonce errors when delivering to test2
// because the message will be sent to the node 3 times. // because the message will be sent to the node 3 times.
.hyp_env("CHAINS_TEST2_CONNECTION_TYPE", "httpQuorum") .hyp_env("CHAINS_TEST2_RPCCONSENSUSTYPE", "quorum")
.hyp_env("CHAINS_TEST3_CONNECTION_URL", "http://127.0.0.1:8545") .hyp_env("CHAINS_TEST3_RPCCONSENSUSTYPE", "http://127.0.0.1:8545")
.hyp_env("METRICS", "9092") .hyp_env("METRICSPORT", "9092")
.hyp_env("DB", relayer_db.to_str().unwrap()) .hyp_env("DB", relayer_db.to_str().unwrap())
.hyp_env("CHAINS_TEST1_SIGNER_KEY", RELAYER_KEYS[0]) .hyp_env("CHAINS_TEST1_SIGNER_KEY", RELAYER_KEYS[0])
.hyp_env("CHAINS_TEST2_SIGNER_KEY", RELAYER_KEYS[1]) .hyp_env("CHAINS_TEST2_SIGNER_KEY", RELAYER_KEYS[1])
@ -188,7 +189,7 @@ fn main() -> ExitCode {
}]"#, }]"#,
) )
.arg( .arg(
"chains.test1.connection.urls", "chains.test1.customRpcUrls",
"http://127.0.0.1:8545,http://127.0.0.1:8545,http://127.0.0.1:8545", "http://127.0.0.1:8545,http://127.0.0.1:8545,http://127.0.0.1:8545",
) )
// default is used for TEST3 // default is used for TEST3
@ -202,17 +203,19 @@ fn main() -> ExitCode {
.clone() .clone()
.bin(concat_path(AGENT_BIN_PATH, "validator")) .bin(concat_path(AGENT_BIN_PATH, "validator"))
.hyp_env( .hyp_env(
"CHAINS_TEST1_CONNECTION_URLS", "CHAINS_TEST1_CUSTOMRPCURLS",
"http://127.0.0.1:8545,http://127.0.0.1:8545,http://127.0.0.1:8545", "http://127.0.0.1:8545,http://127.0.0.1:8545,http://127.0.0.1:8545",
) )
.hyp_env("CHAINS_TEST1_CONNECTION_TYPE", "httpQuorum") .hyp_env("CHAINS_TEST1_RPCCONSENSUSTYPE", "quorum")
.hyp_env( .hyp_env(
"CHAINS_TEST2_CONNECTION_URLS", "CHAINS_TEST2_CUSTOMRPCURLS",
"http://127.0.0.1:8545,http://127.0.0.1:8545,http://127.0.0.1:8545", "http://127.0.0.1:8545,http://127.0.0.1:8545,http://127.0.0.1:8545",
) )
.hyp_env("CHAINS_TEST2_CONNECTION_TYPE", "httpFallback") .hyp_env("CHAINS_TEST2_RPCCONSENSUSTYPE", "fallback")
.hyp_env("CHAINS_TEST3_CONNECTION_URL", "http://127.0.0.1:8545") .hyp_env("CHAINS_TEST3_CUSTOMRPCURLS", "http://127.0.0.1:8545")
.hyp_env("REORGPERIOD", "0") .hyp_env("CHAINS_TEST1_BLOCKS_REORGPERIOD", "0")
.hyp_env("CHAINS_TEST2_BLOCKS_REORGPERIOD", "0")
.hyp_env("CHAINS_TEST3_BLOCKS_REORGPERIOD", "0")
.hyp_env("INTERVAL", "5") .hyp_env("INTERVAL", "5")
.hyp_env("CHECKPOINTSYNCER_TYPE", "localStorage"); .hyp_env("CHECKPOINTSYNCER_TYPE", "localStorage");
@ -220,7 +223,7 @@ fn main() -> ExitCode {
.map(|i| { .map(|i| {
base_validator_env base_validator_env
.clone() .clone()
.hyp_env("METRICS", (9094 + i).to_string()) .hyp_env("METRICSPORT", (9094 + i).to_string())
.hyp_env("DB", validator_dbs[i].to_str().unwrap()) .hyp_env("DB", validator_dbs[i].to_str().unwrap())
.hyp_env("ORIGINCHAINNAME", VALIDATOR_ORIGIN_CHAINS[i]) .hyp_env("ORIGINCHAINNAME", VALIDATOR_ORIGIN_CHAINS[i])
.hyp_env("VALIDATOR_KEY", VALIDATOR_KEYS[i]) .hyp_env("VALIDATOR_KEY", VALIDATOR_KEYS[i])
@ -233,14 +236,14 @@ fn main() -> ExitCode {
let scraper_env = common_agent_env let scraper_env = common_agent_env
.bin(concat_path(AGENT_BIN_PATH, "scraper")) .bin(concat_path(AGENT_BIN_PATH, "scraper"))
.hyp_env("CHAINS_TEST1_CONNECTION_TYPE", "httpQuorum") .hyp_env("CHAINS_TEST1_RPCCONSENSUSTYPE", "quorum")
.hyp_env("CHAINS_TEST1_CONNECTION_URL", "http://127.0.0.1:8545") .hyp_env("CHAINS_TEST1_CUSTOMRPCURLS", "http://127.0.0.1:8545")
.hyp_env("CHAINS_TEST2_CONNECTION_TYPE", "httpQuorum") .hyp_env("CHAINS_TEST2_RPCCONSENSUSTYPE", "quorum")
.hyp_env("CHAINS_TEST2_CONNECTION_URL", "http://127.0.0.1:8545") .hyp_env("CHAINS_TEST2_CUSTOMRPCURLS", "http://127.0.0.1:8545")
.hyp_env("CHAINS_TEST3_CONNECTION_TYPE", "httpQuorum") .hyp_env("CHAINS_TEST3_RPCCONSENSUSTYPE", "quorum")
.hyp_env("CHAINS_TEST3_CONNECTION_URL", "http://127.0.0.1:8545") .hyp_env("CHAINS_TEST3_CUSTOMRPCURLS", "http://127.0.0.1:8545")
.hyp_env("CHAINSTOSCRAPE", "test1,test2,test3") .hyp_env("CHAINSTOSCRAPE", "test1,test2,test3")
.hyp_env("METRICS", "9093") .hyp_env("METRICSPORT", "9093")
.hyp_env( .hyp_env(
"DB", "DB",
"postgresql://postgres:47221c18c610@localhost:5432/postgres", "postgresql://postgres:47221c18c610@localhost:5432/postgres",
@ -306,7 +309,7 @@ fn main() -> ExitCode {
solana_ledger_dir.as_ref().to_path_buf(), solana_ledger_dir.as_ref().to_path_buf(),
); );
let (solana_config_path, solana_validator) = start_solana_validator.join(); let (_solana_config_path, solana_validator) = start_solana_validator.join();
state.push_agent(solana_validator); state.push_agent(solana_validator);
state.push_agent(start_anvil.join()); state.push_agent(start_anvil.join());
@ -336,16 +339,16 @@ fn main() -> ExitCode {
} }
// Send some sealevel messages before spinning up the relayer, to test the backward indexing cursor // Send some sealevel messages before spinning up the relayer, to test the backward indexing cursor
for _i in 0..(SOL_MESSAGES_EXPECTED / 2) { // for _i in 0..(SOL_MESSAGES_EXPECTED / 2) {
initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join(); // initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join();
} // }
state.push_agent(relayer_env.spawn("RLY")); state.push_agent(relayer_env.spawn("RLY"));
// Send some sealevel messages after spinning up the relayer, to test the forward indexing cursor // Send some sealevel messages after spinning up the relayer, to test the forward indexing cursor
for _i in 0..(SOL_MESSAGES_EXPECTED / 2) { // for _i in 0..(SOL_MESSAGES_EXPECTED / 2) {
initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join(); // initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join();
} // }
log!("Setup complete! Agents running in background..."); log!("Setup complete! Agents running in background...");
log!("Ctrl+C to end execution..."); log!("Ctrl+C to end execution...");
@ -360,7 +363,8 @@ fn main() -> ExitCode {
while !SHUTDOWN.load(Ordering::Relaxed) { while !SHUTDOWN.load(Ordering::Relaxed) {
if config.ci_mode { if config.ci_mode {
// for CI we have to look for the end condition. // for CI we have to look for the end condition.
if termination_invariants_met(&config, &solana_path, &solana_config_path) if termination_invariants_met(&config)
// if termination_invariants_met(&config, &solana_path, &solana_config_path)
.unwrap_or(false) .unwrap_or(false)
{ {
// end condition reached successfully // end condition reached successfully
@ -375,11 +379,17 @@ fn main() -> ExitCode {
// verify long-running tasks are still running // verify long-running tasks are still running
for (name, child) in state.agents.iter_mut() { for (name, child) in state.agents.iter_mut() {
if child.try_wait().unwrap().is_some() { if let Some(status) = child.try_wait().unwrap() {
log!("Child process {} exited unexpectedly, shutting down", name); if !status.success() {
failure_occurred = true; log!(
SHUTDOWN.store(true, Ordering::Relaxed); "Child process {} exited unexpectedly, with code {}. Shutting down",
break; name,
status.code().unwrap()
);
failure_occurred = true;
SHUTDOWN.store(true, Ordering::Relaxed);
break;
}
} }
} }

@ -1,24 +1,31 @@
use std::collections::BTreeMap; use std::{
use std::ffi::OsStr; collections::BTreeMap,
use std::fmt::{Debug, Display, Formatter}; ffi::OsStr,
use std::io::{BufRead, BufReader, Read}; fmt::{Debug, Display, Formatter},
use std::path::{Path, PathBuf}; io::{BufRead, BufReader, Read},
use std::process::{Command, Stdio}; path::{Path, PathBuf},
use std::sync::atomic::{AtomicBool, Ordering}; process::{Command, Stdio},
use std::sync::mpsc::Sender; sync::{
use std::sync::{mpsc, Arc}; atomic::{AtomicBool, Ordering},
use std::thread::{sleep, spawn}; mpsc,
use std::time::Duration; mpsc::Sender,
Arc,
},
thread::{sleep, spawn},
time::Duration,
};
use eyre::Context; use eyre::Context;
use macro_rules_attribute::apply; use macro_rules_attribute::apply;
use crate::logging::log; use crate::{
use crate::utils::{ logging::log,
as_task, stop_child, AgentHandles, ArbitraryData, LogFilter, MappingTaskHandle, utils::{
SimpleTaskHandle, TaskHandle, as_task, stop_child, AgentHandles, ArbitraryData, LogFilter, MappingTaskHandle,
SimpleTaskHandle, TaskHandle,
},
RUN_LOG_WATCHERS, SHUTDOWN,
}; };
use crate::{RUN_LOG_WATCHERS, SHUTDOWN};
#[derive(Default, Clone)] #[derive(Default, Clone)]
#[must_use] #[must_use]
@ -134,7 +141,7 @@ impl Program {
/// add an env that will be prefixed with the default hyperlane env prefix /// add an env that will be prefixed with the default hyperlane env prefix
pub fn hyp_env(self, key: impl AsRef<str>, value: impl Into<String>) -> Self { pub fn hyp_env(self, key: impl AsRef<str>, value: impl Into<String>) -> Self {
const PREFIX: &str = "HYP_BASE_"; const PREFIX: &str = "HYP_";
let key = key.as_ref(); let key = key.as_ref();
debug_assert!( debug_assert!(
!key.starts_with(PREFIX), !key.starts_with(PREFIX),

@ -282,7 +282,7 @@ pub fn start_solana_test_validator(
} }
#[apply(as_task)] #[apply(as_task)]
pub fn initiate_solana_hyperlane_transfer( pub fn _initiate_solana_hyperlane_transfer(
solana_cli_tools_path: PathBuf, solana_cli_tools_path: PathBuf,
solana_config_path: PathBuf, solana_config_path: PathBuf,
) { ) {
@ -309,7 +309,7 @@ pub fn initiate_solana_hyperlane_transfer(
.run_with_output() .run_with_output()
.join(); .join();
let message_id = get_message_id_from_logs(output); let message_id = _get_message_id_from_logs(output);
if let Some(message_id) = message_id { if let Some(message_id) = message_id {
sealevel_client(&solana_cli_tools_path, &solana_config_path) sealevel_client(&solana_cli_tools_path, &solana_config_path)
.cmd("igp") .cmd("igp")
@ -321,7 +321,7 @@ pub fn initiate_solana_hyperlane_transfer(
} }
} }
fn get_message_id_from_logs(logs: Vec<String>) -> Option<String> { fn _get_message_id_from_logs(logs: Vec<String>) -> Option<String> {
let message_id_regex = Regex::new(r"Dispatched message to \d+, ID 0x([0-9a-fA-F]+)").unwrap(); let message_id_regex = Regex::new(r"Dispatched message to \d+, ID 0x([0-9a-fA-F]+)").unwrap();
for log in logs { for log in logs {
// Use the regular expression to capture the ID // Use the regular expression to capture the ID
@ -335,7 +335,7 @@ fn get_message_id_from_logs(logs: Vec<String>) -> Option<String> {
None None
} }
pub fn solana_termination_invariants_met( pub fn _solana_termination_invariants_met(
solana_cli_tools_path: &Path, solana_cli_tools_path: &Path,
solana_config_path: &Path, solana_config_path: &Path,
) -> bool { ) -> bool {

@ -7,6 +7,8 @@ import {IInterchainGasPaymaster} from "../interfaces/IInterchainGasPaymaster.sol
import {IMessageRecipient} from "../interfaces/IMessageRecipient.sol"; import {IMessageRecipient} from "../interfaces/IMessageRecipient.sol";
import {IMailbox} from "../interfaces/IMailbox.sol"; import {IMailbox} from "../interfaces/IMailbox.sol";
import {StandardHookMetadata} from "../hooks/libs/StandardHookMetadata.sol";
contract TestSendReceiver is IMessageRecipient { contract TestSendReceiver is IMessageRecipient {
using TypeCasts for address; using TypeCasts for address;
@ -16,42 +18,20 @@ contract TestSendReceiver is IMessageRecipient {
function dispatchToSelf( function dispatchToSelf(
IMailbox _mailbox, IMailbox _mailbox,
IInterchainGasPaymaster _paymaster,
uint32 _destinationDomain, uint32 _destinationDomain,
bytes calldata _messageBody bytes calldata _messageBody
) external payable { ) external payable {
bytes32 _messageId = _mailbox.dispatch( bytes memory hookMetadata = StandardHookMetadata.formatMetadata(
HANDLE_GAS_AMOUNT,
msg.sender
);
// TODO: handle topping up?
_mailbox.dispatch{value: msg.value}(
_destinationDomain, _destinationDomain,
address(this).addressToBytes32(), address(this).addressToBytes32(),
_messageBody _messageBody,
hookMetadata
); );
uint256 _blockHashNum = uint256(previousBlockHash());
uint256 _value = msg.value;
if (_blockHashNum % 5 == 0) {
// Pay in two separate calls, resulting in 2 distinct events
uint256 _halfPayment = _value / 2;
uint256 _halfGasAmount = HANDLE_GAS_AMOUNT / 2;
_paymaster.payForGas{value: _halfPayment}(
_messageId,
_destinationDomain,
_halfGasAmount,
msg.sender
);
_paymaster.payForGas{value: _value - _halfPayment}(
_messageId,
_destinationDomain,
HANDLE_GAS_AMOUNT - _halfGasAmount,
msg.sender
);
} else {
// Pay the entire msg.value in one call
_paymaster.payForGas{value: _value}(
_messageId,
_destinationDomain,
HANDLE_GAS_AMOUNT,
msg.sender
);
}
} }
function handle( function handle(

@ -6,3 +6,4 @@ cache/
test/outputs test/outputs
config/environments/test/core/ config/environments/test/core/
config/environments/test/igp/ config/environments/test/igp/
config/environments/test/ism/

@ -1,12 +1,12 @@
import { import {
AgentConnectionType, GasPaymentEnforcementPolicyType,
RpcConsensusType,
chainMetadata, chainMetadata,
hyperlaneEnvironments, hyperlaneEnvironments,
} from '@hyperlane-xyz/sdk'; } from '@hyperlane-xyz/sdk';
import { objMap } from '@hyperlane-xyz/utils'; import { objMap } from '@hyperlane-xyz/utils';
import { import {
GasPaymentEnforcementPolicyType,
RootAgentConfig, RootAgentConfig,
allAgentChainNames, allAgentChainNames,
routerMatchingList, routerMatchingList,
@ -80,7 +80,7 @@ const hyperlane: RootAgentConfig = {
context: Contexts.Hyperlane, context: Contexts.Hyperlane,
rolesWithKeys: ALL_KEY_ROLES, rolesWithKeys: ALL_KEY_ROLES,
relayer: { relayer: {
connectionType: AgentConnectionType.HttpFallback, rpcConsensusType: RpcConsensusType.Fallback,
docker: { docker: {
repo, repo,
tag: '3b0685f-20230815-110725', tag: '3b0685f-20230815-110725',
@ -117,11 +117,11 @@ const hyperlane: RootAgentConfig = {
tag: '3b0685f-20230815-110725', tag: '3b0685f-20230815-110725',
}, },
}, },
connectionType: AgentConnectionType.HttpQuorum, rpcConsensusType: RpcConsensusType.Quorum,
chains: validatorChainConfig(Contexts.Hyperlane), chains: validatorChainConfig(Contexts.Hyperlane),
}, },
scraper: { scraper: {
connectionType: AgentConnectionType.HttpFallback, rpcConsensusType: RpcConsensusType.Fallback,
docker: { docker: {
repo, repo,
tag: 'aaddba7-20230620-154941', tag: 'aaddba7-20230620-154941',
@ -134,7 +134,7 @@ const releaseCandidate: RootAgentConfig = {
context: Contexts.ReleaseCandidate, context: Contexts.ReleaseCandidate,
rolesWithKeys: [Role.Relayer, Role.Kathy, Role.Validator], rolesWithKeys: [Role.Relayer, Role.Kathy, Role.Validator],
relayer: { relayer: {
connectionType: AgentConnectionType.HttpFallback, rpcConsensusType: RpcConsensusType.Fallback,
docker: { docker: {
repo, repo,
tag: '3b0685f-20230815-110725', tag: '3b0685f-20230815-110725',
@ -144,14 +144,14 @@ const releaseCandidate: RootAgentConfig = {
transactionGasLimit: 750000, transactionGasLimit: 750000,
// Skipping arbitrum because the gas price estimates are inclusive of L1 // Skipping arbitrum because the gas price estimates are inclusive of L1
// fees which leads to wildly off predictions. // fees which leads to wildly off predictions.
skipTransactionGasLimitFor: [chainMetadata.arbitrum.chainId], skipTransactionGasLimitFor: [chainMetadata.arbitrum.name],
}, },
validators: { validators: {
docker: { docker: {
repo, repo,
tag: 'ed7569d-20230725-171222', tag: 'ed7569d-20230725-171222',
}, },
connectionType: AgentConnectionType.HttpQuorum, rpcConsensusType: RpcConsensusType.Quorum,
chains: validatorChainConfig(Contexts.ReleaseCandidate), chains: validatorChainConfig(Contexts.ReleaseCandidate),
}, },
}; };

@ -1,4 +1,4 @@
import { ChainMap, CoreConfig } from '@hyperlane-xyz/sdk'; import { ChainMap, CoreConfig, HookType } from '@hyperlane-xyz/sdk';
import { objMap } from '@hyperlane-xyz/utils'; import { objMap } from '@hyperlane-xyz/utils';
import { aggregationIsm } from '../../aggregationIsm'; import { aggregationIsm } from '../../aggregationIsm';
@ -9,18 +9,15 @@ import { owners } from './owners';
export const core: ChainMap<CoreConfig> = objMap(owners, (local, owner) => { export const core: ChainMap<CoreConfig> = objMap(owners, (local, owner) => {
const defaultIsm = aggregationIsm('mainnet2', local, Contexts.Hyperlane); const defaultIsm = aggregationIsm('mainnet2', local, Contexts.Hyperlane);
let upgrade: CoreConfig['upgrade'];
if (local === 'arbitrum') { if (local === 'arbitrum') {
return { upgrade = {
owner, timelock: {
defaultIsm, // 7 days in seconds
upgrade: { delay: 7 * 24 * 60 * 60,
timelock: { roles: {
// 7 days in seconds proposer: owner,
delay: 7 * 24 * 60 * 60, executor: owner,
roles: {
proposer: owner,
executor: owner,
},
}, },
}, },
}; };
@ -28,6 +25,13 @@ export const core: ChainMap<CoreConfig> = objMap(owners, (local, owner) => {
return { return {
owner, owner,
upgrade,
defaultIsm, defaultIsm,
defaultHook: {
type: HookType.INTERCHAIN_GAS_PAYMASTER,
},
requiredHook: {
type: HookType.MERKLE_TREE,
},
}; };
}); });

@ -1,4 +1,4 @@
import { AgentConnectionType } from '@hyperlane-xyz/sdk'; import { RpcConsensusType } from '@hyperlane-xyz/sdk';
import { KeyFunderConfig } from '../../../src/config/funding'; import { KeyFunderConfig } from '../../../src/config/funding';
import { Role } from '../../../src/roles'; import { Role } from '../../../src/roles';
@ -23,5 +23,5 @@ export const keyFunderConfig: KeyFunderConfig = {
[Contexts.Hyperlane]: [Role.Relayer, Role.Kathy], [Contexts.Hyperlane]: [Role.Relayer, Role.Kathy],
[Contexts.ReleaseCandidate]: [Role.Relayer, Role.Kathy], [Contexts.ReleaseCandidate]: [Role.Relayer, Role.Kathy],
}, },
connectionType: AgentConnectionType.Http, connectionType: RpcConsensusType.Single,
}; };

@ -1,4 +1,4 @@
import { AgentConnectionType } from '@hyperlane-xyz/sdk'; import { RpcConsensusType } from '@hyperlane-xyz/sdk';
import { HelloWorldConfig } from '../../../src/config'; import { HelloWorldConfig } from '../../../src/config';
import { HelloWorldKathyRunMode } from '../../../src/config/helloworld'; import { HelloWorldKathyRunMode } from '../../../src/config/helloworld';
@ -24,7 +24,7 @@ export const hyperlane: HelloWorldConfig = {
}, },
messageSendTimeout: 1000 * 60 * 8, // 8 min messageSendTimeout: 1000 * 60 * 8, // 8 min
messageReceiptTimeout: 1000 * 60 * 20, // 20 min messageReceiptTimeout: 1000 * 60 * 20, // 20 min
connectionType: AgentConnectionType.HttpFallback, connectionType: RpcConsensusType.Fallback,
cyclesBetweenEthereumMessages: 3, // Skip 3 cycles of Ethereum, i.e. send/receive Ethereum messages every 32 hours. cyclesBetweenEthereumMessages: 3, // Skip 3 cycles of Ethereum, i.e. send/receive Ethereum messages every 32 hours.
}, },
}; };
@ -44,7 +44,7 @@ export const releaseCandidate: HelloWorldConfig = {
}, },
messageSendTimeout: 1000 * 60 * 8, // 8 min messageSendTimeout: 1000 * 60 * 8, // 8 min
messageReceiptTimeout: 1000 * 60 * 20, // 20 min messageReceiptTimeout: 1000 * 60 * 20, // 20 min
connectionType: AgentConnectionType.Http, connectionType: RpcConsensusType.Single,
}, },
}; };

@ -1,4 +1,4 @@
import { AgentConnectionType } from '@hyperlane-xyz/sdk'; import { RpcConsensusType } from '@hyperlane-xyz/sdk';
import { import {
getKeysForRole, getKeysForRole,
@ -25,7 +25,7 @@ export const environment: EnvironmentConfig = {
getMultiProvider: ( getMultiProvider: (
context: Contexts = Contexts.Hyperlane, context: Contexts = Contexts.Hyperlane,
role: Role = Role.Deployer, role: Role = Role.Deployer,
connectionType?: AgentConnectionType, connectionType?: RpcConsensusType,
) => ) =>
getMultiProviderForRole( getMultiProviderForRole(
mainnetConfigs, mainnetConfigs,

@ -1,9 +1,9 @@
import { import {
AgentConnectionType,
BridgeAdapterConfig, BridgeAdapterConfig,
BridgeAdapterType, BridgeAdapterType,
ChainMap, ChainMap,
Chains, Chains,
RpcConsensusType,
chainMetadata, chainMetadata,
} from '@hyperlane-xyz/sdk'; } from '@hyperlane-xyz/sdk';
@ -45,5 +45,5 @@ export const relayerConfig: LiquidityLayerRelayerConfig = {
namespace: environment, namespace: environment,
prometheusPushGateway: prometheusPushGateway:
'http://prometheus-pushgateway.monitoring.svc.cluster.local:9091', 'http://prometheus-pushgateway.monitoring.svc.cluster.local:9091',
connectionType: AgentConnectionType.Http, connectionType: RpcConsensusType.Single,
}; };

@ -1,9 +1,9 @@
import { AgentConnectionType } from '@hyperlane-xyz/sdk';
import { import {
GasPaymentEnforcementPolicyType, GasPaymentEnforcementPolicyType,
RootAgentConfig, RpcConsensusType,
} from '../../../src/config'; } from '@hyperlane-xyz/sdk';
import { RootAgentConfig } from '../../../src/config';
import { ALL_KEY_ROLES } from '../../../src/roles'; import { ALL_KEY_ROLES } from '../../../src/roles';
import { Contexts } from '../../contexts'; import { Contexts } from '../../contexts';
@ -15,7 +15,7 @@ const roleBase = {
repo: 'gcr.io/abacus-labs-dev/hyperlane-agent', repo: 'gcr.io/abacus-labs-dev/hyperlane-agent',
tag: '8852db3d88e87549269487da6da4ea5d67fdbfed', tag: '8852db3d88e87549269487da6da4ea5d67fdbfed',
}, },
connectionType: AgentConnectionType.Http, rpcConsensusType: RpcConsensusType.Single,
} as const; } as const;
const hyperlane: RootAgentConfig = { const hyperlane: RootAgentConfig = {

@ -1,6 +1,7 @@
import { import {
ChainMap, ChainMap,
CoreConfig, CoreConfig,
HookType,
ModuleType, ModuleType,
RoutingIsmConfig, RoutingIsmConfig,
} from '@hyperlane-xyz/sdk'; } from '@hyperlane-xyz/sdk';
@ -24,5 +25,11 @@ export const core: ChainMap<CoreConfig> = objMap(owners, (local, owner) => {
return { return {
owner, owner,
defaultIsm, defaultIsm,
defaultHook: {
type: HookType.INTERCHAIN_GAS_PAYMASTER,
},
requiredHook: {
type: HookType.MERKLE_TREE,
},
}; };
}); });

@ -9,7 +9,7 @@ export const merkleTree: ChainMap<MerkleTreeHookConfig> = objMap(
owners, owners,
(_, __) => { (_, __) => {
const config: MerkleTreeHookConfig = { const config: MerkleTreeHookConfig = {
type: HookType.MERKLE_TREE_HOOK, type: HookType.MERKLE_TREE,
}; };
return config; return config;
}, },

@ -1,5 +1,6 @@
import { import {
AgentConnectionType, GasPaymentEnforcementPolicyType,
RpcConsensusType,
chainMetadata, chainMetadata,
getDomainId, getDomainId,
hyperlaneEnvironments, hyperlaneEnvironments,
@ -7,7 +8,6 @@ import {
import { objMap } from '@hyperlane-xyz/utils'; import { objMap } from '@hyperlane-xyz/utils';
import { import {
GasPaymentEnforcementPolicyType,
RootAgentConfig, RootAgentConfig,
allAgentChainNames, allAgentChainNames,
routerMatchingList, routerMatchingList,
@ -71,7 +71,7 @@ const hyperlane: RootAgentConfig = {
context: Contexts.Hyperlane, context: Contexts.Hyperlane,
rolesWithKeys: ALL_KEY_ROLES, rolesWithKeys: ALL_KEY_ROLES,
relayer: { relayer: {
connectionType: AgentConnectionType.HttpFallback, rpcConsensusType: RpcConsensusType.Fallback,
docker: { docker: {
repo, repo,
tag: 'ed7569d-20230725-171222', tag: 'ed7569d-20230725-171222',
@ -88,7 +88,7 @@ const hyperlane: RootAgentConfig = {
gasPaymentEnforcement, gasPaymentEnforcement,
}, },
validators: { validators: {
connectionType: AgentConnectionType.HttpFallback, rpcConsensusType: RpcConsensusType.Fallback,
docker: { docker: {
repo, repo,
tag: 'ed7569d-20230725-171222', tag: 'ed7569d-20230725-171222',
@ -104,7 +104,7 @@ const hyperlane: RootAgentConfig = {
chains: validatorChainConfig(Contexts.Hyperlane), chains: validatorChainConfig(Contexts.Hyperlane),
}, },
scraper: { scraper: {
connectionType: AgentConnectionType.HttpFallback, rpcConsensusType: RpcConsensusType.Fallback,
docker: { docker: {
repo, repo,
tag: 'aaddba7-20230620-154941', tag: 'aaddba7-20230620-154941',
@ -117,7 +117,7 @@ const releaseCandidate: RootAgentConfig = {
context: Contexts.ReleaseCandidate, context: Contexts.ReleaseCandidate,
rolesWithKeys: [Role.Relayer, Role.Kathy, Role.Validator], rolesWithKeys: [Role.Relayer, Role.Kathy, Role.Validator],
relayer: { relayer: {
connectionType: AgentConnectionType.HttpFallback, rpcConsensusType: RpcConsensusType.Fallback,
docker: { docker: {
repo, repo,
tag: 'c7c44b2-20230811-133851', tag: 'c7c44b2-20230811-133851',
@ -175,10 +175,10 @@ const releaseCandidate: RootAgentConfig = {
transactionGasLimit: 750000, transactionGasLimit: 750000,
// Skipping arbitrum because the gas price estimates are inclusive of L1 // Skipping arbitrum because the gas price estimates are inclusive of L1
// fees which leads to wildly off predictions. // fees which leads to wildly off predictions.
skipTransactionGasLimitFor: [chainMetadata.arbitrumgoerli.chainId], skipTransactionGasLimitFor: [chainMetadata.arbitrumgoerli.name],
}, },
validators: { validators: {
connectionType: AgentConnectionType.HttpFallback, rpcConsensusType: RpcConsensusType.Fallback,
docker: { docker: {
repo, repo,
tag: 'ed7569d-20230725-171222', tag: 'ed7569d-20230725-171222',

@ -1,4 +1,4 @@
import { ChainMap, CoreConfig } from '@hyperlane-xyz/sdk'; import { ChainMap, CoreConfig, HookType } from '@hyperlane-xyz/sdk';
import { objMap } from '@hyperlane-xyz/utils'; import { objMap } from '@hyperlane-xyz/utils';
import { aggregationIsm } from '../../aggregationIsm'; import { aggregationIsm } from '../../aggregationIsm';
@ -11,5 +11,11 @@ export const core: ChainMap<CoreConfig> = objMap(owners, (local, owner) => {
return { return {
owner, owner,
defaultIsm, defaultIsm,
defaultHook: {
type: HookType.INTERCHAIN_GAS_PAYMASTER,
},
requiredHook: {
type: HookType.MERKLE_TREE,
},
}; };
}); });

@ -1,4 +1,4 @@
import { AgentConnectionType } from '@hyperlane-xyz/sdk'; import { RpcConsensusType } from '@hyperlane-xyz/sdk';
import { KeyFunderConfig } from '../../../src/config/funding'; import { KeyFunderConfig } from '../../../src/config/funding';
import { Role } from '../../../src/roles'; import { Role } from '../../../src/roles';
@ -23,5 +23,5 @@ export const keyFunderConfig: KeyFunderConfig = {
[Contexts.Hyperlane]: [Role.Relayer, Role.Kathy], [Contexts.Hyperlane]: [Role.Relayer, Role.Kathy],
[Contexts.ReleaseCandidate]: [Role.Relayer, Role.Kathy], [Contexts.ReleaseCandidate]: [Role.Relayer, Role.Kathy],
}, },
connectionType: AgentConnectionType.HttpQuorum, connectionType: RpcConsensusType.Quorum,
}; };

@ -1,4 +1,4 @@
import { AgentConnectionType } from '@hyperlane-xyz/sdk'; import { RpcConsensusType } from '@hyperlane-xyz/sdk';
import { HelloWorldConfig } from '../../../src/config'; import { HelloWorldConfig } from '../../../src/config';
import { HelloWorldKathyRunMode } from '../../../src/config/helloworld'; import { HelloWorldKathyRunMode } from '../../../src/config/helloworld';
@ -24,7 +24,7 @@ export const hyperlaneHelloworld: HelloWorldConfig = {
}, },
messageSendTimeout: 1000 * 60 * 8, // 8 min messageSendTimeout: 1000 * 60 * 8, // 8 min
messageReceiptTimeout: 1000 * 60 * 20, // 20 min messageReceiptTimeout: 1000 * 60 * 20, // 20 min
connectionType: AgentConnectionType.HttpFallback, connectionType: RpcConsensusType.Fallback,
}, },
}; };
@ -43,7 +43,7 @@ export const releaseCandidateHelloworld: HelloWorldConfig = {
}, },
messageSendTimeout: 1000 * 60 * 8, // 8 min messageSendTimeout: 1000 * 60 * 8, // 8 min
messageReceiptTimeout: 1000 * 60 * 20, // 20 min messageReceiptTimeout: 1000 * 60 * 20, // 20 min
connectionType: AgentConnectionType.Http, connectionType: RpcConsensusType.Single,
}, },
}; };

@ -1,4 +1,4 @@
import { AgentConnectionType } from '@hyperlane-xyz/sdk'; import { RpcConsensusType } from '@hyperlane-xyz/sdk';
import { import {
getKeysForRole, getKeysForRole,
@ -26,7 +26,7 @@ export const environment: EnvironmentConfig = {
getMultiProvider: ( getMultiProvider: (
context: Contexts = Contexts.Hyperlane, context: Contexts = Contexts.Hyperlane,
role: Role = Role.Deployer, role: Role = Role.Deployer,
connectionType?: AgentConnectionType, connectionType?: RpcConsensusType,
) => ) =>
getMultiProviderForRole( getMultiProviderForRole(
testnetConfigs, testnetConfigs,

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save