### Description

Merge v3 to main
pull/2934/head
Yorke Rhodes 1 year ago committed by GitHub
commit 7c3bd9d14a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 2
      .gitattributes
  2. 2
      .github/CODEOWNERS
  3. 1
      .github/workflows/e2e.yml
  4. 21
      .github/workflows/node.yml
  5. 2
      .github/workflows/rust-skipped.yml
  6. 1
      .gitignore
  7. 3
      .gitmodules
  8. 6
      .prettierrc
  9. 785
      .yarn/releases/yarn-3.2.0.cjs
  10. 893
      .yarn/releases/yarn-4.0.1.cjs
  11. 12
      .yarnrc.yml
  12. 6
      Dockerfile
  13. 6
      README.md
  14. 31
      codecov.yml
  15. 22
      package.json
  16. 1
      rust/Cargo.lock
  17. 4
      rust/README.md
  18. 1
      rust/agents/relayer/Cargo.toml
  19. 3
      rust/agents/relayer/src/main.rs
  20. 64
      rust/agents/relayer/src/merkle_tree/builder.rs
  21. 2
      rust/agents/relayer/src/merkle_tree/mod.rs
  22. 101
      rust/agents/relayer/src/merkle_tree/processor.rs
  23. 152
      rust/agents/relayer/src/msg/gas_payment/mod.rs
  24. 2
      rust/agents/relayer/src/msg/gas_payment/policies/minimum.rs
  25. 1
      rust/agents/relayer/src/msg/gas_payment/policies/none.rs
  26. 1
      rust/agents/relayer/src/msg/gas_payment/policies/on_chain_fee_quoting.rs
  27. 48
      rust/agents/relayer/src/msg/metadata/aggregation.rs
  28. 115
      rust/agents/relayer/src/msg/metadata/base.rs
  29. 124
      rust/agents/relayer/src/msg/metadata/multisig/base.rs
  30. 69
      rust/agents/relayer/src/msg/metadata/multisig/legacy_multisig.rs
  31. 63
      rust/agents/relayer/src/msg/metadata/multisig/merkle_root_multisig.rs
  32. 52
      rust/agents/relayer/src/msg/metadata/multisig/message_id_multisig.rs
  33. 2
      rust/agents/relayer/src/msg/metadata/multisig/mod.rs
  34. 150
      rust/agents/relayer/src/msg/processor.rs
  35. 37
      rust/agents/relayer/src/processor.rs
  36. 63
      rust/agents/relayer/src/relayer.rs
  37. 83
      rust/agents/relayer/src/settings/matching_list.rs
  38. 399
      rust/agents/relayer/src/settings/mod.rs
  39. 56
      rust/agents/scraper/migration/src/m20230309_000001_create_table_domain.rs
  40. 25
      rust/agents/scraper/src/chain_scraper/mod.rs
  41. 2
      rust/agents/scraper/src/db/block_cursor.rs
  42. 77
      rust/agents/scraper/src/settings.rs
  43. 129
      rust/agents/validator/src/settings.rs
  44. 327
      rust/agents/validator/src/submit.rs
  45. 96
      rust/agents/validator/src/validator.rs
  46. 2
      rust/build.sh
  47. 2
      rust/chains/hyperlane-ethereum/abis/IAggregationIsm.abi.json
  48. 6
      rust/chains/hyperlane-ethereum/abis/IInterchainGasPaymaster.abi.json
  49. 182
      rust/chains/hyperlane-ethereum/abis/IMailbox.abi.json
  50. 347
      rust/chains/hyperlane-ethereum/abis/Mailbox.abi.json
  51. 156
      rust/chains/hyperlane-ethereum/abis/MerkleTreeHook.abi.json
  52. 95
      rust/chains/hyperlane-ethereum/src/config.rs
  53. 13
      rust/chains/hyperlane-ethereum/src/interchain_gas.rs
  54. 8
      rust/chains/hyperlane-ethereum/src/lib.rs
  55. 138
      rust/chains/hyperlane-ethereum/src/mailbox.rs
  56. 249
      rust/chains/hyperlane-ethereum/src/merkle_tree_hook.rs
  57. 17
      rust/chains/hyperlane-ethereum/src/signers.rs
  58. 27
      rust/chains/hyperlane-ethereum/src/tx.rs
  59. 56
      rust/chains/hyperlane-ethereum/tests/signer_output.rs
  60. 35
      rust/chains/hyperlane-fuel/src/mailbox.rs
  61. 29
      rust/chains/hyperlane-fuel/src/trait_builder.rs
  62. 1
      rust/chains/hyperlane-sealevel/src/interchain_gas.rs
  63. 2
      rust/chains/hyperlane-sealevel/src/lib.rs
  64. 69
      rust/chains/hyperlane-sealevel/src/mailbox.rs
  65. 101
      rust/chains/hyperlane-sealevel/src/merkle_tree_hook.rs
  66. 32
      rust/chains/hyperlane-sealevel/src/trait_builder.rs
  67. 633
      rust/config/mainnet3_config.json
  68. 166
      rust/config/mainnet_config.json
  69. 46
      rust/config/test_sealevel_config.json
  70. 1013
      rust/config/testnet4_config.json
  71. 163
      rust/config/testnet_config.json
  72. 24
      rust/helm/agent-common/templates/_helpers.tpl
  73. 44
      rust/helm/hyperlane-agent/README.md
  74. 9
      rust/helm/hyperlane-agent/templates/_helpers.tpl
  75. 10
      rust/helm/hyperlane-agent/templates/configmap.yaml
  76. 12
      rust/helm/hyperlane-agent/templates/external-secret.yaml
  77. 9
      rust/helm/hyperlane-agent/templates/relayer-external-secret.yaml
  78. 9
      rust/helm/hyperlane-agent/templates/relayer-statefulset.yaml
  79. 2
      rust/helm/hyperlane-agent/templates/scraper-external-secret.yaml
  80. 9
      rust/helm/hyperlane-agent/templates/scraper-statefulset.yaml
  81. 8
      rust/helm/hyperlane-agent/templates/validator-configmap.yaml
  82. 18
      rust/helm/hyperlane-agent/templates/validator-external-secret.yaml
  83. 10
      rust/helm/hyperlane-agent/templates/validator-statefulset.yaml
  84. 37
      rust/helm/hyperlane-agent/values.yaml
  85. 91
      rust/hyperlane-base/src/db/rocks/hyperlane_db.rs
  86. 3
      rust/hyperlane-base/src/db/rocks/storage_types.rs
  87. 4
      rust/hyperlane-base/src/settings/base.rs
  88. 91
      rust/hyperlane-base/src/settings/chains.rs
  89. 435
      rust/hyperlane-base/src/settings/deprecated_parser.rs
  90. 20
      rust/hyperlane-base/src/settings/loader/arguments.rs
  91. 66
      rust/hyperlane-base/src/settings/loader/case_adapter.rs
  92. 343
      rust/hyperlane-base/src/settings/loader/deprecated_arguments.rs
  93. 38
      rust/hyperlane-base/src/settings/loader/environment.rs
  94. 132
      rust/hyperlane-base/src/settings/loader/mod.rs
  95. 32
      rust/hyperlane-base/src/settings/mod.rs
  96. 37
      rust/hyperlane-base/src/settings/parser/json_value_parser.rs
  97. 158
      rust/hyperlane-base/src/settings/parser/mod.rs
  98. 3
      rust/hyperlane-base/src/settings/trace/mod.rs
  99. 16
      rust/hyperlane-base/src/traits/checkpoint_syncer.rs
  100. 47
      rust/hyperlane-base/src/types/local_storage.rs
  101. Some files were not shown because too many files have changed in this diff Show More

2
.gitattributes vendored

@ -0,0 +1,2 @@
typescript/sdk/src/cw-types/*.types.ts linguist-generated=true
rust/chains/hyperlane-ethereum/abis/*.abi.json linguist-generated=true

@ -23,7 +23,7 @@ typescript/token @yorhodes @jmrossy @tkporter @aroralanuk
typescript/helloworld @yorhodes @nambrot
## CLI
typescript/cli @jmrossy @yorhodes
typescript/cli @jmrossy @yorhodes @aroralanuk
## Infra
typescript/infra @tkporter @nambrot

@ -4,6 +4,7 @@ on:
push:
branches: [main]
pull_request:
branches: '*'
workflow_dispatch:
concurrency:

@ -3,10 +3,9 @@ name: node
on:
# Triggers the workflow on push or pull request against main
push:
branches: [main]
branches: [v3]
pull_request:
branches: [main]
branches: [v3]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
@ -50,9 +49,6 @@ jobs:
with:
node-version: 18
- name: Install Foundry
uses: onbjerg/foundry-toolchain@v1
- name: yarn-cache
uses: actions/cache@v3
with:
@ -102,9 +98,6 @@ jobs:
with:
submodules: recursive
- name: Install Foundry
uses: onbjerg/foundry-toolchain@v1
- uses: actions/cache@v3
with:
path: ./*
@ -116,9 +109,6 @@ jobs:
- name: helloworld
run: yarn workspace @hyperlane-xyz/helloworld run test
- name: token
run: yarn workspace @hyperlane-xyz/hyperlane-token run test
- name: infra
run: yarn workspace @hyperlane-xyz/infra run test
@ -145,10 +135,9 @@ jobs:
runs-on: ubuntu-latest
needs: [yarn-build]
strategy:
fail-fast: false
matrix:
environment: [testnet3, mainnet2]
module: [ism, core, igp, ica, helloworld]
environment: [testnet4, mainnet3]
module: [ism, core, helloworld]
steps:
- uses: actions/checkout@v3
@ -200,7 +189,7 @@ jobs:
- name: Unit tests
run: yarn workspace @hyperlane-xyz/core run test
- name: Run Slither
- name: Static analysis
uses: crytic/slither-action@v0.3.0
id: slither
with:

@ -3,7 +3,7 @@ name: rust
on:
pull_request:
branches: [main]
branches: [main, v3]
paths-ignore:
- 'rust/**'

1
.gitignore vendored

@ -28,3 +28,4 @@ yarn-error.log
**/*.ignore
.vscode
tsconfig.editor.json

3
.gitmodules vendored

@ -1,6 +1,3 @@
[submodule "solidity/lib/forge-std"]
path = solidity/lib/forge-std
url = https://github.com/foundry-rs/forge-std
[submodule "typescript/token/lib/forge-std"]
path = typescript/token/lib/forge-std
url = https://github.com/foundry-rs/forge-std

@ -10,12 +10,12 @@
"tabWidth": 4,
"useTabs": false,
"singleQuote": false,
"bracketSpacing": false,
"explicitTypes": "always"
"bracketSpacing": false
}
}
],
"importOrder": ["^@hyperlane-xyz/(.*)$", "^../(.*)$", "^./(.*)$"],
"importOrderSeparation": true,
"importOrderSortSpecifiers": true
"importOrderSortSpecifiers": true,
"plugins": ["prettier-plugin-solidity", "@trivago/prettier-plugin-sort-imports"]
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@ -1,11 +1,13 @@
compressionLevel: mixed
enableGlobalCache: false
enableScripts: false
nodeLinker: node-modules
plugins:
- path: .yarn/plugins/@yarnpkg/plugin-workspace-tools.cjs
spec: "@yarnpkg/plugin-workspace-tools"
- path: .yarn/plugins/@yarnpkg/plugin-outdated.cjs
spec: "https://mskelton.dev/yarn-outdated/v3"
- path: .yarn/plugins/@yarnpkg/plugin-version.cjs
spec: "@yarnpkg/plugin-version"
yarnPath: .yarn/releases/yarn-3.2.0.cjs
yarnPath: .yarn/releases/yarn-4.0.1.cjs

@ -1,11 +1,10 @@
FROM node:16-alpine
FROM node:18-alpine
WORKDIR /hyperlane-monorepo
RUN apk add --update --no-cache git g++ make py3-pip
RUN yarn set version 3.2.0
RUN yarn plugin import workspace-tools
RUN yarn set version 4.0.1
# Copy package.json and friends
COPY package.json yarn.lock .yarnrc.yml ./
@ -14,7 +13,6 @@ COPY .yarn/releases ./.yarn/releases
COPY typescript/utils/package.json ./typescript/utils/
COPY typescript/sdk/package.json ./typescript/sdk/
COPY typescript/helloworld/package.json ./typescript/helloworld/
COPY typescript/token/package.json ./typescript/token/
COPY typescript/cli/package.json ./typescript/cli/
COPY typescript/infra/package.json ./typescript/infra/
COPY solidity/package.json ./solidity/

@ -12,9 +12,11 @@
## Versioning
Note this is the branch for Hyperlane v2.
Note this is the branch for Hyperlane v3.
V1 has since been deprecated in favor of V2, but if you are looking for code relating to the existing V1 deployments of the `testnet2` or `mainnet` environments, refer to the [v1](https://github.com/hyperlane-xyz/hyperlane-monorepo/tree/v1) branch.
V2 is still in operation but is not being actively developed. The code for V2 can be found in the [v2](https://github.com/hyperlane-xyz/hyperlane-monorepo/tree/v2) branch.
V1 has since been deprecated in favor of V2, but if you are looking for code relating to the existing V1 deployments, refer to the [v1](https://github.com/hyperlane-xyz/hyperlane-monorepo/tree/v1) branch.
## Overview

@ -0,0 +1,31 @@
comment:
layout: "header, diff, flags, components" # show component info in the PR comment
component_management:
default_rules: # default rules that will be inherited by all components
statuses:
- type: project # in this case every component that doesn't have a status defined will have a project type one
target: auto
branches:
- "!main"
individual_components:
- component_id: module_core
name: core
paths:
- solidity/contracts/Mailbox.sol
- component_id: module_hooks
name: hooks
paths:
- solidity/contracts/hooks/**
- component_id: module_isms
name: isms
paths:
- solidity/contracts/isms/**
- component_id: module_token
name: token
paths:
- solidity/contracts/token/**
- component_id: module_middlewares
name: middlewares
paths:
- solidity/contracts/middleware/**

@ -3,7 +3,7 @@
"description": "A yarn workspace of core Hyperlane packages",
"version": "0.0.0",
"devDependencies": {
"@trivago/prettier-plugin-sort-imports": "^4.2.0",
"@trivago/prettier-plugin-sort-imports": "^4.2.1",
"@typescript-eslint/eslint-plugin": "^5.62.0",
"@typescript-eslint/parser": "^5.62.0",
"eslint": "^8.43.0",
@ -12,19 +12,19 @@
"lint-staged": "^12.4.3",
"prettier": "^2.8.8"
},
"packageManager": "yarn@3.2.0",
"packageManager": "yarn@4.0.1",
"private": true,
"scripts": {
"build": "yarn workspaces foreach --verbose --parallel --topological run build",
"clean": "yarn workspaces foreach --verbose --parallel run clean",
"build": "yarn workspaces foreach --all --parallel --topological run build",
"clean": "yarn workspaces foreach --all --parallel run clean",
"prettier": "yarn workspaces foreach --all --parallel run prettier",
"lint": "yarn workspaces foreach --all --parallel run lint",
"test": "yarn workspaces foreach --all --parallel run test",
"coverage": "yarn workspaces foreach --all --parallel run coverage",
"version:prepare": "yarn workspaces foreach --all --no-private --topological version --immediate",
"publish:all": "yarn workspaces foreach --all --no-private --topological npm publish --access public",
"postinstall": "husky install",
"prettier": "yarn workspaces foreach --verbose --parallel run prettier",
"lint": "yarn workspaces foreach --verbose --parallel run lint",
"test": "yarn workspaces foreach --verbose --parallel run test",
"coverage": "yarn workspaces foreach --verbose --parallel run coverage",
"version:check": "yarn version check --interactive",
"version:prepare": "yarn workspaces foreach --no-private --verbose --topological version --immediate",
"publish:all": "yarn workspaces foreach --no-private --verbose --topological npm publish --access public"
"version:check": "yarn version check --interactive"
},
"workspaces": [
"solidity",

1
rust/Cargo.lock generated

@ -6225,6 +6225,7 @@ version = "0.1.0"
dependencies = [
"async-trait",
"config",
"convert_case 0.6.0",
"derive-new",
"derive_more",
"enum_dispatch",

@ -63,9 +63,9 @@ kubectl cp testnet3/fuji-hyperlane-agent-validator-0:/usr/share/hyperlane /tmp/f
Configure additional env variables appropriately:
```bash
HYP_BASE_DB=/tmp/fuji-validator-db
HYP_DB=/tmp/fuji-validator-db
CONFIG_FILES=./config/testnet_config.json
HYP_BASE_TRACING_FMT=pretty
HYP_TRACING_FMT=pretty
DATABASE_URL=<READ_REPLICA_POSTGRES_URL> # for scraper
```

@ -12,6 +12,7 @@ version.workspace = true
[dependencies]
async-trait.workspace = true
config.workspace = true
convert_case.workspace = true
derive-new.workspace = true
derive_more.workspace = true
enum_dispatch.workspace = true

@ -13,8 +13,9 @@ use hyperlane_base::agent_main;
use crate::relayer::Relayer;
mod merkle_tree_builder;
mod merkle_tree;
mod msg;
mod processor;
mod prover;
mod relayer;
mod settings;

@ -1,9 +1,9 @@
use std::fmt::Display;
use eyre::Result;
use eyre::{Context, Result};
use tracing::{debug, error, instrument};
use hyperlane_base::db::{DbError, HyperlaneRocksDB};
use hyperlane_base::db::DbError;
use hyperlane_core::{
accumulator::{incremental::IncrementalMerkle, merkle::Proof},
ChainCommunicationError, H256,
@ -14,7 +14,6 @@ use crate::prover::{Prover, ProverError};
/// Struct to sync prover.
#[derive(Debug)]
pub struct MerkleTreeBuilder {
db: HyperlaneRocksDB,
prover: Prover,
incremental: IncrementalMerkle,
}
@ -50,12 +49,6 @@ pub enum MerkleTreeBuilderError {
/// Root of the incremental merkle tree
incremental_root: H256,
},
/// Nonce was not found in DB, despite batch providing messages after
#[error("Nonce was not found {nonce:?}")]
UnavailableNonce {
/// Root of prover's local merkle tree
nonce: u32,
},
/// MerkleTreeBuilder attempts Prover operation and receives ProverError
#[error(transparent)]
ProverError(#[from] ProverError),
@ -68,13 +61,12 @@ pub enum MerkleTreeBuilderError {
}
impl MerkleTreeBuilder {
pub fn new(db: HyperlaneRocksDB) -> Self {
pub fn new() -> Self {
let prover = Prover::default();
let incremental = IncrementalMerkle::default();
Self {
prover,
incremental,
db,
}
}
@ -86,49 +78,25 @@ impl MerkleTreeBuilder {
) -> Result<Proof, MerkleTreeBuilderError> {
self.prover
.prove_against_previous(leaf_index as usize, root_index as usize)
.map_err(Into::into)
}
fn ingest_nonce(&mut self, nonce: u32) -> Result<(), MerkleTreeBuilderError> {
match self.db.retrieve_message_id_by_nonce(&nonce) {
Ok(Some(leaf)) => {
debug!(nonce, "Ingesting leaf");
self.prover.ingest(leaf).expect("!tree full");
self.incremental.ingest(leaf);
assert_eq!(self.prover.root(), self.incremental.root());
Ok(())
}
Ok(None) => {
error!("We should not arrive here");
Err(MerkleTreeBuilderError::UnavailableNonce { nonce })
}
Err(e) => Err(e.into()),
}
.map_err(MerkleTreeBuilderError::from)
}
pub fn count(&self) -> u32 {
self.prover.count() as u32
}
#[instrument(err, skip(self), level = "debug")]
pub async fn update_to_index(&mut self, index: u32) -> Result<(), MerkleTreeBuilderError> {
if index >= self.count() {
let starting_index = self.prover.count() as u32;
for i in starting_index..=index {
self.db.wait_for_message_nonce(i).await?;
self.ingest_nonce(i)?;
}
let prover_root = self.prover.root();
let incremental_root = self.incremental.root();
if prover_root != incremental_root {
return Err(MerkleTreeBuilderError::MismatchedRoots {
prover_root,
incremental_root,
});
}
pub async fn ingest_message_id(&mut self, message_id: H256) -> Result<()> {
const CTX: &str = "When ingesting message id";
debug!(?message_id, "Ingesting leaf");
self.prover.ingest(message_id).expect("tree full");
self.incremental.ingest(message_id);
match self.prover.root().eq(&self.incremental.root()) {
true => Ok(()),
false => Err(MerkleTreeBuilderError::MismatchedRoots {
prover_root: self.prover.root(),
incremental_root: self.incremental.root(),
}),
}
Ok(())
.context(CTX)
}
}

@ -0,0 +1,2 @@
/// Incremental merkle tree builder that mirrors on-chain insertions locally.
pub(crate) mod builder;
/// Processor task that feeds unprocessed merkle tree insertions into the builder.
pub(crate) mod processor;

@ -0,0 +1,101 @@
use std::{
fmt::{Debug, Formatter},
sync::Arc,
time::Duration,
};
use async_trait::async_trait;
use derive_new::new;
use eyre::Result;
use hyperlane_base::db::HyperlaneRocksDB;
use hyperlane_core::{HyperlaneDomain, MerkleTreeInsertion};
use prometheus::IntGauge;
use tokio::sync::RwLock;
use tracing::debug;
use crate::processor::ProcessorExt;
use super::builder::MerkleTreeBuilder;
/// Finds unprocessed merkle tree insertions and adds them to the prover sync
#[derive(new)]
pub struct MerkleTreeProcessor {
    /// DB handle from which merkle tree insertions are read (also provides the domain).
    db: HyperlaneRocksDB,
    /// Prometheus metrics updated as leaves are observed.
    metrics: MerkleTreeProcessorMetrics,
    /// Shared prover sync that ingests the message ids of processed insertions.
    prover_sync: Arc<RwLock<MerkleTreeBuilder>>,
    /// Index of the next leaf to process; starts at 0 via `#[new(default)]`.
    #[new(default)]
    leaf_index: u32,
}
impl Debug for MerkleTreeProcessor {
    /// Renders only the processing cursor; the remaining fields (db handle,
    /// metrics, prover sync) are omitted from the debug representation.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let rendered = format!("MerkleTreeProcessor {{ leaf_index: {:?} }}", self.leaf_index);
        f.write_str(&rendered)
    }
}
#[async_trait]
impl ProcessorExt for MerkleTreeProcessor {
    /// The domain this processor is getting merkle tree hook insertions from.
    fn domain(&self) -> &HyperlaneDomain {
        self.db.domain()
    }

    /// One round of processing, extracted from infinite work loop for
    /// testing purposes.
    ///
    /// Reads the insertion at the current leaf index (if indexed) and feeds
    /// its message id to the shared prover sync, then advances the cursor.
    async fn tick(&mut self) -> Result<()> {
        if let Some(insertion) = self.next_unprocessed_leaf()? {
            // Feed the message to the prover sync
            self.prover_sync
                .write()
                .await
                .ingest_message_id(insertion.message_id())
                .await?;

            // Increase the leaf index to move on to the next leaf
            self.leaf_index += 1;
        } else {
            // Nothing new in the DB yet; back off before polling again.
            tokio::time::sleep(Duration::from_secs(1)).await;
        }
        Ok(())
    }
}
impl MerkleTreeProcessor {
    /// Looks up the merkle tree insertion stored for `self.leaf_index`.
    ///
    /// Returns `Ok(None)` when the insertion has not been indexed into the DB
    /// yet; the caller is expected to retry later. On a hit, the max-leaf-index
    /// gauge is updated as a side effect.
    fn next_unprocessed_leaf(&mut self) -> Result<Option<MerkleTreeInsertion>> {
        let leaf = if let Some(insertion) = self
            .db
            .retrieve_merkle_tree_insertion_by_leaf_index(&self.leaf_index)?
        {
            // Update the metrics
            self.metrics
                .max_leaf_index_gauge
                .set(insertion.index() as i64);
            Some(insertion)
        } else {
            debug!(leaf_index=?self.leaf_index, "No message found in DB for leaf index");
            None
        };
        Ok(leaf)
    }
}
/// Prometheus metrics tracked by a [`MerkleTreeProcessor`].
#[derive(Debug)]
pub struct MerkleTreeProcessorMetrics {
    // Highest merkle tree leaf index observed so far.
    max_leaf_index_gauge: IntGauge,
}
impl MerkleTreeProcessorMetrics {
    /// Creates the metrics set for a merkle tree processor.
    ///
    /// Panics if the gauge definition is rejected by prometheus, which only
    /// happens for malformed name/help strings (a programming error here).
    pub fn new() -> Self {
        let max_leaf_index_gauge =
            IntGauge::new("max_leaf_index_gauge", "The max merkle tree leaf index").unwrap();
        Self {
            max_leaf_index_gauge,
        }
    }
}

@ -2,20 +2,20 @@ use std::fmt::Debug;
use async_trait::async_trait;
use eyre::Result;
use tracing::{debug, error, trace};
use hyperlane_base::db::HyperlaneRocksDB;
use hyperlane_core::{
HyperlaneMessage, InterchainGasExpenditure, InterchainGasPayment, TxCostEstimate, TxOutcome,
U256,
};
use crate::msg::gas_payment::policies::GasPaymentPolicyOnChainFeeQuoting;
use crate::settings::{
matching_list::MatchingList, GasPaymentEnforcementConf, GasPaymentEnforcementPolicy,
GasPaymentKey, HyperlaneMessage, InterchainGasExpenditure, InterchainGasPayment,
TxCostEstimate, TxOutcome, U256,
};
use tracing::{debug, error, trace};
use self::policies::{GasPaymentPolicyMinimum, GasPaymentPolicyNone};
use crate::{
msg::gas_payment::policies::GasPaymentPolicyOnChainFeeQuoting,
settings::{
matching_list::MatchingList, GasPaymentEnforcementConf, GasPaymentEnforcementPolicy,
},
};
mod policies;
@ -44,6 +44,8 @@ pub struct GasPaymentEnforcer {
}
impl GasPaymentEnforcer {
/// Note that `policy_configs` should not be empty. In the settings,
/// a default of vec![GasPaymentEnforcementConf::default()] is used.
pub fn new(
policy_configs: impl IntoIterator<Item = GasPaymentEnforcementConf>,
db: HyperlaneRocksDB,
@ -78,7 +80,13 @@ impl GasPaymentEnforcer {
tx_cost_estimate: &TxCostEstimate,
) -> Result<Option<U256>> {
let msg_id = message.id();
let current_payment = self.db.retrieve_gas_payment_by_message_id(msg_id)?;
let gas_payment_key = GasPaymentKey {
message_id: msg_id,
destination: message.destination,
};
let current_payment = self
.db
.retrieve_gas_payment_by_gas_payment_key(gas_payment_key)?;
let current_expenditure = self.db.retrieve_gas_expenditure_by_message_id(msg_id)?;
for (policy, whitelist) in &self.policies {
if !whitelist.msg_matches(message, true) {
@ -137,14 +145,16 @@ mod test {
use std::str::FromStr;
use hyperlane_base::db::{test_utils, HyperlaneRocksDB};
use hyperlane_core::{HyperlaneDomain, HyperlaneMessage, TxCostEstimate, H160, H256, U256};
use hyperlane_core::{
HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, LogMeta, TxCostEstimate, H160,
H256, U256,
};
use super::GasPaymentEnforcer;
use crate::settings::{
matching_list::MatchingList, GasPaymentEnforcementConf, GasPaymentEnforcementPolicy,
};
use super::GasPaymentEnforcer;
#[tokio::test]
async fn test_empty_whitelist() {
test_utils::run_test_db(|db| async move {
@ -186,7 +196,7 @@ mod test {
test_utils::run_test_db(|db| async move {
let hyperlane_db =
HyperlaneRocksDB::new(&HyperlaneDomain::new_test_domain("test_no_match"), db);
let matching_list = serde_json::from_str(r#"[{"originDomain": 234}]"#).unwrap();
let matching_list = serde_json::from_str(r#"[{"origindomain": 234}]"#).unwrap();
let enforcer = GasPaymentEnforcer::new(
// Require a payment
vec![GasPaymentEnforcementConf {
@ -209,6 +219,118 @@ mod test {
.await;
}
#[tokio::test]
async fn test_different_destinations() {
    // `process_gas_payment` results are intentionally ignored; the assertions
    // on the enforcer below are what validate behavior.
    #[allow(unused_must_use)]
    test_utils::run_test_db(|db| async move {
        let msg = HyperlaneMessage {
            destination: 123,
            ..HyperlaneMessage::default()
        };
        let hyperlane_db = HyperlaneRocksDB::new(
            &HyperlaneDomain::new_test_domain("test_different_destinations"),
            db,
        );
        // Policy: require at least a payment of 1 for any message.
        let enforcer = GasPaymentEnforcer::new(
            vec![GasPaymentEnforcementConf {
                policy: GasPaymentEnforcementPolicy::Minimum {
                    payment: U256::one(),
                },
                matching_list: MatchingList::default(),
            }],
            hyperlane_db.clone(),
        );

        // Payment recorded against a destination (456) that differs from the
        // message's destination (123).
        let wrong_destination_payment = InterchainGasPayment {
            message_id: msg.id(),
            destination: 456,
            payment: U256::one(),
            gas_amount: U256::one(),
        };
        hyperlane_db.process_gas_payment(wrong_destination_payment, &LogMeta::random());
        // Ensure if the gas payment was made to the incorrect destination, it does not meet
        // the requirement
        assert!(enforcer
            .message_meets_gas_payment_requirement(&msg, &TxCostEstimate::default(),)
            .await
            .unwrap()
            .is_none());

        let correct_destination_payment = InterchainGasPayment {
            message_id: msg.id(),
            destination: msg.destination,
            payment: U256::one(),
            gas_amount: U256::one(),
        };
        hyperlane_db.process_gas_payment(correct_destination_payment, &LogMeta::random());
        // Ensure if the gas payment was made to the correct destination, it meets the
        // requirement
        assert!(enforcer
            .message_meets_gas_payment_requirement(&msg, &TxCostEstimate::default(),)
            .await
            .unwrap()
            .is_some());
    })
    .await;
}
#[tokio::test]
async fn test_half_and_half_payment() {
    // `process_gas_payment` results are intentionally ignored; the assertions
    // on the enforcer below are what validate behavior.
    #[allow(unused_must_use)]
    test_utils::run_test_db(|db| async move {
        let msg = HyperlaneMessage {
            destination: 123,
            ..HyperlaneMessage::default()
        };
        let hyperlane_db = HyperlaneRocksDB::new(
            &HyperlaneDomain::new_test_domain("test_half_and_half_payment"),
            db,
        );
        // Policy: require a total payment of at least 2, so a single payment
        // of 1 is insufficient but two payments of 1 accumulate to enough.
        let enforcer = GasPaymentEnforcer::new(
            vec![GasPaymentEnforcementConf {
                policy: GasPaymentEnforcementPolicy::Minimum {
                    payment: U256::from(2),
                },
                matching_list: MatchingList::default(),
            }],
            hyperlane_db.clone(),
        );

        let initial_payment = InterchainGasPayment {
            message_id: msg.id(),
            destination: msg.destination,
            payment: U256::one(),
            gas_amount: U256::one(),
        };
        hyperlane_db.process_gas_payment(initial_payment, &LogMeta::random());
        // Ensure if only half gas payment was made, it does not meet the requirement
        assert!(enforcer
            .message_meets_gas_payment_requirement(&msg, &TxCostEstimate::default(),)
            .await
            .unwrap()
            .is_none());

        let deficit_payment = InterchainGasPayment {
            message_id: msg.id(),
            destination: msg.destination,
            payment: U256::one(),
            gas_amount: U256::one(),
        };
        hyperlane_db.process_gas_payment(deficit_payment, &LogMeta::random());
        // Ensure if the full gas payment was made, it meets the requirement
        assert!(enforcer
            .message_meets_gas_payment_requirement(&msg, &TxCostEstimate::default(),)
            .await
            .unwrap()
            .is_some());
    })
    .await;
}
#[tokio::test]
async fn test_non_empty_matching_list() {
test_utils::run_test_db(|db| async move {
@ -218,7 +340,7 @@ mod test {
let recipient_address = "0xbb000000000000000000000000000000000000bb";
let matching_list = serde_json::from_str(
&format!(r#"[{{"senderAddress": "{sender_address}", "recipientAddress": "{recipient_address}"}}]"#)
&format!(r#"[{{"senderaddress": "{sender_address}", "recipientaddress": "{recipient_address}"}}]"#)
).unwrap();
let enforcer = GasPaymentEnforcer::new(

@ -41,6 +41,7 @@ async fn test_gas_payment_policy_minimum() {
// If the payment is less than the minimum, returns false
let current_payment = InterchainGasPayment {
message_id: H256::zero(),
destination: message.destination,
payment: U256::from(999u32),
gas_amount: U256::zero(),
};
@ -70,6 +71,7 @@ async fn test_gas_payment_policy_minimum() {
// If the payment is at least the minimum, returns false
let current_payment = InterchainGasPayment {
message_id: H256::zero(),
destination: message.destination,
payment: U256::from(1000u32),
gas_amount: U256::zero(),
};

@ -33,6 +33,7 @@ async fn test_gas_payment_policy_none() {
let current_payment = InterchainGasPayment {
message_id: H256::zero(),
destination: message.destination,
payment: U256::zero(),
gas_amount: U256::zero(),
};

@ -70,6 +70,7 @@ mod test {
fn current_payment(gas_amount: impl Into<U256>) -> InterchainGasPayment {
InterchainGasPayment {
message_id: H256::zero(),
destination: 0,
payment: U256::zero(),
gas_amount: gas_amount.into(),
}

@ -4,9 +4,10 @@ use futures_util::future::join_all;
use derive_new::new;
use eyre::Context;
use itertools::{Either, Itertools};
use tracing::{info, instrument};
use hyperlane_core::{HyperlaneMessage, InterchainSecurityModule, H256, U256};
use hyperlane_core::{HyperlaneMessage, InterchainSecurityModule, ModuleType, H256, U256};
use super::{BaseMetadataBuilder, MetadataBuilder};
@ -90,6 +91,7 @@ impl AggregationIsmMetadataBuilder {
sub_modules: Vec<IsmAndMetadata>,
message: &HyperlaneMessage,
threshold: usize,
err_isms: Vec<(H256, Option<ModuleType>)>,
) -> Option<Vec<SubModuleMetadata>> {
let gas_cost_results: Vec<_> = join_all(
sub_modules
@ -97,8 +99,7 @@ impl AggregationIsmMetadataBuilder {
.map(|module| module.ism.dry_run_verify(message, &(module.meta.metadata))),
)
.await;
// Filter out the ISMs without a gas cost estimate
// Filter out the ISMs with a gas cost estimate
let metas_and_gas: Vec<_> = sub_modules
.into_iter()
.zip(gas_cost_results.into_iter())
@ -107,7 +108,7 @@ impl AggregationIsmMetadataBuilder {
let metas_and_gas_count = metas_and_gas.len();
if metas_and_gas_count < threshold {
info!("Could not fetch all metadata: Found {metas_and_gas_count} of the {threshold} required ISM metadata pieces");
info!(?err_isms, %metas_and_gas_count, %threshold, message_id=message.id().to_string(), "Could not fetch all metadata, ISM metadata count did not reach aggregation threshold");
return None;
}
Some(Self::n_cheapest_metas(metas_and_gas, threshold))
@ -127,34 +128,33 @@ impl MetadataBuilder for AggregationIsmMetadataBuilder {
let (ism_addresses, threshold) = ism.modules_and_threshold(message).await.context(CTX)?;
let threshold = threshold as usize;
let metas = join_all(
ism_addresses
.iter()
.map(|ism_address| self.base.build(*ism_address, message)),
)
.await;
let sub_modules = join_all(
let sub_modules_and_metas = join_all(
ism_addresses
.iter()
.map(|ism_address| self.base.build_ism(*ism_address)),
.map(|ism_address| self.base.build_ism_and_metadata(*ism_address, message)),
)
.await;
let filtered_sub_module_metas = metas
// Partitions things into
// 1. ok_sub_modules: ISMs with metadata with valid metadata
// 2. err_sub_modules: ISMs with invalid metadata
let (ok_sub_modules, err_sub_modules): (Vec<_>, Vec<_>) = sub_modules_and_metas
.into_iter()
.zip(ism_addresses.iter())
.enumerate()
.zip(sub_modules.into_iter())
.filter_map(|((index, meta_result), sub_module_result)| {
match (meta_result, sub_module_result) {
(Ok(Some(meta)), Ok(ism)) => Some(IsmAndMetadata::new(ism, index, meta)),
_ => None,
}
})
.collect();
.partition_map(|(index, (result, ism_address))| match result {
Ok(sub_module_and_meta) => match sub_module_and_meta.metadata {
Some(metadata) => Either::Left(IsmAndMetadata::new(
sub_module_and_meta.ism,
index,
metadata,
)),
None => Either::Right((*ism_address, Some(sub_module_and_meta.module_type))),
},
Err(_) => Either::Right((*ism_address, None)),
});
let maybe_aggregation_metadata =
Self::cheapest_valid_metas(filtered_sub_module_metas, message, threshold)
Self::cheapest_valid_metas(ok_sub_modules, message, threshold, err_sub_modules)
.await
.map(|mut metas| Self::format_metadata(&mut metas, ism_addresses.len()));
Ok(maybe_aggregation_metadata)

@ -1,8 +1,17 @@
use std::{collections::HashMap, fmt::Debug, str::FromStr, sync::Arc};
use crate::{
merkle_tree::builder::MerkleTreeBuilder,
msg::metadata::{
multisig::{MerkleRootMultisigMetadataBuilder, MessageIdMultisigMetadataBuilder},
AggregationIsmMetadataBuilder, CcipReadIsmMetadataBuilder, NullMetadataBuilder,
RoutingIsmMetadataBuilder,
},
};
use async_trait::async_trait;
use derive_new::new;
use eyre::{Context, Result};
use hyperlane_base::db::HyperlaneRocksDB;
use hyperlane_base::{
settings::{ChainConf, CheckpointSyncerConf},
CheckpointSyncer, CoreMetrics, MultisigCheckpointSyncer,
@ -15,18 +24,6 @@ use hyperlane_core::{
use tokio::sync::RwLock;
use tracing::{debug, info, instrument, warn};
use crate::{
merkle_tree_builder::MerkleTreeBuilder,
msg::metadata::{
multisig::{
LegacyMultisigMetadataBuilder, MerkleRootMultisigMetadataBuilder,
MessageIdMultisigMetadataBuilder,
},
AggregationIsmMetadataBuilder, CcipReadIsmMetadataBuilder, NullMetadataBuilder,
RoutingIsmMetadataBuilder,
},
};
#[derive(Debug, thiserror::Error)]
pub enum MetadataBuilderError {
#[error("Unknown or invalid module type ({0})")]
@ -35,6 +32,12 @@ pub enum MetadataBuilderError {
MaxDepthExceeded(u32),
}
pub struct IsmWithMetadataAndType {
pub ism: Box<dyn InterchainSecurityModule>,
pub metadata: Option<Vec<u8>>,
pub module_type: ModuleType,
}
#[async_trait]
pub trait MetadataBuilder: Send + Sync {
#[allow(clippy::async_yields_async)]
@ -49,6 +52,7 @@ pub struct BaseMetadataBuilder {
origin_validator_announce: Arc<dyn ValidatorAnnounce>,
allow_local_checkpoint_syncers: bool,
metrics: Arc<CoreMetrics>,
db: HyperlaneRocksDB,
/// ISMs can be structured recursively. We keep track of the depth
/// of the recursion to avoid infinite loops.
#[new(default)]
@ -74,32 +78,9 @@ impl MetadataBuilder for BaseMetadataBuilder {
ism_address: H256,
message: &HyperlaneMessage,
) -> Result<Option<Vec<u8>>> {
let ism = self
.build_ism(ism_address)
.await
.context("When building ISM")?;
let module_type = ism
.module_type()
.await
.context("When fetching module type")?;
let base = self.clone_with_incremented_depth()?;
let metadata_builder: Box<dyn MetadataBuilder> = match module_type {
ModuleType::LegacyMultisig => Box::new(LegacyMultisigMetadataBuilder::new(base)),
ModuleType::MerkleRootMultisig => {
Box::new(MerkleRootMultisigMetadataBuilder::new(base))
}
ModuleType::MessageIdMultisig => Box::new(MessageIdMultisigMetadataBuilder::new(base)),
ModuleType::Routing => Box::new(RoutingIsmMetadataBuilder::new(base)),
ModuleType::Aggregation => Box::new(AggregationIsmMetadataBuilder::new(base)),
ModuleType::Null => Box::new(NullMetadataBuilder::new()),
ModuleType::CcipRead => Box::new(CcipReadIsmMetadataBuilder::new(base)),
_ => return Err(MetadataBuilderError::UnsupportedModuleType(module_type).into()),
};
metadata_builder
.build(ism_address, message)
self.build_ism_and_metadata(ism_address, message)
.await
.context("When building metadata")
.map(|ism_with_metadata| ism_with_metadata.metadata)
}
}
@ -118,31 +99,34 @@ impl BaseMetadataBuilder {
}
}
pub async fn get_proof(&self, nonce: u32, checkpoint: Checkpoint) -> Result<Option<Proof>> {
pub async fn get_proof(&self, leaf_index: u32, checkpoint: Checkpoint) -> Result<Proof> {
const CTX: &str = "When fetching message proof";
let proof = self
.origin_prover_sync
.read()
.await
.get_proof(nonce, checkpoint.index)
.get_proof(leaf_index, checkpoint.index)
.context(CTX)?;
// checkpoint may be fraudulent if the root does not
// match the canonical root at the checkpoint's index
if proof.root() != checkpoint.root {
info!(
?checkpoint,
canonical_root = ?proof.root(),
"Could not fetch metadata: checkpoint root does not match canonical root from merkle proof"
);
Ok(None)
} else {
Ok(Some(proof))
}
Ok(proof)
}
/// Returns the highest merkle leaf index known to the origin prover sync
/// (`count() - 1`), or `None` when the in-memory tree is still empty —
/// `checked_sub` avoids underflowing on a zero count.
pub async fn highest_known_leaf_index(&self) -> Option<u32> {
    self.origin_prover_sync.read().await.count().checked_sub(1)
}
pub async fn highest_known_nonce(&self) -> u32 {
self.origin_prover_sync.read().await.count() - 1
/// Looks up the merkle tree leaf index recorded in the local DB for
/// `message_id`. Returns `Ok(None)` when the corresponding tree insertion
/// has not been indexed yet.
pub async fn get_merkle_leaf_id_by_message_id(&self, message_id: H256) -> Result<Option<u32>> {
    Ok(self
        .db
        .retrieve_merkle_leaf_index_by_message_id(&message_id)?)
}
pub async fn build_ism(&self, address: H256) -> Result<Box<dyn InterchainSecurityModule>> {
@ -239,4 +223,43 @@ impl BaseMetadataBuilder {
}
Ok(MultisigCheckpointSyncer::new(checkpoint_syncers))
}
/// Builds the ISM deployed at `ism_address` and the metadata `message`
/// needs in order to be verified by it.
///
/// Dispatches to a module-type-specific metadata builder. ISMs can be
/// nested (routing / aggregation), so each nested builder is created from
/// a depth-incremented clone of `self` to bound recursion.
#[instrument(err, skip(self), fields(domain=self.domain().name()))]
pub async fn build_ism_and_metadata(
    &self,
    ism_address: H256,
    message: &HyperlaneMessage,
) -> Result<IsmWithMetadataAndType> {
    let ism: Box<dyn InterchainSecurityModule> = self
        .build_ism(ism_address)
        .await
        .context("When building ISM")?;
    let module_type = ism
        .module_type()
        .await
        .context("When fetching module type")?;
    // Bumps the recursion depth; presumably errors once the max depth is
    // exceeded — see `MetadataBuilderError::MaxDepthExceeded`.
    let base = self.clone_with_incremented_depth()?;
    let metadata_builder: Box<dyn MetadataBuilder> = match module_type {
        ModuleType::MerkleRootMultisig => {
            Box::new(MerkleRootMultisigMetadataBuilder::new(base))
        }
        ModuleType::MessageIdMultisig => Box::new(MessageIdMultisigMetadataBuilder::new(base)),
        ModuleType::Routing => Box::new(RoutingIsmMetadataBuilder::new(base)),
        ModuleType::Aggregation => Box::new(AggregationIsmMetadataBuilder::new(base)),
        ModuleType::Null => Box::new(NullMetadataBuilder::new()),
        ModuleType::CcipRead => Box::new(CcipReadIsmMetadataBuilder::new(base)),
        // Any module type without a builder here is unsupported.
        _ => return Err(MetadataBuilderError::UnsupportedModuleType(module_type).into()),
    };
    let meta = metadata_builder
        .build(ism_address, message)
        .await
        .context("When building metadata");
    Ok(IsmWithMetadataAndType {
        ism,
        metadata: meta?,
        module_type,
    })
}
}

@ -1,38 +1,38 @@
use std::collections::HashMap;
use std::fmt::Debug;
use async_trait::async_trait;
use derive_more::{AsRef, Deref};
use derive_new::new;
use ethers::abi::Token;
use eyre::{Context, Result};
use hyperlane_base::MultisigCheckpointSyncer;
use hyperlane_core::accumulator::merkle::Proof;
use hyperlane_core::{Checkpoint, HyperlaneMessage, SignatureWithSigner, H256};
use hyperlane_core::{HyperlaneMessage, MultisigSignedCheckpoint, H256};
use strum::Display;
use tracing::{debug, info};
use crate::msg::metadata::BaseMetadataBuilder;
use crate::msg::metadata::MetadataBuilder;
#[derive(new)]
#[derive(new, AsRef, Deref)]
pub struct MultisigMetadata {
checkpoint: Checkpoint,
signatures: Vec<SignatureWithSigner>,
message_id: Option<H256>,
#[deref]
quorum_checkpoint: MultisigSignedCheckpoint,
merkle_leaf_index: u32,
// optional because it's only used for MerkleRootMultisig
proof: Option<Proof>,
}
#[derive(Debug, Display, PartialEq, Eq, Clone)]
pub enum MetadataToken {
CheckpointRoot,
CheckpointMerkleRoot,
CheckpointIndex,
CheckpointMailbox,
CheckpointMerkleTreeHook,
MessageId,
MerkleProof,
Threshold,
MessageMerkleLeafIndex,
Signatures,
Validators,
}
#[async_trait]
@ -47,45 +47,46 @@ pub trait MultisigIsmMetadataBuilder: AsRef<BaseMetadataBuilder> + Send + Sync {
fn token_layout(&self) -> Vec<MetadataToken>;
fn format_metadata(
&self,
validators: &[H256],
threshold: u8,
metadata: MultisigMetadata,
) -> Vec<u8> {
let build_token = |token: &MetadataToken| match token {
MetadataToken::CheckpointRoot => metadata.checkpoint.root.to_fixed_bytes().into(),
MetadataToken::CheckpointIndex => metadata.checkpoint.index.to_be_bytes().into(),
MetadataToken::CheckpointMailbox => {
metadata.checkpoint.mailbox_address.to_fixed_bytes().into()
}
MetadataToken::MessageId => metadata.message_id.unwrap().to_fixed_bytes().into(),
MetadataToken::Threshold => Vec::from([threshold]),
MetadataToken::MerkleProof => {
let proof_tokens: Vec<Token> = metadata
.proof
.unwrap()
.path
.iter()
.map(|x| Token::FixedBytes(x.to_fixed_bytes().into()))
.collect();
ethers::abi::encode(&proof_tokens)
}
MetadataToken::Validators => {
let validator_tokens: Vec<Token> = validators
fn format_metadata(&self, metadata: MultisigMetadata) -> Result<Vec<u8>> {
let build_token = |token: &MetadataToken| -> Result<Vec<u8>> {
match token {
MetadataToken::CheckpointMerkleRoot => {
Ok(metadata.checkpoint.root.to_fixed_bytes().into())
}
MetadataToken::MessageMerkleLeafIndex => {
Ok(metadata.merkle_leaf_index.to_be_bytes().into())
}
MetadataToken::CheckpointIndex => {
Ok(metadata.checkpoint.index.to_be_bytes().into())
}
MetadataToken::CheckpointMerkleTreeHook => Ok(metadata
.checkpoint
.merkle_tree_hook_address
.to_fixed_bytes()
.into()),
MetadataToken::MessageId => {
Ok(metadata.checkpoint.message_id.to_fixed_bytes().into())
}
MetadataToken::MerkleProof => {
let proof_tokens: Vec<Token> = metadata
.proof
.unwrap()
.path
.iter()
.map(|x| Token::FixedBytes(x.to_fixed_bytes().into()))
.collect();
Ok(ethers::abi::encode(&proof_tokens))
}
MetadataToken::Signatures => Ok(metadata
.signatures
.iter()
.map(|x| Token::FixedBytes(x.to_fixed_bytes().into()))
.collect();
ethers::abi::encode(&[Token::FixedArray(validator_tokens)])
}
MetadataToken::Signatures => {
let ordered_signatures = order_signatures(validators, &metadata.signatures);
let threshold_signatures = &ordered_signatures[..threshold as usize];
threshold_signatures.concat()
.map(|x| x.to_vec())
.collect::<Vec<_>>()
.concat()),
}
};
self.token_layout().iter().flat_map(build_token).collect()
let metas: Result<Vec<Vec<u8>>> = self.token_layout().iter().map(build_token).collect();
Ok(metas?.into_iter().flatten().collect())
}
}
@ -126,7 +127,7 @@ impl<T: MultisigIsmMetadataBuilder> MetadataBuilder for T {
.context(CTX)?
{
debug!(?message, ?metadata.checkpoint, "Found checkpoint with quorum");
Ok(Some(self.format_metadata(&validators, threshold, metadata)))
Ok(Some(self.format_metadata(metadata)?))
} else {
info!(
?message, ?validators, threshold, ism=%multisig_ism.address(),
@ -136,32 +137,3 @@ impl<T: MultisigIsmMetadataBuilder> MetadataBuilder for T {
}
}
}
/// Orders `signatures` by the signers according to the `desired_order`.
/// Returns a Vec of the signature raw bytes in the correct order.
///
/// # Panics
/// Panics if any signer in `signatures` is not present in `desired_order`.
fn order_signatures(desired_order: &[H256], signatures: &[SignatureWithSigner]) -> Vec<Vec<u8>> {
    // Signer address => index to sort by
    let ordering_map: HashMap<H256, usize> = desired_order
        .iter()
        .enumerate()
        .map(|(index, a)| (*a, index))
        .collect();
    // Create a tuple of (SignatureWithSigner, index to sort by); the
    // `unwrap` here is the documented panic on an unknown signer.
    let mut ordered_signatures = signatures
        .iter()
        .cloned()
        .map(|s| {
            let order_index = ordering_map.get(&H256::from(s.signer)).unwrap();
            (s, *order_index)
        })
        .collect::<Vec<_>>();
    // Sort by the index
    ordered_signatures.sort_by_key(|s| s.1);
    // Now collect only the raw signature bytes
    ordered_signatures
        .into_iter()
        .map(|s| s.0.signature.to_vec())
        .collect()
}

@ -1,69 +0,0 @@
use std::fmt::Debug;
use async_trait::async_trait;
use derive_more::{AsRef, Deref};
use derive_new::new;
use eyre::{Context, Result};
use hyperlane_base::MultisigCheckpointSyncer;
use hyperlane_core::{HyperlaneMessage, H256};
use crate::msg::metadata::BaseMetadataBuilder;
use super::base::{MetadataToken, MultisigIsmMetadataBuilder, MultisigMetadata};
/// Metadata builder for the legacy multisig ISM variant: its metadata
/// carries the checkpoint, a merkle proof, the threshold, the signatures,
/// and the full validator set.
#[derive(Debug, Clone, Deref, new, AsRef)]
pub struct LegacyMultisigMetadataBuilder(BaseMetadataBuilder);

#[async_trait]
impl MultisigIsmMetadataBuilder for LegacyMultisigMetadataBuilder {
    /// Field order the legacy multisig ISM expects the metadata to be
    /// encoded in.
    fn token_layout(&self) -> Vec<MetadataToken> {
        vec![
            MetadataToken::CheckpointRoot,
            MetadataToken::CheckpointIndex,
            MetadataToken::CheckpointMailbox,
            MetadataToken::MerkleProof,
            MetadataToken::Threshold,
            MetadataToken::Signatures,
            MetadataToken::Validators,
        ]
    }

    /// Fetches a quorum-signed checkpoint covering `message.nonce` and the
    /// merkle proof of the message against it. Returns `Ok(None)` when
    /// either cannot be obtained yet.
    async fn fetch_metadata(
        &self,
        validators: &[H256],
        threshold: u8,
        message: &HyperlaneMessage,
        checkpoint_syncer: &MultisigCheckpointSyncer,
    ) -> Result<Option<MultisigMetadata>> {
        const CTX: &str = "When fetching LegacyMultisig metadata";
        let highest_nonce = self.highest_known_nonce().await;
        // Look for a checkpoint signed by >= threshold validators whose
        // index is in [message.nonce, highest_nonce].
        let Some(quorum_checkpoint) = checkpoint_syncer
            .legacy_fetch_checkpoint_in_range(
                validators,
                threshold as usize,
                message.nonce,
                highest_nonce,
            )
            .await
            .context(CTX)?
        else {
            return Ok(None);
        };
        // Prove the message leaf against the quorum checkpoint; `get_proof`
        // returns `None` when the proof root and checkpoint root disagree.
        let Some(proof) = self
            .get_proof(message.nonce, quorum_checkpoint.checkpoint)
            .await
            .context(CTX)?
        else {
            return Ok(None);
        };
        Ok(Some(MultisigMetadata::new(
            quorum_checkpoint.checkpoint,
            quorum_checkpoint.signatures,
            None,
            Some(proof),
        )))
    }
}

@ -6,7 +6,8 @@ use derive_new::new;
use eyre::{Context, Result};
use hyperlane_base::MultisigCheckpointSyncer;
use hyperlane_core::{HyperlaneMessage, H256};
use hyperlane_core::{unwrap_or_none_result, HyperlaneMessage, H256};
use tracing::debug;
use crate::msg::metadata::BaseMetadataBuilder;
@ -18,10 +19,11 @@ pub struct MerkleRootMultisigMetadataBuilder(BaseMetadataBuilder);
impl MultisigIsmMetadataBuilder for MerkleRootMultisigMetadataBuilder {
fn token_layout(&self) -> Vec<MetadataToken> {
vec![
MetadataToken::CheckpointMailbox,
MetadataToken::CheckpointIndex,
MetadataToken::CheckpointMerkleTreeHook,
MetadataToken::MessageMerkleLeafIndex,
MetadataToken::MessageId,
MetadataToken::MerkleProof,
MetadataToken::CheckpointIndex,
MetadataToken::Signatures,
]
}
@ -34,27 +36,44 @@ impl MultisigIsmMetadataBuilder for MerkleRootMultisigMetadataBuilder {
checkpoint_syncer: &MultisigCheckpointSyncer,
) -> Result<Option<MultisigMetadata>> {
const CTX: &str = "When fetching MerkleRootMultisig metadata";
let highest_nonce = self.highest_known_nonce().await;
let Some(quorum_checkpoint) = checkpoint_syncer
.fetch_checkpoint_in_range(validators, threshold as usize, message.nonce, highest_nonce)
unwrap_or_none_result!(
highest_leaf_index,
self.highest_known_leaf_index().await,
debug!("Couldn't get highest known leaf index")
);
unwrap_or_none_result!(
leaf_index,
self.get_merkle_leaf_id_by_message_id(message.id())
.await
.context(CTX)?,
debug!(
?message,
"No merkle leaf found for message id, must have not been enqueued in the tree"
)
);
unwrap_or_none_result!(
quorum_checkpoint,
checkpoint_syncer
.fetch_checkpoint_in_range(
validators,
threshold as usize,
leaf_index,
highest_leaf_index
)
.await
.context(CTX)?,
debug!(
leaf_index,
highest_leaf_index, "Couldn't get checkpoint in range"
)
);
let proof = self
.get_proof(leaf_index, quorum_checkpoint.checkpoint.checkpoint)
.await
.context(CTX)?
else {
return Ok(None);
};
let Some(proof) = self
.get_proof(message.nonce, quorum_checkpoint.checkpoint.checkpoint)
.await
.context(CTX)?
else {
return Ok(None);
};
.context(CTX)?;
Ok(Some(MultisigMetadata::new(
quorum_checkpoint.checkpoint.checkpoint,
quorum_checkpoint.signatures,
Some(quorum_checkpoint.checkpoint.message_id),
quorum_checkpoint,
leaf_index,
Some(proof),
)))
}

@ -6,8 +6,8 @@ use derive_new::new;
use eyre::{Context, Result};
use hyperlane_base::MultisigCheckpointSyncer;
use hyperlane_core::{HyperlaneMessage, H256};
use tracing::warn;
use hyperlane_core::{unwrap_or_none_result, HyperlaneMessage, H256};
use tracing::{debug, trace, warn};
use crate::msg::metadata::BaseMetadataBuilder;
@ -20,8 +20,9 @@ pub struct MessageIdMultisigMetadataBuilder(BaseMetadataBuilder);
impl MultisigIsmMetadataBuilder for MessageIdMultisigMetadataBuilder {
fn token_layout(&self) -> Vec<MetadataToken> {
vec![
MetadataToken::CheckpointMailbox,
MetadataToken::CheckpointRoot,
MetadataToken::CheckpointMerkleTreeHook,
MetadataToken::CheckpointMerkleRoot,
MetadataToken::CheckpointIndex,
MetadataToken::Signatures,
]
}
@ -33,28 +34,45 @@ impl MultisigIsmMetadataBuilder for MessageIdMultisigMetadataBuilder {
message: &HyperlaneMessage,
checkpoint_syncer: &MultisigCheckpointSyncer,
) -> Result<Option<MultisigMetadata>> {
let message_id = message.id();
const CTX: &str = "When fetching MessageIdMultisig metadata";
let Some(quorum_checkpoint) = checkpoint_syncer
.fetch_checkpoint(validators, threshold as usize, message.nonce)
.await
.context(CTX)?
else {
return Ok(None);
};
unwrap_or_none_result!(
leaf_index,
self.get_merkle_leaf_id_by_message_id(message_id)
.await
.context(CTX)?,
debug!(
?message,
"No merkle leaf found for message id, must have not been enqueued in the tree"
)
);
unwrap_or_none_result!(
quorum_checkpoint,
checkpoint_syncer
.fetch_checkpoint(validators, threshold as usize, leaf_index)
.await
.context(CTX)?,
trace!("No quorum checkpoint found")
);
if quorum_checkpoint.checkpoint.message_id != message.id() {
if quorum_checkpoint.checkpoint.message_id != message_id {
warn!(
"Quorum checkpoint message id {} does not match message id {}",
quorum_checkpoint.checkpoint.message_id,
message.id()
quorum_checkpoint.checkpoint.message_id, message_id
);
if quorum_checkpoint.checkpoint.index != leaf_index {
warn!(
"Quorum checkpoint index {} does not match leaf index {}",
quorum_checkpoint.checkpoint.index, leaf_index
);
}
return Ok(None);
}
Ok(Some(MultisigMetadata::new(
quorum_checkpoint.checkpoint.checkpoint,
quorum_checkpoint.signatures,
None,
quorum_checkpoint,
leaf_index,
None,
)))
}

@ -1,10 +1,8 @@
mod base;
mod legacy_multisig;
mod merkle_root_multisig;
mod message_id_multisig;
pub use base::{MetadataToken, MultisigIsmMetadataBuilder, MultisigMetadata};
pub use legacy_multisig::LegacyMultisigMetadataBuilder;
pub use merkle_root_multisig::MerkleRootMultisigMetadataBuilder;
pub use message_id_multisig::MessageIdMultisigMetadataBuilder;

@ -5,22 +5,18 @@ use std::{
time::Duration,
};
use async_trait::async_trait;
use derive_new::new;
use eyre::Result;
use hyperlane_base::{db::HyperlaneRocksDB, CoreMetrics};
use hyperlane_core::{HyperlaneDomain, HyperlaneMessage};
use prometheus::IntGauge;
use tokio::{
sync::{mpsc::UnboundedSender, RwLock},
task::JoinHandle,
};
use tracing::{debug, info_span, instrument, instrument::Instrumented, trace, Instrument};
use tokio::sync::mpsc::UnboundedSender;
use tracing::{debug, trace};
use super::pending_message::*;
use crate::{
merkle_tree_builder::MerkleTreeBuilder, msg::pending_operation::DynPendingOperation,
settings::matching_list::MatchingList,
};
use crate::msg::pending_operation::DynPendingOperation;
use crate::{processor::ProcessorExt, settings::matching_list::MatchingList};
/// Finds unprocessed messages from an origin and submits then through a channel
/// for to the appropriate destination.
@ -30,7 +26,6 @@ pub struct MessageProcessor {
whitelist: Arc<MatchingList>,
blacklist: Arc<MatchingList>,
metrics: MessageProcessorMetrics,
prover_sync: Arc<RwLock<MerkleTreeBuilder>>,
/// channel for each destination chain to send operations (i.e. message
/// submissions) to
send_channels: HashMap<u32, UnboundedSender<Box<DynPendingOperation>>>,
@ -44,76 +39,26 @@ impl Debug for MessageProcessor {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(
f,
"MessageProcessor {{ whitelist: {:?}, blacklist: {:?}, prover_sync: {:?}, message_nonce: {:?} }}",
self.whitelist,
self.blacklist,
self.prover_sync,
self.message_nonce
"MessageProcessor {{ whitelist: {:?}, blacklist: {:?}, message_nonce: {:?} }}",
self.whitelist, self.blacklist, self.message_nonce
)
}
}
impl MessageProcessor {
#[async_trait]
impl ProcessorExt for MessageProcessor {
/// The domain this processor is getting messages from.
pub fn domain(&self) -> &HyperlaneDomain {
fn domain(&self) -> &HyperlaneDomain {
self.db.domain()
}
pub fn spawn(self) -> Instrumented<JoinHandle<Result<()>>> {
let span = info_span!("MessageProcessor");
tokio::spawn(async move { self.main_loop().await }).instrument(span)
}
#[instrument(ret, err, skip(self), level = "info", fields(domain=%self.domain()))]
async fn main_loop(mut self) -> Result<()> {
/// One round of processing, extracted from infinite work loop for
/// testing purposes.
async fn tick(&mut self) -> Result<()> {
// Forever, scan HyperlaneRocksDB looking for new messages to send. When criteria are
// satisfied or the message is disqualified, push the message onto
// self.tx_msg and then continue the scan at the next highest
// nonce.
loop {
self.tick().await?;
}
}
/// Tries to get the next message to process.
///
/// If no message with self.message_nonce is found, returns None.
/// If the message with self.message_nonce is found and has previously
/// been marked as processed, increments self.message_nonce and returns
/// None.
fn try_get_unprocessed_message(&mut self) -> Result<Option<HyperlaneMessage>> {
loop {
// First, see if we can find the message so we can update the gauge.
if let Some(message) = self.db.retrieve_message_by_nonce(self.message_nonce)? {
// Update the latest nonce gauges
self.metrics
.max_last_known_message_nonce_gauge
.set(message.nonce as i64);
if let Some(metrics) = self.metrics.get(message.destination) {
metrics.set(message.nonce as i64);
}
// If this message has already been processed, on to the next one.
if !self
.db
.retrieve_processed_by_nonce(&self.message_nonce)?
.unwrap_or(false)
{
return Ok(Some(message));
} else {
debug!(nonce=?self.message_nonce, "Message already marked as processed in DB");
self.message_nonce += 1;
}
} else {
trace!(nonce=?self.message_nonce, "No message found in DB for nonce");
return Ok(None);
}
}
}
/// One round of processing, extracted from infinite work loop for
/// testing purposes.
async fn tick(&mut self) -> Result<()> {
// Scan until we find next nonce without delivery confirmation.
if let Some(msg) = self.try_get_unprocessed_message()? {
debug!(?msg, "Processor working on message");
@ -147,13 +92,6 @@ impl MessageProcessor {
return Ok(());
}
// Feed the message to the prover sync
self.prover_sync
.write()
.await
.update_to_index(msg.nonce)
.await?;
debug!(%msg, "Sending message to submitter");
// Finally, build the submit arg and dispatch it to the submitter.
@ -170,6 +108,38 @@ impl MessageProcessor {
}
}
impl MessageProcessor {
    /// Tries to get the next message to process.
    ///
    /// Returns `Ok(None)` when no message with `self.message_nonce` exists
    /// in the DB yet. When the message exists but was already marked as
    /// processed, advances `self.message_nonce` and keeps scanning.
    fn try_get_unprocessed_message(&mut self) -> Result<Option<HyperlaneMessage>> {
        loop {
            // First, see if we can find the message so we can update the gauge.
            if let Some(message) = self.db.retrieve_message_by_nonce(self.message_nonce)? {
                // Update the latest nonce gauges
                self.metrics
                    .max_last_known_message_nonce_gauge
                    .set(message.nonce as i64);
                if let Some(metrics) = self.metrics.get(message.destination) {
                    metrics.set(message.nonce as i64);
                }
                // If this message has already been processed, on to the next one.
                if !self
                    .db
                    .retrieve_processed_by_nonce(&self.message_nonce)?
                    .unwrap_or(false)
                {
                    return Ok(Some(message));
                } else {
                    debug!(nonce=?self.message_nonce, "Message already marked as processed in DB");
                    self.message_nonce += 1;
                }
            } else {
                trace!(nonce=?self.message_nonce, "No message found in DB for nonce");
                return Ok(None);
            }
        }
    }
}
#[derive(Debug)]
pub struct MessageProcessorMetrics {
max_last_known_message_nonce_gauge: IntGauge,
@ -210,6 +180,16 @@ impl MessageProcessorMetrics {
mod test {
use std::time::Instant;
use crate::{
merkle_tree::builder::MerkleTreeBuilder,
msg::{
gas_payment::GasPaymentEnforcer, metadata::BaseMetadataBuilder,
pending_operation::PendingOperation,
},
processor::Processor,
};
use super::*;
use hyperlane_base::{
db::{test_utils, HyperlaneRocksDB},
settings::{ChainConf, ChainConnectionConf, Settings},
@ -217,16 +197,13 @@ mod test {
use hyperlane_test::mocks::{MockMailboxContract, MockValidatorAnnounceContract};
use prometheus::{IntCounter, Registry};
use tokio::{
sync::mpsc::{self, UnboundedReceiver},
sync::{
mpsc::{self, UnboundedReceiver},
RwLock,
},
time::sleep,
};
use super::*;
use crate::msg::{
gas_payment::GasPaymentEnforcer, metadata::BaseMetadataBuilder,
pending_operation::PendingOperation,
};
fn dummy_processor_metrics(domain_id: u32) -> MessageProcessorMetrics {
MessageProcessorMetrics {
max_last_known_message_nonce_gauge: IntGauge::new(
@ -252,7 +229,7 @@ mod test {
ChainConf {
domain: domain.clone(),
signer: Default::default(),
finality_blocks: Default::default(),
reorg_period: Default::default(),
addresses: Default::default(),
connection: ChainConnectionConf::Ethereum(hyperlane_ethereum::ConnectionConf::Http {
url: "http://example.com".parse().unwrap(),
@ -274,10 +251,11 @@ mod test {
let core_metrics = CoreMetrics::new("dummy_relayer", 37582, Registry::new()).unwrap();
BaseMetadataBuilder::new(
destination_chain_conf.clone(),
Arc::new(RwLock::new(MerkleTreeBuilder::new(db.clone()))),
Arc::new(RwLock::new(MerkleTreeBuilder::new())),
Arc::new(MockValidatorAnnounceContract::default()),
false,
Arc::new(core_metrics),
db.clone(),
5,
)
}
@ -307,7 +285,6 @@ mod test {
Default::default(),
Default::default(),
dummy_processor_metrics(origin_domain.id()),
Arc::new(RwLock::new(MerkleTreeBuilder::new(db.clone()))),
HashMap::from([(destination_domain.id(), send_channel)]),
HashMap::from([(destination_domain.id(), message_context)]),
),
@ -373,7 +350,8 @@ mod test {
let (message_processor, mut receive_channel) =
dummy_message_processor(origin_domain, destination_domain, db);
let process_fut = message_processor.spawn();
let processor = Processor::new(Box::new(message_processor));
let process_fut = processor.spawn();
let mut pending_messages = vec![];
let pending_message_accumulator = async {
while let Some(pm) = receive_channel.recv().await {

@ -0,0 +1,37 @@
use std::fmt::Debug;
use async_trait::async_trait;
use derive_new::new;
use eyre::Result;
use hyperlane_core::HyperlaneDomain;
use tokio::task::JoinHandle;
use tracing::{info_span, instrument, instrument::Instrumented, Instrument};
/// A unit of work that a `Processor` can drive: exposes the domain it
/// draws work from and a single processing round (`tick`) which the
/// processor invokes in an infinite loop.
#[async_trait]
pub trait ProcessorExt: Send + Debug {
    /// The domain this processor is getting messages from.
    fn domain(&self) -> &HyperlaneDomain;

    /// One round of processing, extracted from infinite work loop for
    /// testing purposes.
    async fn tick(&mut self) -> Result<()>;
}
/// Generic driver that runs a `ProcessorExt` implementation forever on
/// the tokio runtime.
#[derive(new)]
pub struct Processor {
    /// The concrete worker whose `tick` is invoked in a loop.
    ticker: Box<dyn ProcessorExt>,
}

impl Processor {
    /// Spawns the processing loop as a tokio task, wrapped in a tracing
    /// span.
    pub fn spawn(self) -> Instrumented<JoinHandle<Result<()>>> {
        let span = info_span!("MessageProcessor");
        tokio::spawn(async move { self.main_loop().await }).instrument(span)
    }

    /// Calls `tick` forever; only returns (with the error) when a tick
    /// fails.
    #[instrument(ret, err, skip(self), level = "info", fields(domain=%self.ticker.domain()))]
    async fn main_loop(mut self) -> Result<()> {
        loop {
            self.ticker.tick().await?;
        }
    }
}

@ -12,7 +12,7 @@ use hyperlane_base::{
run_all, BaseAgent, ContractSyncMetrics, CoreMetrics, HyperlaneAgentCore, MessageContractSync,
WatermarkContractSync,
};
use hyperlane_core::{HyperlaneDomain, InterchainGasPayment, U256};
use hyperlane_core::{HyperlaneDomain, InterchainGasPayment, MerkleTreeInsertion, U256};
use tokio::{
sync::{
mpsc::{self, UnboundedReceiver, UnboundedSender},
@ -22,8 +22,10 @@ use tokio::{
};
use tracing::{info, info_span, instrument::Instrumented, Instrument};
use crate::merkle_tree::processor::{MerkleTreeProcessor, MerkleTreeProcessorMetrics};
use crate::processor::{Processor, ProcessorExt};
use crate::{
merkle_tree_builder::MerkleTreeBuilder,
merkle_tree::builder::MerkleTreeBuilder,
msg::{
gas_payment::GasPaymentEnforcer,
metadata::BaseMetadataBuilder,
@ -55,6 +57,8 @@ pub struct Relayer {
/// sent between
msg_ctxs: HashMap<ContextKey, Arc<MessageContext>>,
prover_syncs: HashMap<HyperlaneDomain, Arc<RwLock<MerkleTreeBuilder>>>,
merkle_tree_hook_syncs:
HashMap<HyperlaneDomain, Arc<WatermarkContractSync<MerkleTreeInsertion>>>,
dbs: HashMap<HyperlaneDomain, HyperlaneRocksDB>,
whitelist: Arc<MatchingList>,
blacklist: Arc<MatchingList>,
@ -127,6 +131,16 @@ impl BaseAgent for Relayer {
.collect(),
)
.await?;
let merkle_tree_hook_syncs = settings
.build_merkle_tree_hook_indexers(
settings.origin_chains.iter(),
&metrics,
&contract_sync_metrics,
dbs.iter()
.map(|(d, db)| (d.clone(), Arc::new(db.clone()) as _))
.collect(),
)
.await?;
let whitelist = Arc::new(settings.whitelist);
let blacklist = Arc::new(settings.blacklist);
@ -146,10 +160,9 @@ impl BaseAgent for Relayer {
.origin_chains
.iter()
.map(|origin| {
let db = dbs.get(origin).unwrap().clone();
(
origin.clone(),
Arc::new(RwLock::new(MerkleTreeBuilder::new(db))),
Arc::new(RwLock::new(MerkleTreeBuilder::new())),
)
})
.collect::<HashMap<_, _>>();
@ -184,12 +197,14 @@ impl BaseAgent for Relayer {
};
for origin in &settings.origin_chains {
let db = dbs.get(origin).unwrap().clone();
let metadata_builder = BaseMetadataBuilder::new(
destination_chain_setup.clone(),
prover_syncs[origin].clone(),
validator_announces[origin].clone(),
settings.allow_local_checkpoint_syncers,
core.metrics.clone(),
db,
5,
);
@ -219,6 +234,7 @@ impl BaseAgent for Relayer {
message_syncs,
interchain_gas_payment_syncs,
prover_syncs,
merkle_tree_hook_syncs,
whitelist,
blacklist,
transaction_gas_limit,
@ -244,11 +260,13 @@ impl BaseAgent for Relayer {
for origin in &self.origin_chains {
tasks.push(self.run_message_sync(origin).await);
tasks.push(self.run_interchain_gas_payment_sync(origin).await);
tasks.push(self.run_merkle_tree_hook_syncs(origin).await);
}
// each message process attempts to send messages from a chain
for origin in &self.origin_chains {
tasks.push(self.run_message_processor(origin, send_channels.clone()));
tasks.push(self.run_merkle_tree_processor(origin));
}
run_all(tasks)
@ -289,6 +307,17 @@ impl Relayer {
.instrument(info_span!("ContractSync"))
}
/// Spawns a contract-sync task that indexes merkle tree hook insertions
/// for `origin`, driven by a rate-limited cursor built from the chain's
/// index settings.
async fn run_merkle_tree_hook_syncs(
    &self,
    origin: &HyperlaneDomain,
) -> Instrumented<JoinHandle<eyre::Result<()>>> {
    let index_settings = self.as_ref().settings.chains[origin.name()].index.clone();
    // NOTE(review): `unwrap` assumes a sync was constructed for every
    // origin chain during agent setup — confirm against `from_settings`.
    let contract_sync = self.merkle_tree_hook_syncs.get(origin).unwrap().clone();
    let cursor = contract_sync.rate_limited_cursor(index_settings).await;
    tokio::spawn(async move { contract_sync.clone().sync("merkle_tree_hook", cursor).await })
        .instrument(info_span!("ContractSync"))
}
fn run_message_processor(
&self,
origin: &HyperlaneDomain,
@ -319,21 +348,41 @@ impl Relayer {
self.whitelist.clone(),
self.blacklist.clone(),
metrics,
self.prover_syncs[origin].clone(),
send_channels,
destination_ctxs,
);
let span = info_span!("MessageProcessor", origin=%message_processor.domain());
let process_fut = message_processor.spawn();
let processor = Processor::new(Box::new(message_processor));
tokio::spawn(async move {
let res = tokio::try_join!(process_fut)?;
let res = tokio::try_join!(processor.spawn())?;
info!(?res, "try_join finished for message processor");
Ok(())
})
.instrument(span)
}
/// Spawns the merkle tree processor task for `origin`; the processor is
/// constructed with the chain's DB and shared prover sync and driven by
/// the generic `Processor` loop.
fn run_merkle_tree_processor(
    &self,
    origin: &HyperlaneDomain,
) -> Instrumented<JoinHandle<Result<()>>> {
    let metrics = MerkleTreeProcessorMetrics::new();
    let merkle_tree_processor = MerkleTreeProcessor::new(
        self.dbs.get(origin).unwrap().clone(),
        metrics,
        self.prover_syncs[origin].clone(),
    );
    let span = info_span!("MerkleTreeProcessor", origin=%merkle_tree_processor.domain());
    let processor = Processor::new(Box::new(merkle_tree_processor));
    tokio::spawn(async move {
        let res = tokio::try_join!(processor.spawn())?;
        info!(?res, "try_join finished for merkle tree processor");
        Ok(())
    })
    .instrument(span)
}
#[allow(clippy::too_many_arguments)]
#[tracing::instrument(skip(self, receiver))]
fn run_destination_submitter(

@ -22,8 +22,7 @@ use serde::{
/// - wildcard "*"
/// - single value in decimal or hex (must start with `0x`) format
/// - list of values in decimal or hex format
#[derive(Debug, Deserialize, Default, Clone)]
#[serde(transparent)]
#[derive(Debug, Default, Clone)]
pub struct MatchingList(Option<Vec<ListElement>>);
#[derive(Debug, Clone, PartialEq)]
@ -63,6 +62,55 @@ impl<T: Debug> Display for Filter<T> {
}
}
/// Visitor for the optional outer value of a [`MatchingList`]: `null` /
/// absent maps to `MatchingList(None)`, otherwise the inner sequence is
/// deserialized.
struct MatchingListVisitor;

impl<'de> Visitor<'de> for MatchingListVisitor {
    type Value = MatchingList;

    fn expecting(&self, fmt: &mut Formatter) -> fmt::Result {
        write!(fmt, "an optional list of matching rules")
    }

    fn visit_none<E>(self) -> Result<Self::Value, E>
    where
        E: Error,
    {
        Ok(MatchingList(None))
    }

    fn visit_some<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: Deserializer<'de>,
    {
        let rules: Vec<ListElement> = deserializer.deserialize_seq(MatchingListArrayVisitor)?;
        // An empty matching list behaves exactly like an absent one.
        let inner = if rules.is_empty() { None } else { Some(rules) };
        Ok(MatchingList(inner))
    }
}
/// Visitor for the inner sequence of [`ListElement`] rules.
struct MatchingListArrayVisitor;

impl<'de> Visitor<'de> for MatchingListArrayVisitor {
    type Value = Vec<ListElement>;

    fn expecting(&self, fmt: &mut Formatter) -> fmt::Result {
        write!(fmt, "a list of matching rules")
    }

    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: SeqAccess<'de>,
    {
        // Preallocate from the deserializer's size hint when one is given.
        let mut elements = match seq.size_hint() {
            Some(hint) => Vec::with_capacity(hint),
            None => Vec::new(),
        };
        while let Some(element) = seq.next_element::<ListElement>()? {
            elements.push(element);
        }
        Ok(elements)
    }
}
struct FilterVisitor<T>(PhantomData<T>);
impl<'de> Visitor<'de> for FilterVisitor<u32> {
type Value = Filter<u32>;
@ -145,6 +193,15 @@ impl<'de> Visitor<'de> for FilterVisitor<H256> {
}
}
impl<'de> Deserialize<'de> for MatchingList {
    /// Accepts `null`/absent (via `visit_none`) or a JSON array of rules;
    /// an empty array normalizes to `MatchingList(None)`.
    fn deserialize<D>(d: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        d.deserialize_option(MatchingListVisitor)
    }
}
impl<'de> Deserialize<'de> for Filter<u32> {
fn deserialize<D>(d: D) -> Result<Self, D::Error>
where
@ -166,13 +223,13 @@ impl<'de> Deserialize<'de> for Filter<H256> {
#[derive(Debug, Deserialize, Clone)]
#[serde(tag = "type")]
struct ListElement {
#[serde(default, rename = "originDomain")]
#[serde(default, rename = "origindomain")]
origin_domain: Filter<u32>,
#[serde(default, rename = "senderAddress")]
#[serde(default, rename = "senderaddress")]
sender_address: Filter<H256>,
#[serde(default, rename = "destinationDomain")]
#[serde(default, rename = "destinationdomain")]
destination_domain: Filter<u32>,
#[serde(default, rename = "recipientAddress")]
#[serde(default, rename = "recipientaddress")]
recipient_address: Filter<H256>,
}
@ -266,7 +323,7 @@ mod test {
#[test]
fn basic_config() {
let list: MatchingList = serde_json::from_str(r#"[{"originDomain": "*", "senderAddress": "*", "destinationDomain": "*", "recipientAddress": "*"}, {}]"#).unwrap();
let list: MatchingList = serde_json::from_str(r#"[{"origindomain": "*", "senderaddress": "*", "destinationdomain": "*", "recipientaddress": "*"}, {}]"#).unwrap();
assert!(list.0.is_some());
assert_eq!(list.0.as_ref().unwrap().len(), 2);
let elem = &list.0.as_ref().unwrap()[0];
@ -307,7 +364,7 @@ mod test {
#[test]
fn config_with_address() {
let list: MatchingList = serde_json::from_str(r#"[{"senderAddress": "0x9d4454B023096f34B160D6B654540c56A1F81688", "recipientAddress": "0x9d4454B023096f34B160D6B654540c56A1F81688"}]"#).unwrap();
let list: MatchingList = serde_json::from_str(r#"[{"senderaddress": "0x9d4454B023096f34B160D6B654540c56A1F81688", "recipientaddress": "0x9d4454B023096f34B160D6B654540c56A1F81688"}]"#).unwrap();
assert!(list.0.is_some());
assert_eq!(list.0.as_ref().unwrap().len(), 1);
let elem = &list.0.as_ref().unwrap()[0];
@ -361,7 +418,7 @@ mod test {
#[test]
fn config_with_multiple_domains() {
let whitelist: MatchingList =
serde_json::from_str(r#"[{"destinationDomain": ["13372", "13373"]}]"#).unwrap();
serde_json::from_str(r#"[{"destinationdomain": ["13372", "13373"]}]"#).unwrap();
assert!(whitelist.0.is_some());
assert_eq!(whitelist.0.as_ref().unwrap().len(), 1);
let elem = &whitelist.0.as_ref().unwrap()[0];
@ -371,6 +428,12 @@ mod test {
assert_eq!(elem.sender_address, Wildcard);
}
#[test]
fn config_with_empty_list_is_none() {
    // An empty JSON array must deserialize to `None`, not `Some(vec![])`.
    let parsed: MatchingList = serde_json::from_str("[]").unwrap();
    assert!(parsed.0.is_none());
}
#[test]
fn matches_empty_list() {
let info = MatchInfo {
@ -388,7 +451,7 @@ mod test {
#[test]
fn supports_base58() {
serde_json::from_str::<MatchingList>(
r#"[{"originDomain":1399811151,"senderAddress":"DdTMkk9nuqH5LnD56HLkPiKMV3yB3BNEYSQfgmJHa5i7","destinationDomain":11155111,"recipientAddress":"0x6AD4DEBA8A147d000C09de6465267a9047d1c217"}]"#,
r#"[{"origindomain":1399811151,"senderaddress":"DdTMkk9nuqH5LnD56HLkPiKMV3yB3BNEYSQfgmJHa5i7","destinationdomain":11155111,"recipientaddress":"0x6AD4DEBA8A147d000C09de6465267a9047d1c217"}]"#,
).unwrap();
}
}

@ -6,13 +6,13 @@
use std::{collections::HashSet, path::PathBuf};
use convert_case::Case;
use derive_more::{AsMut, AsRef, Deref, DerefMut};
use eyre::{eyre, Context};
use hyperlane_base::{
impl_loadable_from_settings,
settings::{
deprecated_parser::DeprecatedRawSettings,
parser::{RawAgentConf, ValueParser},
parser::{recase_json_value, RawAgentConf, ValueParser},
Settings,
},
};
@ -20,128 +20,11 @@ use hyperlane_core::{cfg_unwrap_all, config::*, HyperlaneDomain, U256};
use itertools::Itertools;
use serde::Deserialize;
use serde_json::Value;
use tracing::warn;
use crate::settings::matching_list::MatchingList;
pub mod matching_list;
/// Config for a GasPaymentEnforcementPolicy
// `Default` yields the `None` policy, i.e. no payment requirement at all.
#[derive(Debug, Clone, Default)]
pub enum GasPaymentEnforcementPolicy {
    /// No requirement - all messages are processed regardless of gas payment
    #[default]
    None,
    /// Messages that have paid a minimum amount will be processed
    Minimum { payment: U256 },
    /// The required amount of gas on the foreign chain has been paid according
    /// to on-chain fee quoting.
    OnChainFeeQuoting {
        // Numerator/denominator pair parsed from a `"numerator / denominator"`
        // string in the raw config (see `RawGasPaymentEnforcementPolicy`).
        gas_fraction_numerator: u64,
        gas_fraction_denominator: u64,
    },
}
// Raw (untyped) form of `GasPaymentEnforcementPolicy` as it appears in agent
// config JSON; the variant is selected by a camelCase `type` tag.
#[derive(Debug, Deserialize)]
#[serde(tag = "type", rename_all = "camelCase")]
enum RawGasPaymentEnforcementPolicy {
    None,
    Minimum {
        // Accepts either a JSON string or an integer in config.
        payment: Option<StrOrInt>,
    },
    OnChainFeeQuoting {
        /// Optional fraction of gas which must be paid before attempting to run
        /// the transaction. Must be written as `"numerator /
        /// denominator"` where both are integers.
        #[serde(default = "default_gasfraction")]
        gasfraction: String,
    },
    // Catch-all for unrecognized `type` tags; rejected during validation in
    // `from_config_filtered` below.
    #[serde(other)]
    Unknown,
}
impl FromRawConf<RawGasPaymentEnforcementPolicy> for GasPaymentEnforcementPolicy {
    /// Validate a raw policy into its typed form, attributing any errors to
    /// the appropriate key under the given config path.
    fn from_config_filtered(
        raw: RawGasPaymentEnforcementPolicy,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        use RawGasPaymentEnforcementPolicy::*;
        match raw {
            None => Ok(Self::None),
            // `payment` is mandatory for the Minimum policy even though the
            // raw field is declared `Option`.
            Minimum { payment } => Ok(Self::Minimum {
                payment: payment
                    .ok_or_else(|| {
                        eyre!("Missing `payment` for Minimum gas payment enforcement policy")
                    })
                    .into_config_result(|| cwp + "payment")?
                    .try_into()
                    .into_config_result(|| cwp + "payment")?,
            }),
            OnChainFeeQuoting { gasfraction } => {
                // Strip all whitespace so `"1 / 2"` and `"1/2"` are
                // equivalent, then split on the `/` separator.
                let (numerator, denominator) =
                    gasfraction
                        .replace(' ', "")
                        .split_once('/')
                        .map(|(a, b)| (a.to_owned(), b.to_owned()))
                        .ok_or_else(|| eyre!("Invalid `gasfraction` for OnChainFeeQuoting gas payment enforcement policy; expected `numerator / denominator`"))
                        .into_config_result(|| cwp + "gasfraction")?;
                Ok(Self::OnChainFeeQuoting {
                    gas_fraction_numerator: numerator
                        .parse()
                        .into_config_result(|| cwp + "gasfraction")?,
                    gas_fraction_denominator: denominator
                        .parse()
                        .into_config_result(|| cwp + "gasfraction")?,
                })
            }
            // Unrecognized `type` tag captured by `#[serde(other)]`.
            Unknown => Err(eyre!("Unknown gas payment enforcement policy"))
                .into_config_result(|| cwp.clone()),
        }
    }
}
/// Config for gas payment enforcement
// `Default` combines the `None` policy with a default matching list.
#[derive(Debug, Clone, Default)]
pub struct GasPaymentEnforcementConf {
    /// The gas payment enforcement policy
    pub policy: GasPaymentEnforcementPolicy,
    /// An optional matching list, any message that matches will use this
    /// policy. By default all messages will match.
    pub matching_list: MatchingList,
}
// Raw form of `GasPaymentEnforcementConf` as read from agent config JSON.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct RawGasPaymentEnforcementConf {
    // The policy's `type` tag and fields are flattened into this object,
    // sitting alongside `matchingList`.
    #[serde(flatten)]
    policy: Option<RawGasPaymentEnforcementPolicy>,
    // Absent matching list falls back to `MatchingList::default()`.
    #[serde(default)]
    matching_list: Option<MatchingList>,
}
impl FromRawConf<RawGasPaymentEnforcementConf> for GasPaymentEnforcementConf {
fn from_config_filtered(
raw: RawGasPaymentEnforcementConf,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
let mut err = ConfigParsingError::default();
let policy = raw.policy
.ok_or_else(|| eyre!("Missing policy for gas payment enforcement config; required if a matching list is provided"))
.take_err(&mut err, || cwp.clone()).and_then(|r| {
r.parse_config(cwp).take_config_err(&mut err)
});
let matching_list = raw.matching_list.unwrap_or_default();
err.into_result(Self {
policy: policy.unwrap(),
matching_list,
})
}
}
/// Settings for `Relayer`
#[derive(Debug, AsRef, AsMut, Deref, DerefMut)]
pub struct RelayerSettings {
@ -173,48 +56,38 @@ pub struct RelayerSettings {
pub allow_local_checkpoint_syncers: bool,
}
#[derive(Debug, Deserialize, AsMut)]
#[serde(rename_all = "camelCase")]
pub struct DeprecatedRawRelayerSettings {
#[serde(flatten)]
#[as_mut]
base: DeprecatedRawSettings,
/// Database path (path on the fs)
db: Option<String>,
// Comma separated list of chains to relay between.
relaychains: Option<String>,
// Comma separated list of origin chains.
#[deprecated(note = "Use `relaychains` instead")]
originchainname: Option<String>,
// Comma separated list of destination chains.
#[deprecated(note = "Use `relaychains` instead")]
destinationchainnames: Option<String>,
/// The gas payment enforcement configuration as JSON. Expects an ordered array of `GasPaymentEnforcementConfig`.
gaspaymentenforcement: Option<String>,
/// This is optional. If no whitelist is provided ALL messages will be considered on the
/// whitelist.
whitelist: Option<String>,
/// This is optional. If no blacklist is provided ALL will be considered to not be on
/// the blacklist.
blacklist: Option<String>,
/// This is optional. If not specified, any amount of gas will be valid, otherwise this
/// is the max allowed gas in wei to relay a transaction.
transactiongaslimit: Option<StrOrInt>,
// TODO: this should be a list of chain names to be consistent
/// Comma separated List of domain ids to skip applying the transaction gas limit to.
skiptransactiongaslimitfor: Option<String>,
/// If true, allows local storage based checkpoint syncers.
/// Not intended for production use. Defaults to false.
#[serde(default)]
allowlocalcheckpointsyncers: bool,
/// Config for gas payment enforcement
#[derive(Debug, Clone, Default)]
pub struct GasPaymentEnforcementConf {
/// The gas payment enforcement policy
pub policy: GasPaymentEnforcementPolicy,
/// An optional matching list, any message that matches will use this
/// policy. By default all messages will match.
pub matching_list: MatchingList,
}
impl_loadable_from_settings!(Relayer, DeprecatedRawRelayerSettings -> RelayerSettings);
/// Config for a GasPaymentEnforcementPolicy
#[derive(Debug, Clone, Default)]
pub enum GasPaymentEnforcementPolicy {
/// No requirement - all messages are processed regardless of gas payment
#[default]
None,
/// Messages that have paid a minimum amount will be processed
Minimum { payment: U256 },
/// The required amount of gas on the foreign chain has been paid according
/// to on-chain fee quoting.
OnChainFeeQuoting {
gas_fraction_numerator: u64,
gas_fraction_denominator: u64,
},
}
#[derive(Debug, Deserialize)]
#[serde(transparent)]
struct RawRelayerSettings(Value);
impl_loadable_from_settings!(Relayer, RawRelayerSettings -> RelayerSettings);
impl FromRawConf<RawRelayerSettings> for RelayerSettings {
fn from_config_filtered(
raw: RawRelayerSettings,
@ -256,7 +129,7 @@ impl FromRawConf<RawRelayerSettings> for RelayerSettings {
}) => serde_json::from_str::<Value>(policy_str)
.context("Expected JSON string")
.take_err(&mut err, || cwp.clone())
.map(|v| (cwp, v)),
.map(|v| (cwp, recase_json_value(v, Case::Flat))),
Some(ValueParser {
val: value @ Value::Array(_),
cwp,
@ -287,7 +160,7 @@ impl FromRawConf<RawRelayerSettings> for RelayerSettings {
.get_opt_key("gasFraction")
.parse_string()
.map(|v| v.replace(' ', ""))
.unwrap_or_else(|| default_gasfraction().to_owned());
.unwrap_or_else(|| "1/2".to_owned());
let (numerator, denominator) = gas_fraction
.split_once('/')
.ok_or_else(|| eyre!("Invalid `gas_fraction` for OnChainFeeQuoting gas payment enforcement policy; expected `numerator / denominator`"))
@ -314,7 +187,7 @@ impl FromRawConf<RawRelayerSettings> for RelayerSettings {
matching_list,
})
}).collect_vec()
}).unwrap_or_default();
}).unwrap_or_else(|_| vec![GasPaymentEnforcementConf::default()]);
let whitelist = p
.chain(&mut err)
@ -394,7 +267,8 @@ fn parse_matching_list(p: ValueParser) -> ConfigResult<MatchingList> {
cwp,
} => serde_json::from_str::<Value>(matching_list_str)
.context("Expected JSON string")
.take_err(&mut err, || cwp.clone()),
.take_err(&mut err, || cwp.clone())
.map(|v| recase_json_value(v, Case::Flat)),
ValueParser {
val: value @ Value::Array(_),
..
@ -413,212 +287,3 @@ fn parse_matching_list(p: ValueParser) -> ConfigResult<MatchingList> {
err.into_result(ml)
}
impl FromRawConf<DeprecatedRawRelayerSettings> for RelayerSettings {
    /// Validate the deprecated raw relayer settings into typed
    /// `RelayerSettings`, accumulating as many parse errors as possible
    /// before failing.
    fn from_config_filtered(
        raw: DeprecatedRawRelayerSettings,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        let mut err = ConfigParsingError::default();
        // `gaspaymentenforcement` is a JSON-encoded array of policy configs;
        // if absent or unparseable, fall back to a single default entry.
        let gas_payment_enforcement = raw
            .gaspaymentenforcement
            .and_then(|j| {
                serde_json::from_str::<Vec<RawGasPaymentEnforcementConf>>(&j)
                    .take_err(&mut err, || cwp + "gaspaymentenforcement")
            })
            .map(|rv| {
                let cwp = cwp + "gaspaymentenforcement";
                rv.into_iter()
                    .enumerate()
                    .filter_map(|(i, r)| {
                        // Entries are keyed by their array index in error paths.
                        r.parse_config(&cwp.join(i.to_string()))
                            .take_config_err(&mut err)
                    })
                    .collect()
            })
            .unwrap_or_else(|| vec![Default::default()]);
        // Whitelist/blacklist are JSON-encoded matching lists; absent values
        // fall back to `MatchingList::default()`.
        let whitelist = raw
            .whitelist
            .and_then(|j| {
                serde_json::from_str::<MatchingList>(&j).take_err(&mut err, || cwp + "whitelist")
            })
            .unwrap_or_default();
        let blacklist = raw
            .blacklist
            .and_then(|j| {
                serde_json::from_str::<MatchingList>(&j).take_err(&mut err, || cwp + "blacklist")
            })
            .unwrap_or_default();
        let transaction_gas_limit = raw.transactiongaslimit.and_then(|r| {
            r.try_into()
                .take_err(&mut err, || cwp + "transactiongaslimit")
        });
        // Comma separated domain ids exempt from the transaction gas limit.
        let skip_transaction_gas_limit_for = raw
            .skiptransactiongaslimitfor
            .and_then(|r| {
                r.split(',')
                    .map(str::parse)
                    .collect::<Result<_, _>>()
                    .context("Error parsing domain id")
                    .take_err(&mut err, || cwp + "skiptransactiongaslimitfor")
            })
            .unwrap_or_default();
        // `originchainname`/`destinationchainnames` are deprecated in favor
        // of `relaychains`; warn when they are still in use.
        let mut origin_chain_names = {
            #[allow(deprecated)]
            raw.originchainname
        }
        .map(parse_chains);
        if origin_chain_names.is_some() {
            warn!(
                path = (cwp + "originchainname").json_name(),
                "`originchainname` is deprecated, use `relaychains` instead"
            );
        }
        let mut destination_chain_names = {
            #[allow(deprecated)]
            raw.destinationchainnames
        }
        .map(parse_chains);
        if destination_chain_names.is_some() {
            warn!(
                path = (cwp + "destinationchainnames").json_name(),
                "`destinationchainnames` is deprecated, use `relaychains` instead"
            );
        }
        // `relaychains` is mutually exclusive with the deprecated fields and
        // supplies both the origin and destination sets (>= 2 chains).
        if let Some(relay_chain_names) = raw.relaychains.map(parse_chains) {
            if origin_chain_names.is_some() {
                err.push(
                    cwp + "originchainname",
                    eyre!("Cannot use `relaychains` and `originchainname` at the same time"),
                );
            }
            if destination_chain_names.is_some() {
                err.push(
                    cwp + "destinationchainnames",
                    eyre!("Cannot use `relaychains` and `destinationchainnames` at the same time"),
                );
            }
            if relay_chain_names.len() < 2 {
                err.push(
                    cwp + "relaychains",
                    eyre!(
                        "The relayer must be configured with at least two chains to relay between"
                    ),
                )
            }
            origin_chain_names = Some(relay_chain_names.clone());
            destination_chain_names = Some(relay_chain_names);
        } else if origin_chain_names.is_none() && destination_chain_names.is_none() {
            err.push(
                cwp + "relaychains",
                eyre!("The relayer must be configured with at least two chains to relay between"),
            );
        } else if origin_chain_names.is_none() {
            err.push(
                cwp + "originchainname",
                eyre!("The relayer must be configured with an origin chain (alternatively use `relaychains`)"),
            );
        } else if destination_chain_names.is_none() {
            err.push(
                cwp + "destinationchainnames",
                eyre!("The relayer must be configured with at least one destination chain (alternatively use `relaychains`)"),
            );
        }
        // Default db path: ./hyperlane_db under the current working directory.
        let db = raw
            .db
            .and_then(|r| r.parse().take_err(&mut err, || cwp + "db"))
            .unwrap_or_else(|| std::env::current_dir().unwrap().join("hyperlane_db"));
        // Without both chain sets there is nothing further to validate.
        let (Some(origin_chain_names), Some(destination_chain_names)) =
            (origin_chain_names, destination_chain_names)
        else {
            return Err(err);
        };
        // Only parse chain configs for the chains actually relayed between.
        let chain_filter = origin_chain_names
            .iter()
            .chain(&destination_chain_names)
            .map(String::as_str)
            .collect();
        let base = raw
            .base
            .parse_config_with_filter::<Settings>(cwp, Some(&chain_filter))
            .take_config_err(&mut err);
        // Resolve origin chain names to HyperlaneDomains, recording any that
        // are missing from the base chain configuration.
        let origin_chains = base
            .as_ref()
            .map(|base| {
                origin_chain_names
                    .iter()
                    .filter_map(|origin| {
                        base.lookup_domain(origin)
                            .context("Missing configuration for an origin chain")
                            .take_err(&mut err, || cwp + "chains" + origin)
                    })
                    .collect()
            })
            .unwrap_or_default();
        // validate all destination chains are present and get their HyperlaneDomain.
        let destination_chains: HashSet<_> = base
            .as_ref()
            .map(|base| {
                destination_chain_names
                    .iter()
                    .filter_map(|destination| {
                        base.lookup_domain(destination)
                            .context("Missing configuration for a destination chain")
                            .take_err(&mut err, || cwp + "chains" + destination)
                    })
                    .collect()
            })
            .unwrap_or_default();
        // Every destination chain must have a signer configured in order to
        // submit transactions.
        if let Some(base) = &base {
            for domain in &destination_chains {
                base.chain_setup(domain)
                    .unwrap()
                    .signer
                    .as_ref()
                    .ok_or_else(|| eyre!("Signer is required for destination chains"))
                    .take_err(&mut err, || cwp + "chains" + domain.name() + "signer");
            }
        }
        cfg_unwrap_all!(cwp, err: [base]);
        err.into_result(Self {
            base,
            db,
            origin_chains,
            destination_chains,
            gas_payment_enforcement,
            whitelist,
            blacklist,
            transaction_gas_limit,
            skip_transaction_gas_limit_for,
            allow_local_checkpoint_syncers: raw.allowlocalcheckpointsyncers,
        })
    }
}
/// Fallback `gasfraction` value (`"1/2"`) used when the raw config omits it.
fn default_gasfraction() -> String {
    String::from("1/2")
}
/// Split a comma separated list of chain names, normalizing each name to
/// ASCII lowercase.
fn parse_chains(chains_str: String) -> Vec<String> {
    chains_str
        .split(',')
        .map(|chain| chain.to_ascii_lowercase())
        .collect()
}

@ -46,6 +46,14 @@ const DOMAINS: &[RawDomain] = &[
is_test_net: false,
is_deprecated: false,
},
RawDomain {
name: "basegoerli",
token: "ETH",
domain: 84531,
chain_id: 84531,
is_test_net: false,
is_deprecated: false,
},
RawDomain {
name: "bsc",
token: "BNB",
@ -150,6 +158,14 @@ const DOMAINS: &[RawDomain] = &[
is_test_net: false,
is_deprecated: false,
},
RawDomain {
name: "scrollsepolia",
token: "ETH",
domain: 534351,
chain_id: 534351,
is_test_net: true,
is_deprecated: false,
},
RawDomain {
name: "sepolia",
token: "ETH",
@ -158,6 +174,38 @@ const DOMAINS: &[RawDomain] = &[
is_test_net: true,
is_deprecated: false,
},
RawDomain {
name: "polygonzkevmtestnet",
token: "ETH",
domain: 1442,
chain_id: 1442,
is_test_net: true,
is_deprecated: false,
},
RawDomain {
name: "polygonzkevm",
token: "ETH",
domain: 1101,
chain_id: 1101,
is_test_net: false,
is_deprecated: false,
},
RawDomain {
name: "base",
token: "ETH",
domain: 8453,
chain_id: 8453,
is_test_net: false,
is_deprecated: false,
},
RawDomain {
name: "scroll",
token: "ETH",
domain: 534352,
chain_id: 534352,
is_test_net: false,
is_deprecated: false,
},
RawDomain {
name: "test1",
token: "ETH",
@ -182,14 +230,6 @@ const DOMAINS: &[RawDomain] = &[
is_test_net: true,
is_deprecated: false,
},
RawDomain {
name: "zksync2testnet",
token: "ETH",
domain: 280,
chain_id: 280,
is_test_net: true,
is_deprecated: false,
},
];
#[derive(DeriveMigrationName)]

@ -7,9 +7,9 @@ use async_trait::async_trait;
use eyre::Result;
use hyperlane_base::settings::IndexSettings;
use hyperlane_core::{
BlockInfo, Delivery, HyperlaneDomain, HyperlaneLogStore, HyperlaneMessage,
HyperlaneMessageStore, HyperlaneProvider, HyperlaneWatermarkedLogStore, InterchainGasPayment,
LogMeta, H256,
unwrap_or_none_result, BlockInfo, Delivery, HyperlaneDomain, HyperlaneLogStore,
HyperlaneMessage, HyperlaneMessageStore, HyperlaneProvider, HyperlaneWatermarkedLogStore,
InterchainGasPayment, LogMeta, H256,
};
use itertools::Itertools;
use tracing::trace;
@ -383,18 +383,13 @@ impl HyperlaneMessageStore for HyperlaneSqlDb {
/// Retrieves the block number at which the message with the provided nonce
/// was dispatched.
async fn retrieve_dispatched_block_number(&self, nonce: u32) -> Result<Option<u64>> {
let Some(tx_id) = self
.db
.retrieve_dispatched_tx_id(self.domain().id(), &self.mailbox_address, nonce)
.await?
else {
return Ok(None);
};
let Some(block_id) = self.db.retrieve_block_id(tx_id).await? else {
return Ok(None);
};
unwrap_or_none_result!(
tx_id,
self.db
.retrieve_dispatched_tx_id(self.domain().id(), &self.mailbox_address, nonce)
.await?
);
unwrap_or_none_result!(block_id, self.db.retrieve_block_id(tx_id).await?);
Ok(self.db.retrieve_block_number(block_id).await?)
}
}

@ -91,7 +91,7 @@ impl BlockCursor {
};
debug!(?model, "Inserting cursor");
if let Err(e) = Insert::one(model).exec(&self.db).await {
warn!(error = ?e, "Failed to update database with new cursor")
warn!(error = ?e, "Failed to update database with new cursor. When you just started this, ensure that the migrations included this domain.")
} else {
debug!(cursor = ?*inner, "Updated cursor")
}

@ -7,17 +7,15 @@
use std::{collections::HashSet, default::Default};
use derive_more::{AsMut, AsRef, Deref, DerefMut};
use eyre::{eyre, Context};
use eyre::Context;
use hyperlane_base::{
impl_loadable_from_settings,
settings::{
deprecated_parser::DeprecatedRawSettings,
parser::{RawAgentConf, ValueParser},
Settings,
},
};
use hyperlane_core::{cfg_unwrap_all, config::*, HyperlaneDomain};
use itertools::Itertools;
use serde::Deserialize;
use serde_json::Value;
@ -34,25 +32,12 @@ pub struct ScraperSettings {
pub chains_to_scrape: Vec<HyperlaneDomain>,
}
/// Raw settings for `Scraper`
#[derive(Debug, Deserialize, AsMut)]
#[serde(rename_all = "camelCase")]
pub struct DeprecatedRawScraperSettings {
    // Agent-wide base settings, flattened into this object.
    #[serde(flatten, default)]
    #[as_mut]
    base: DeprecatedRawSettings,
    /// Database connection string
    db: Option<String>,
    /// Comma separated list of chains to scrape
    chainstoscrape: Option<String>,
}
impl_loadable_from_settings!(Scraper, DeprecatedRawScraperSettings -> ScraperSettings);
#[derive(Debug, Deserialize)]
#[serde(transparent)]
struct RawScraperSettings(Value);
impl_loadable_from_settings!(Scraper, RawScraperSettings -> ScraperSettings);
impl FromRawConf<RawScraperSettings> for ScraperSettings {
fn from_config_filtered(
raw: RawScraperSettings,
@ -107,59 +92,3 @@ impl FromRawConf<RawScraperSettings> for ScraperSettings {
})
}
}
impl FromRawConf<DeprecatedRawScraperSettings> for ScraperSettings {
fn from_config_filtered(
raw: DeprecatedRawScraperSettings,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
let mut err = ConfigParsingError::default();
let db = raw
.db
.ok_or_else(|| eyre!("Missing `db` connection string"))
.take_err(&mut err, || cwp + "db");
let Some(chains_to_scrape) = raw
.chainstoscrape
.ok_or_else(|| eyre!("Missing `chainstoscrape` list"))
.take_err(&mut err, || cwp + "chainstoscrape")
.map(|s| {
s.split(',')
.map(str::to_ascii_lowercase)
.collect::<Vec<_>>()
})
else {
return Err(err);
};
let base = raw
.base
.parse_config_with_filter::<Settings>(
cwp,
Some(&chains_to_scrape.iter().map(String::as_str).collect()),
)
.take_config_err(&mut err);
let chains_to_scrape = base
.as_ref()
.map(|base| {
chains_to_scrape
.iter()
.filter_map(|chain| {
base.lookup_domain(chain)
.context("Missing configuration for a chain in `chainstoscrape`")
.take_err(&mut err, || cwp + "chains" + chain)
})
.collect_vec()
})
.unwrap_or_default();
err.into_result(Self {
base: base.unwrap(),
db: db.unwrap(),
chains_to_scrape,
})
}
}

@ -11,9 +11,6 @@ use eyre::{eyre, Context};
use hyperlane_base::{
impl_loadable_from_settings,
settings::{
deprecated_parser::{
DeprecatedRawCheckpointSyncerConf, DeprecatedRawSettings, DeprecatedRawSignerConf,
},
parser::{RawAgentConf, RawAgentSignerConf, ValueParser},
CheckpointSyncerConf, Settings, SignerConf,
},
@ -45,34 +42,12 @@ pub struct ValidatorSettings {
pub interval: Duration,
}
/// Raw settings for `Validator`
#[derive(Debug, Deserialize, AsMut)]
#[serde(rename_all = "camelCase")]
pub struct DeprecatedRawValidatorSettings {
    // Agent-wide base settings, flattened into this object.
    #[serde(flatten, default)]
    #[as_mut]
    base: DeprecatedRawSettings,
    /// Database path (path on the fs)
    db: Option<String>,
    // Name of the chain to validate message on
    originchainname: Option<String>,
    /// The validator attestation signer
    #[serde(default)]
    validator: DeprecatedRawSignerConf,
    /// The checkpoint syncer configuration
    checkpointsyncer: Option<DeprecatedRawCheckpointSyncerConf>,
    /// The reorg_period in blocks
    reorgperiod: Option<StrOrInt>,
    /// How frequently to check for new checkpoints, in seconds
    interval: Option<StrOrInt>,
}
impl_loadable_from_settings!(Validator, DeprecatedRawValidatorSettings -> ValidatorSettings);
#[derive(Debug, Deserialize)]
#[serde(transparent)]
struct RawValidatorSettings(Value);
impl_loadable_from_settings!(Validator, RawValidatorSettings -> ValidatorSettings);
impl FromRawConf<RawValidatorSettings> for ValidatorSettings {
fn from_config_filtered(
raw: RawValidatorSettings,
@ -151,6 +126,14 @@ impl FromRawConf<RawValidatorSettings> for ValidatorSettings {
cfg_unwrap_all!(cwp, err: [base, origin_chain, validator, checkpoint_syncer]);
let mut base: Settings = base;
// If the origin chain is an EVM chain, then we can use the validator as the signer if needed.
if origin_chain.domain_protocol() == HyperlaneDomainProtocol::Ethereum {
if let Some(origin) = base.chains.get_mut(origin_chain.name()) {
origin.signer.get_or_insert_with(|| validator.clone());
}
}
err.into_result(Self {
base,
db,
@ -210,95 +193,3 @@ fn parse_checkpoint_syncer(syncer: ValueParser) -> ConfigResult<CheckpointSyncer
None => Err(err),
}
}
impl FromRawConf<DeprecatedRawValidatorSettings> for ValidatorSettings {
    /// Validate the deprecated raw validator settings, accumulating all
    /// parse errors under the given config path before failing.
    fn from_config_filtered(
        raw: DeprecatedRawValidatorSettings,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        let mut err = ConfigParsingError::default();
        let validator = raw
            .validator
            .parse_config::<SignerConf>(&cwp.join("validator"))
            .take_config_err(&mut err);
        // Checkpoint syncer config is required.
        let checkpoint_syncer = raw
            .checkpointsyncer
            .ok_or_else(|| eyre!("Missing `checkpointsyncer`"))
            .take_err(&mut err, || cwp + "checkpointsyncer")
            .and_then(|r| {
                r.parse_config(&cwp.join("checkpointsyncer"))
                    .take_config_err(&mut err)
            });
        let reorg_period = raw
            .reorgperiod
            .ok_or_else(|| eyre!("Missing `reorgperiod`"))
            .take_err(&mut err, || cwp + "reorgperiod")
            .and_then(|r| r.try_into().take_err(&mut err, || cwp + "reorgperiod"));
        // Interval is given in seconds; default to polling every 5s.
        let interval = raw
            .interval
            .and_then(|r| {
                r.try_into()
                    .map(Duration::from_secs)
                    .take_err(&mut err, || cwp + "interval")
            })
            .unwrap_or(Duration::from_secs(5));
        // The origin chain is required; bail early since the db path and
        // chain filter below both depend on it.
        let Some(origin_chain_name) = raw
            .originchainname
            .ok_or_else(|| eyre!("Missing `originchainname`"))
            .take_err(&mut err, || cwp + "originchainname")
            .map(|s| s.to_ascii_lowercase())
        else {
            return Err(err);
        };
        // Default db path: ./validator_db_<origin> under the current dir.
        let db = raw
            .db
            .and_then(|r| r.parse().take_err(&mut err, || cwp + "db"))
            .unwrap_or_else(|| {
                std::env::current_dir()
                    .unwrap()
                    .join(format!("validator_db_{origin_chain_name}"))
            });
        // Only parse the chain config for the origin chain.
        let base = raw
            .base
            .parse_config_with_filter::<Settings>(
                cwp,
                Some(&[origin_chain_name.as_ref()].into_iter().collect()),
            )
            .take_config_err(&mut err);
        let origin_chain = base.as_ref().and_then(|base| {
            base.lookup_domain(&origin_chain_name)
                .context("Missing configuration for the origin chain")
                .take_err(&mut err, || cwp + "chains" + &origin_chain_name)
        });
        // Returns `Err(err)` if any of these failed to parse above.
        cfg_unwrap_all!(cwp, err: [base, origin_chain, validator, checkpoint_syncer, reorg_period]);
        let mut base = base;
        if origin_chain.domain_protocol() == HyperlaneDomainProtocol::Ethereum {
            // if an EVM chain we can assume the chain signer is the validator signer when not
            // specified
            if let Some(chain) = base.chains.get_mut(origin_chain.name()) {
                chain.signer.get_or_insert_with(|| validator.clone());
            }
        }
        err.into_result(Self {
            base,
            db,
            origin_chain,
            validator,
            checkpoint_syncer,
            reorg_period,
            interval,
        })
    }
}

@ -3,16 +3,17 @@ use std::sync::Arc;
use std::time::{Duration, Instant};
use std::vec;
use eyre::Result;
use eyre::{bail, Result};
use hyperlane_core::MerkleTreeHook;
use prometheus::IntGauge;
use tokio::time::sleep;
use tracing::instrument;
use tracing::{debug, info};
use tracing::{error, instrument};
use hyperlane_base::{db::HyperlaneRocksDB, CheckpointSyncer, CoreMetrics};
use hyperlane_core::{
accumulator::incremental::IncrementalMerkle, Checkpoint, CheckpointWithMessageId,
HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneSignerExt, Mailbox,
HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneSignerExt,
};
use hyperlane_ethereum::SingletonSignerHandle;
@ -21,7 +22,7 @@ pub(crate) struct ValidatorSubmitter {
interval: Duration,
reorg_period: Option<NonZeroU64>,
signer: SingletonSignerHandle,
mailbox: Arc<dyn Mailbox>,
merkle_tree_hook: Arc<dyn MerkleTreeHook>,
checkpoint_syncer: Arc<dyn CheckpointSyncer>,
message_db: HyperlaneRocksDB,
metrics: ValidatorSubmitterMetrics,
@ -31,7 +32,7 @@ impl ValidatorSubmitter {
pub(crate) fn new(
interval: Duration,
reorg_period: u64,
mailbox: Arc<dyn Mailbox>,
merkle_tree_hook: Arc<dyn MerkleTreeHook>,
signer: SingletonSignerHandle,
checkpoint_syncer: Arc<dyn CheckpointSyncer>,
message_db: HyperlaneRocksDB,
@ -40,7 +41,7 @@ impl ValidatorSubmitter {
Self {
reorg_period: NonZeroU64::new(reorg_period),
interval,
mailbox,
merkle_tree_hook,
signer,
checkpoint_syncer,
message_db,
@ -52,112 +53,36 @@ impl ValidatorSubmitter {
Checkpoint {
root: tree.root(),
index: tree.index(),
mailbox_address: self.mailbox.address(),
mailbox_domain: self.mailbox.domain().id(),
merkle_tree_hook_address: self.merkle_tree_hook.address(),
mailbox_domain: self.merkle_tree_hook.domain().id(),
}
}
#[instrument(err, skip(self, tree), fields(domain=%self.mailbox.domain()))]
pub(crate) async fn checkpoint_submitter(
/// Submits signed checkpoints from index 0 until the target checkpoint (inclusive).
/// Runs idly forever once the target checkpoint is reached to avoid exiting the task.
#[instrument(err, skip(self), fields(domain=%self.merkle_tree_hook.domain()))]
pub(crate) async fn backfill_checkpoint_submitter(
self,
mut tree: IncrementalMerkle,
target_checkpoint: Option<Checkpoint>,
target_checkpoint: Checkpoint,
) -> Result<()> {
let mut checkpoint_queue = vec![];
let mut reached_target = false;
while !reached_target {
let correctness_checkpoint = if let Some(c) = target_checkpoint {
c
} else {
// lag by reorg period to match message indexing
let latest_checkpoint = self.mailbox.latest_checkpoint(self.reorg_period).await?;
self.metrics
.latest_checkpoint_observed
.set(latest_checkpoint.index as i64);
latest_checkpoint
};
// ingest available messages from DB
while let Some(message) = self
.message_db
.retrieve_message_by_nonce(tree.count() as u32)?
{
debug!(index = message.nonce, "Ingesting leaf to tree");
let message_id = message.id();
tree.ingest(message_id);
let checkpoint = self.checkpoint(&tree);
checkpoint_queue.push(CheckpointWithMessageId {
checkpoint,
message_id,
});
let mut tree = IncrementalMerkle::default();
self.submit_checkpoints_until_correctness_checkpoint(&mut tree, &target_checkpoint)
.await?;
// compare against every queued checkpoint to prevent ingesting past target
if checkpoint == correctness_checkpoint {
debug!(index = checkpoint.index, "Reached tree consistency");
// drain and sign all checkpoints in the queue
for queued_checkpoint in checkpoint_queue.drain(..) {
let existing = self
.checkpoint_syncer
.fetch_checkpoint(queued_checkpoint.index)
.await?;
if existing.is_some() {
debug!(
index = queued_checkpoint.index,
"Checkpoint already submitted"
);
continue;
}
let signed_checkpoint = self.signer.sign(queued_checkpoint).await?;
self.checkpoint_syncer
.write_checkpoint(&signed_checkpoint)
.await?;
debug!(
index = queued_checkpoint.index,
"Signed and submitted checkpoint"
);
// small sleep before signing next checkpoint to avoid rate limiting
sleep(Duration::from_millis(100)).await;
}
info!(index = checkpoint.index, "Signed all queued checkpoints");
self.metrics
.latest_checkpoint_processed
.set(checkpoint.index as i64);
// break out of submitter loop if target checkpoint is reached
reached_target = target_checkpoint.is_some();
break;
}
}
sleep(self.interval).await;
}
info!(
?target_checkpoint,
"Backfill checkpoint submitter successfully reached target checkpoint"
);
// TODO: remove this once validator is tolerant of tasks exiting
// TODO: remove this once validator is tolerant of tasks exiting.
loop {
sleep(Duration::from_secs(u64::MAX)).await;
}
}
pub(crate) async fn legacy_checkpoint_submitter(self) -> Result<()> {
// current_index will be None if the validator cannot find
// a previously signed checkpoint
let mut current_index = self.checkpoint_syncer.latest_index().await?;
if let Some(current_index) = current_index {
self.metrics
.legacy_latest_checkpoint_processed
.set(current_index as i64);
}
/// Submits signed checkpoints indefinitely, starting from the `tree`.
#[instrument(err, skip(self, tree), fields(domain=%self.merkle_tree_hook.domain()))]
pub(crate) async fn checkpoint_submitter(self, mut tree: IncrementalMerkle) -> Result<()> {
// How often to log checkpoint info - once every minute
let checkpoint_info_log_period = Duration::from_secs(60);
// The instant in which we last logged checkpoint info, if at all
@ -176,73 +101,197 @@ impl ValidatorSubmitter {
};
loop {
// Check the latest checkpoint
let latest_checkpoint = self.mailbox.latest_checkpoint(self.reorg_period).await?;
// Lag by reorg period because this is our correctness checkpoint.
let latest_checkpoint = self
.merkle_tree_hook
.latest_checkpoint(self.reorg_period)
.await?;
self.metrics
.legacy_latest_checkpoint_observed
.latest_checkpoint_observed
.set(latest_checkpoint.index as i64);
// Occasional info to make it clear to a validator operator whether things are
// working correctly without using the debug log level.
if should_log_checkpoint_info() {
info!(
latest_signed_checkpoint_index=?current_index,
latest_known_checkpoint_index=?latest_checkpoint.index,
"Latest checkpoint infos"
?latest_checkpoint,
tree_count = tree.count(),
"Latest checkpoint"
);
}
debug!(
latest_signed_checkpoint_index=?current_index,
latest_known_checkpoint_index=?latest_checkpoint.index,
"Polled latest checkpoint"
);
// This may occur e.g. if RPC providers are unreliable and make calls against
// inconsistent block tips.
//
// In this case, we just sleep a bit until we fetch a new latest checkpoint
// that at least meets the tree.
if tree_exceeds_checkpoint(&latest_checkpoint, &tree) {
debug!(
?latest_checkpoint,
tree_count = tree.count(),
"Latest checkpoint is behind tree, sleeping briefly"
);
sleep(self.interval).await;
continue;
}
// If current_index is None, we were unable to find a previously
// signed checkpoint, and we should sign the latest checkpoint.
// This ensures that we still sign even if the latest checkpoint
// has index 0.
if current_index
.map(|i| i < latest_checkpoint.index)
.unwrap_or(true)
self.submit_checkpoints_until_correctness_checkpoint(&mut tree, &latest_checkpoint)
.await?;
self.metrics
.latest_checkpoint_processed
.set(latest_checkpoint.index as i64);
sleep(self.interval).await;
}
}
/// Submits signed checkpoints relating to the given tree until the correctness checkpoint (inclusive).
/// Only submits the signed checkpoints once the correctness checkpoint is reached.
async fn submit_checkpoints_until_correctness_checkpoint(
&self,
tree: &mut IncrementalMerkle,
correctness_checkpoint: &Checkpoint,
) -> Result<()> {
// This should never be called with a tree that is ahead of the correctness checkpoint.
assert!(
!tree_exceeds_checkpoint(correctness_checkpoint, tree),
"tree (count: {}) is ahead of correctness checkpoint {:?}",
tree.count(),
correctness_checkpoint,
);
// All intermediate checkpoints will be stored here and signed once the correctness
// checkpoint is reached.
let mut checkpoint_queue = vec![];
// If the correctness checkpoint is ahead of the tree, we need to ingest more messages.
//
// tree.index() will panic if the tree is empty, so we use tree.count() instead
// and convert the correctness_checkpoint.index to a count by adding 1.
while tree.count() as u32 <= correctness_checkpoint.index {
if let Some(insertion) = self
.message_db
.retrieve_merkle_tree_insertion_by_leaf_index(&(tree.count() as u32))?
{
let signed_checkpoint = self.signer.sign(latest_checkpoint).await?;
debug!(
index = insertion.index(),
queue_length = checkpoint_queue.len(),
"Ingesting leaf to tree"
);
let message_id = insertion.message_id();
tree.ingest(message_id);
info!(signed_checkpoint = ?signed_checkpoint, signer=?self.signer, "Signed new latest checkpoint");
current_index = Some(latest_checkpoint.index);
let checkpoint = self.checkpoint(tree);
self.checkpoint_syncer
.legacy_write_checkpoint(&signed_checkpoint)
.await?;
self.metrics
.legacy_latest_checkpoint_processed
.set(signed_checkpoint.value.index as i64);
checkpoint_queue.push(CheckpointWithMessageId {
checkpoint,
message_id,
});
} else {
// If we haven't yet indexed the next merkle tree insertion but know that
// it will soon exist (because we know the correctness checkpoint), wait a bit and
// try again.
sleep(Duration::from_millis(100)).await
}
}
sleep(self.interval).await;
// At this point we know that correctness_checkpoint.index == tree.index().
assert_eq!(
correctness_checkpoint.index,
tree.index(),
"correctness checkpoint index {} != tree index {}",
correctness_checkpoint.index,
tree.index(),
);
let checkpoint = self.checkpoint(tree);
// If the tree's checkpoint doesn't match the correctness checkpoint, something went wrong
// and we bail loudly.
if checkpoint != *correctness_checkpoint {
error!(
?checkpoint,
?correctness_checkpoint,
"Incorrect tree root, something went wrong"
);
bail!("Incorrect tree root, something went wrong");
}
if !checkpoint_queue.is_empty() {
info!(
index = checkpoint.index,
queue_len = checkpoint_queue.len(),
"Reached tree consistency"
);
self.sign_and_submit_checkpoints(checkpoint_queue).await?;
info!(
index = checkpoint.index,
"Signed all queued checkpoints until index"
);
}
Ok(())
}
/// Signs and submits any previously unsubmitted checkpoints.
async fn sign_and_submit_checkpoints(
&self,
checkpoints: Vec<CheckpointWithMessageId>,
) -> Result<()> {
let last_checkpoint = checkpoints.as_slice()[checkpoints.len() - 1];
for queued_checkpoint in checkpoints {
let existing = self
.checkpoint_syncer
.fetch_checkpoint(queued_checkpoint.index)
.await?;
if existing.is_some() {
debug!(
index = queued_checkpoint.index,
"Checkpoint already submitted"
);
continue;
}
let signed_checkpoint = self.signer.sign(queued_checkpoint).await?;
self.checkpoint_syncer
.write_checkpoint(&signed_checkpoint)
.await?;
debug!(
index = queued_checkpoint.index,
"Signed and submitted checkpoint"
);
// TODO: move these into S3 implementations
// small sleep before signing next checkpoint to avoid rate limiting
sleep(Duration::from_millis(100)).await;
}
self.checkpoint_syncer
.update_latest_index(last_checkpoint.index)
.await?;
Ok(())
}
}
/// Returns whether the tree holds more leaves than the checkpoint accounts for.
fn tree_exceeds_checkpoint(checkpoint: &Checkpoint, tree: &IncrementalMerkle) -> bool {
    // Calling tree.index() would panic on an empty tree, so the comparison is
    // done in "count" space instead: a checkpoint at index i covers i + 1 leaves.
    let tree_count = tree.count() as u32;
    tree_count > checkpoint.index + 1
}
/// Prometheus gauges tracking checkpoint progress for the validator submitter.
#[derive(Clone)]
pub(crate) struct ValidatorSubmitterMetrics {
    // Index of the latest correctness checkpoint fetched from the merkle tree hook.
    latest_checkpoint_observed: IntGauge,
    // Index of the latest checkpoint the submitter has finished processing.
    latest_checkpoint_processed: IntGauge,
    // Counterparts for the legacy (pre-v3) checkpoint submitter; same gauge
    // family, distinguished by the "legacy_validator_*" label values.
    legacy_latest_checkpoint_observed: IntGauge,
    legacy_latest_checkpoint_processed: IntGauge,
}
impl ValidatorSubmitterMetrics {
pub fn new(metrics: &CoreMetrics, mailbox_chain: &HyperlaneDomain) -> Self {
let chain_name = mailbox_chain.name();
Self {
legacy_latest_checkpoint_observed: metrics
.latest_checkpoint()
.with_label_values(&["legacy_validator_observed", chain_name]),
legacy_latest_checkpoint_processed: metrics
.latest_checkpoint()
.with_label_values(&["legacy_validator_processed", chain_name]),
latest_checkpoint_observed: metrics
.latest_checkpoint()
.with_label_values(&["validator_observed", chain_name]),

@ -3,15 +3,16 @@ use std::{num::NonZeroU64, sync::Arc, time::Duration};
use async_trait::async_trait;
use derive_more::AsRef;
use eyre::Result;
use futures_util::future::ready;
use hyperlane_base::{
db::{HyperlaneRocksDB, DB},
run_all, BaseAgent, CheckpointSyncer, ContractSyncMetrics, CoreMetrics, HyperlaneAgentCore,
MessageContractSync,
WatermarkContractSync,
};
use hyperlane_core::{
accumulator::incremental::IncrementalMerkle, Announcement, ChainResult, HyperlaneChain,
HyperlaneContract, HyperlaneDomain, HyperlaneSigner, HyperlaneSignerExt, Mailbox, TxOutcome,
ValidatorAnnounce, H256, U256,
Announcement, ChainResult, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneSigner,
HyperlaneSignerExt, Mailbox, MerkleTreeHook, MerkleTreeInsertion, TxOutcome, ValidatorAnnounce,
H256, U256,
};
use hyperlane_ethereum::{SingletonSigner, SingletonSignerHandle};
use tokio::{task::JoinHandle, time::sleep};
@ -29,8 +30,9 @@ pub struct Validator {
#[as_ref]
core: HyperlaneAgentCore,
db: HyperlaneRocksDB,
message_sync: Arc<MessageContractSync>,
merkle_tree_hook_sync: Arc<WatermarkContractSync<MerkleTreeInsertion>>,
mailbox: Arc<dyn Mailbox>,
merkle_tree_hook: Arc<dyn MerkleTreeHook>,
validator_announce: Arc<dyn ValidatorAnnounce>,
signer: SingletonSignerHandle,
// temporary holder until `run` is called
@ -39,6 +41,7 @@ pub struct Validator {
interval: Duration,
checkpoint_syncer: Arc<dyn CheckpointSyncer>,
}
#[async_trait]
impl BaseAgent for Validator {
const AGENT_NAME: &'static str = "validator";
@ -62,14 +65,18 @@ impl BaseAgent for Validator {
.build_mailbox(&settings.origin_chain, &metrics)
.await?;
let merkle_tree_hook = settings
.build_merkle_tree_hook(&settings.origin_chain, &metrics)
.await?;
let validator_announce = settings
.build_validator_announce(&settings.origin_chain, &metrics)
.await?;
let contract_sync_metrics = Arc::new(ContractSyncMetrics::new(&metrics));
let message_sync = settings
.build_message_indexer(
let merkle_tree_hook_sync = settings
.build_merkle_tree_hook_indexer(
&settings.origin_chain,
&metrics,
&contract_sync_metrics,
@ -83,7 +90,8 @@ impl BaseAgent for Validator {
core,
db: msg_db,
mailbox: mailbox.into(),
message_sync,
merkle_tree_hook: merkle_tree_hook.into(),
merkle_tree_hook_sync,
validator_announce: validator_announce.into(),
signer,
signer_instance: Some(Box::new(signer_instance)),
@ -112,22 +120,26 @@ impl BaseAgent for Validator {
let reorg_period = NonZeroU64::new(self.reorg_period);
// Ensure that the mailbox has count > 0 before we begin indexing
// Ensure that the merkle tree hook has count > 0 before we begin indexing
// messages or submitting checkpoints.
while self
.mailbox
.count(reorg_period)
.await
.expect("Failed to get count of mailbox")
== 0
{
info!("Waiting for first message to mailbox");
sleep(self.interval).await;
}
tasks.push(self.run_message_sync().await);
for checkpoint_sync_task in self.run_checkpoint_submitters().await {
tasks.push(checkpoint_sync_task);
loop {
match self.merkle_tree_hook.count(reorg_period).await {
Ok(0) => {
info!("Waiting for first message in merkle tree hook");
sleep(self.interval).await;
}
Ok(_) => {
tasks.push(self.run_merkle_tree_hook_sync().await);
for checkpoint_sync_task in self.run_checkpoint_submitters().await {
tasks.push(checkpoint_sync_task);
}
break;
}
_ => {
// Future that immediately resolves
return tokio::spawn(ready(Ok(()))).instrument(info_span!("Validator"));
}
}
}
run_all(tasks)
@ -135,64 +147,54 @@ impl BaseAgent for Validator {
}
impl Validator {
async fn run_message_sync(&self) -> Instrumented<JoinHandle<Result<()>>> {
async fn run_merkle_tree_hook_sync(&self) -> Instrumented<JoinHandle<Result<()>>> {
let index_settings =
self.as_ref().settings.chains[self.origin_chain.name()].index_settings();
let contract_sync = self.message_sync.clone();
let cursor = contract_sync
.forward_backward_message_sync_cursor(index_settings)
.await;
tokio::spawn(async move {
contract_sync
.clone()
.sync("dispatched_messages", cursor)
.await
})
.instrument(info_span!("MailboxMessageSyncer"))
let contract_sync = self.merkle_tree_hook_sync.clone();
let cursor = contract_sync.rate_limited_cursor(index_settings).await;
tokio::spawn(async move { contract_sync.clone().sync("merkle_tree_hook", cursor).await })
.instrument(info_span!("MerkleTreeHookSyncer"))
}
async fn run_checkpoint_submitters(&self) -> Vec<Instrumented<JoinHandle<Result<()>>>> {
let submitter = ValidatorSubmitter::new(
self.interval,
self.reorg_period,
self.mailbox.clone(),
self.merkle_tree_hook.clone(),
self.signer.clone(),
self.checkpoint_syncer.clone(),
self.db.clone(),
ValidatorSubmitterMetrics::new(&self.core.metrics, &self.origin_chain),
);
let empty_tree = IncrementalMerkle::default();
let reorg_period = NonZeroU64::new(self.reorg_period);
let tip_tree = self
.mailbox
.merkle_tree_hook
.tree(reorg_period)
.await
.expect("failed to get mailbox tree");
assert!(tip_tree.count() > 0, "mailbox tree is empty");
.expect("failed to get merkle tree");
// This function is only called after we have already checked that the
// merkle tree hook has count > 0, but we assert to be extra sure this is
// the case.
assert!(tip_tree.count() > 0, "merkle tree is empty");
let backfill_target = submitter.checkpoint(&tip_tree);
let legacy_submitter = submitter.clone();
let backfill_submitter = submitter.clone();
let mut tasks = vec![];
tasks.push(
tokio::spawn(async move {
backfill_submitter
.checkpoint_submitter(empty_tree, Some(backfill_target))
.backfill_checkpoint_submitter(backfill_target)
.await
})
.instrument(info_span!("BackfillCheckpointSubmitter")),
);
tasks.push(
tokio::spawn(async move { submitter.checkpoint_submitter(tip_tree, None).await })
tokio::spawn(async move { submitter.checkpoint_submitter(tip_tree).await })
.instrument(info_span!("TipCheckpointSubmitter")),
);
tasks.push(
tokio::spawn(async move { legacy_submitter.legacy_checkpoint_submitter().await })
.instrument(info_span!("LegacyCheckpointSubmitter")),
);
tasks
}

@ -13,7 +13,7 @@ if [[ -z $TAG ]]; then
# compatible with our K8s infrastructure.
# More info: https://stackoverflow.com/a/71102144
if [[ $USE_DEFAULT_PLATFORM != "true" ]]; then
PLATFORM="--platform=linux/amd64"
PLATFORM="--platform=linux/amd64/v8"
fi
fi

@ -60,4 +60,4 @@
"stateMutability": "nonpayable",
"type": "function"
}
]
]

@ -8,6 +8,12 @@
"name": "messageId",
"type": "bytes32"
},
{
"indexed": true,
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"indexed": false,
"internalType": "uint256",

@ -83,12 +83,12 @@
},
{
"inputs": [],
"name": "count",
"name": "defaultHook",
"outputs": [
{
"internalType": "uint32",
"internalType": "contract IPostDispatchHook",
"name": "",
"type": "uint32"
"type": "address"
}
],
"stateMutability": "view",
@ -130,17 +130,22 @@
"inputs": [
{
"internalType": "uint32",
"name": "_destinationDomain",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "_recipientAddress",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "_messageBody",
"name": "body",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "defaultHookMetadata",
"type": "bytes"
}
],
@ -148,22 +153,98 @@
"outputs": [
{
"internalType": "bytes32",
"name": "",
"name": "messageId",
"type": "bytes32"
}
],
"stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "body",
"type": "bytes"
},
{
"internalType": "contract IPostDispatchHook",
"name": "customHook",
"type": "address"
},
{
"internalType": "bytes",
"name": "customHookMetadata",
"type": "bytes"
}
],
"name": "dispatch",
"outputs": [
{
"internalType": "bytes32",
"name": "messageId",
"type": "bytes32"
}
],
"stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
}
],
"name": "dispatch",
"outputs": [
{
"internalType": "bytes32",
"name": "messageId",
"type": "bytes32"
}
],
"stateMutability": "nonpayable",
"stateMutability": "payable",
"type": "function"
},
{
"inputs": [],
"name": "latestCheckpoint",
"name": "latestDispatchedId",
"outputs": [
{
"internalType": "bytes32",
"name": "",
"type": "bytes32"
},
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "localDomain",
"outputs": [
{
"internalType": "uint32",
"name": "",
@ -175,7 +256,7 @@
},
{
"inputs": [],
"name": "localDomain",
"name": "nonce",
"outputs": [
{
"internalType": "uint32",
@ -190,25 +271,88 @@
"inputs": [
{
"internalType": "bytes",
"name": "_metadata",
"name": "metadata",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "_message",
"name": "message",
"type": "bytes"
}
],
"name": "process",
"outputs": [],
"stateMutability": "nonpayable",
"stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
}
],
"name": "quoteDispatch",
"outputs": [
{
"internalType": "uint256",
"name": "fee",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "defaultHookMetadata",
"type": "bytes"
}
],
"name": "quoteDispatch",
"outputs": [
{
"internalType": "uint256",
"name": "fee",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "_recipient",
"name": "recipient",
"type": "address"
}
],
@ -216,7 +360,7 @@
"outputs": [
{
"internalType": "contract IInterchainSecurityModule",
"name": "",
"name": "module",
"type": "address"
}
],
@ -225,12 +369,12 @@
},
{
"inputs": [],
"name": "root",
"name": "requiredHook",
"outputs": [
{
"internalType": "bytes32",
"internalType": "contract IPostDispatchHook",
"name": "",
"type": "bytes32"
"type": "address"
}
],
"stateMutability": "view",

@ -10,6 +10,19 @@
"stateMutability": "nonpayable",
"type": "constructor"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"internalType": "address",
"name": "hook",
"type": "address"
}
],
"name": "DefaultHookSet",
"type": "event"
},
{
"anonymous": false,
"inputs": [
@ -99,12 +112,6 @@
"name": "OwnershipTransferred",
"type": "event"
},
{
"anonymous": false,
"inputs": [],
"name": "Paused",
"type": "event"
},
{
"anonymous": false,
"inputs": [
@ -145,22 +152,16 @@
},
{
"anonymous": false,
"inputs": [],
"name": "Unpaused",
"type": "event"
},
{
"inputs": [],
"name": "MAX_MESSAGE_BODY_BYTES",
"outputs": [
"inputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
"indexed": true,
"internalType": "address",
"name": "hook",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
"name": "RequiredHookSet",
"type": "event"
},
{
"inputs": [],
@ -177,12 +178,12 @@
},
{
"inputs": [],
"name": "count",
"name": "defaultHook",
"outputs": [
{
"internalType": "uint32",
"internalType": "contract IPostDispatchHook",
"name": "",
"type": "uint32"
"type": "address"
}
],
"stateMutability": "view",
@ -205,7 +206,7 @@
"inputs": [
{
"internalType": "bytes32",
"name": "",
"name": "_id",
"type": "bytes32"
}
],
@ -220,6 +221,92 @@
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "deployedBlock",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "metadata",
"type": "bytes"
},
{
"internalType": "contract IPostDispatchHook",
"name": "hook",
"type": "address"
}
],
"name": "dispatch",
"outputs": [
{
"internalType": "bytes32",
"name": "",
"type": "bytes32"
}
],
"stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "hookMetadata",
"type": "bytes"
}
],
"name": "dispatch",
"outputs": [
{
"internalType": "bytes32",
"name": "",
"type": "bytes32"
}
],
"stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
@ -246,7 +333,7 @@
"type": "bytes32"
}
],
"stateMutability": "nonpayable",
"stateMutability": "payable",
"type": "function"
},
{
@ -260,6 +347,16 @@
"internalType": "address",
"name": "_defaultIsm",
"type": "address"
},
{
"internalType": "address",
"name": "_defaultHook",
"type": "address"
},
{
"internalType": "address",
"name": "_requiredHook",
"type": "address"
}
],
"name": "initialize",
@ -269,12 +366,12 @@
},
{
"inputs": [],
"name": "isPaused",
"name": "latestDispatchedId",
"outputs": [
{
"internalType": "bool",
"internalType": "bytes32",
"name": "",
"type": "bool"
"type": "bytes32"
}
],
"stateMutability": "view",
@ -282,13 +379,8 @@
},
{
"inputs": [],
"name": "latestCheckpoint",
"name": "localDomain",
"outputs": [
{
"internalType": "bytes32",
"name": "",
"type": "bytes32"
},
{
"internalType": "uint32",
"name": "",
@ -300,7 +392,7 @@
},
{
"inputs": [],
"name": "localDomain",
"name": "nonce",
"outputs": [
{
"internalType": "uint32",
@ -324,13 +416,6 @@
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "pause",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
@ -346,7 +431,147 @@
],
"name": "process",
"outputs": [],
"stateMutability": "nonpayable",
"stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
"internalType": "bytes32",
"name": "_id",
"type": "bytes32"
}
],
"name": "processedAt",
"outputs": [
{
"internalType": "uint48",
"name": "",
"type": "uint48"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "bytes32",
"name": "_id",
"type": "bytes32"
}
],
"name": "processor",
"outputs": [
{
"internalType": "address",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "metadata",
"type": "bytes"
},
{
"internalType": "contract IPostDispatchHook",
"name": "hook",
"type": "address"
}
],
"name": "quoteDispatch",
"outputs": [
{
"internalType": "uint256",
"name": "fee",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
}
],
"name": "quoteDispatch",
"outputs": [
{
"internalType": "uint256",
"name": "fee",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint32",
"name": "destinationDomain",
"type": "uint32"
},
{
"internalType": "bytes32",
"name": "recipientAddress",
"type": "bytes32"
},
{
"internalType": "bytes",
"name": "messageBody",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "defaultHookMetadata",
"type": "bytes"
}
],
"name": "quoteDispatch",
"outputs": [
{
"internalType": "uint256",
"name": "fee",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
@ -377,12 +602,12 @@
},
{
"inputs": [],
"name": "root",
"name": "requiredHook",
"outputs": [
{
"internalType": "bytes32",
"internalType": "contract IPostDispatchHook",
"name": "",
"type": "bytes32"
"type": "address"
}
],
"stateMutability": "view",
@ -392,11 +617,11 @@
"inputs": [
{
"internalType": "address",
"name": "_module",
"name": "_hook",
"type": "address"
}
],
"name": "setDefaultIsm",
"name": "setDefaultHook",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
@ -405,31 +630,37 @@
"inputs": [
{
"internalType": "address",
"name": "newOwner",
"name": "_module",
"type": "address"
}
],
"name": "transferOwnership",
"name": "setDefaultIsm",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [],
"name": "tree",
"outputs": [
"inputs": [
{
"internalType": "uint256",
"name": "count",
"type": "uint256"
"internalType": "address",
"name": "_hook",
"type": "address"
}
],
"stateMutability": "view",
"name": "setRequiredHook",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [],
"name": "unpause",
"inputs": [
{
"internalType": "address",
"name": "newOwner",
"type": "address"
}
],
"name": "transferOwnership",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"

@ -0,0 +1,156 @@
[
{
"inputs": [
{
"internalType": "address",
"name": "_mailbox",
"type": "address"
}
],
"stateMutability": "nonpayable",
"type": "constructor"
},
{
"anonymous": false,
"inputs": [
{
"indexed": false,
"internalType": "bytes32",
"name": "messageId",
"type": "bytes32"
},
{
"indexed": false,
"internalType": "uint32",
"name": "index",
"type": "uint32"
}
],
"name": "InsertedIntoTree",
"type": "event"
},
{
"inputs": [],
"name": "count",
"outputs": [
{
"internalType": "uint32",
"name": "",
"type": "uint32"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "deployedBlock",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "latestCheckpoint",
"outputs": [
{
"internalType": "bytes32",
"name": "",
"type": "bytes32"
},
{
"internalType": "uint32",
"name": "",
"type": "uint32"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "bytes",
"name": "",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "message",
"type": "bytes"
}
],
"name": "postDispatch",
"outputs": [],
"stateMutability": "payable",
"type": "function"
},
{
"inputs": [
{
"internalType": "bytes",
"name": "",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "",
"type": "bytes"
}
],
"name": "quoteDispatch",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "pure",
"type": "function"
},
{
"inputs": [],
"name": "root",
"outputs": [
{
"internalType": "bytes32",
"name": "",
"type": "bytes32"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "tree",
"outputs": [
{
"components": [
{
"internalType": "bytes32[32]",
"name": "branch",
"type": "bytes32[32]"
},
{
"internalType": "uint256",
"name": "count",
"type": "uint256"
}
],
"internalType": "struct MerkleLib.Tree",
"name": "",
"type": "tuple"
}
],
"stateMutability": "view",
"type": "function"
}
]

@ -1,5 +1,3 @@
use hyperlane_core::config::*;
use serde::Deserialize;
use url::Url;
/// Ethereum connection configuration
@ -26,96 +24,3 @@ pub enum ConnectionConf {
url: Url,
},
}
/// Ethereum connection configuration
///
/// Unvalidated form deserialized straight from the agent config; it is
/// converted into a [`ConnectionConf`] via the `FromRawConf` impl below.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RawConnectionConf {
    /// The type of connection to use (defaults to "http" when absent)
    #[serde(rename = "type")]
    connection_type: Option<String>,
    /// A single url to connect to
    url: Option<String>,
    /// A comma separated list of urls to connect to
    urls: Option<String>,
}
/// Error type when parsing a connection configuration.
#[derive(Debug, thiserror::Error)]
pub enum ConnectionConfError {
    /// Unknown connection type was specified
    #[error("Unsupported connection type '{0}'")]
    UnsupportedConnectionType(String),
    /// The url was not specified
    #[error("Missing `url` for connection configuration")]
    MissingConnectionUrl,
    /// The urls were not specified
    #[error("Missing `urls` for connection configuration")]
    MissingConnectionUrls,
    /// The `url` could not be parsed
    #[error("Invalid `url` for connection configuration: `{0}` ({1})")]
    InvalidConnectionUrl(String, url::ParseError),
    /// One of the urls could not be parsed
    #[error("Invalid `urls` list for connection configuration: `{0}` ({1})")]
    InvalidConnectionUrls(String, url::ParseError),
    /// The url was empty
    #[error("The `url` value is empty")]
    EmptyUrl,
    /// The urls were empty
    #[error("The `urls` value is empty")]
    EmptyUrls,
}
impl FromRawConf<RawConnectionConf> for ConnectionConf {
    /// Validates a [`RawConnectionConf`] into a [`ConnectionConf`].
    ///
    /// The connection `type` decides which of the `url` / `urls` fields is
    /// required; the other field's parse error is discarded.
    fn from_config_filtered(
        raw: RawConnectionConf,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        use ConnectionConfError::*;
        // Default to a plain single-url HTTP connection when no type is given.
        let connection_type = raw.connection_type.as_deref().unwrap_or("http");
        // Eagerly attempt both parses and keep the Results; which one is
        // consulted (and whose error is surfaced) depends on the connection
        // type matched below.
        // Parse `urls` as a comma-separated list of URLs.
        let urls = (|| -> ConfigResult<Vec<Url>> {
            raw.urls
                .as_ref()
                .ok_or(MissingConnectionUrls)
                .into_config_result(|| cwp + "urls")?
                .split(',')
                .map(|s| s.parse())
                .collect::<Result<Vec<_>, _>>()
                .map_err(|e| InvalidConnectionUrls(raw.urls.clone().unwrap(), e))
                .into_config_result(|| cwp + "urls")
        })();
        // Parse `url` as a single URL.
        let url = (|| -> ConfigResult<Url> {
            raw.url
                .as_ref()
                .ok_or(MissingConnectionUrl)
                .into_config_result(|| cwp + "url")?
                .parse()
                .map_err(|e| InvalidConnectionUrl(raw.url.clone().unwrap(), e))
                .into_config_result(|| cwp + "url")
        })();
        // For multi-url variants, prefer `urls`; fall back to wrapping a single
        // `url` in a one-element list. If neither parsed, report the `urls`
        // error (the multi-url field is the canonical one for these variants).
        macro_rules! make_with_urls {
            ($variant:ident) => {
                if let Ok(urls) = urls {
                    Ok(Self::$variant { urls })
                } else if let Ok(url) = url {
                    Ok(Self::$variant { urls: vec![url] })
                } else {
                    Err(urls.unwrap_err())
                }
            };
        }
        match connection_type {
            "httpQuorum" => make_with_urls!(HttpQuorum),
            "httpFallback" => make_with_urls!(HttpFallback),
            "http" => Ok(Self::Http { url: url? }),
            "ws" => Ok(Self::Ws { url: url? }),
            t => Err(UnsupportedConnectionType(t.into())).into_config_result(|| cwp.join("type")),
        }
    }
}

@ -31,7 +31,7 @@ where
pub struct InterchainGasPaymasterIndexerBuilder {
pub mailbox_address: H160,
pub finality_blocks: u32,
pub reorg_period: u32,
}
#[async_trait]
@ -46,7 +46,7 @@ impl BuildableWithProvider for InterchainGasPaymasterIndexerBuilder {
Box::new(EthereumInterchainGasPaymasterIndexer::new(
Arc::new(provider),
locator,
self.finality_blocks,
self.reorg_period,
))
}
}
@ -59,7 +59,7 @@ where
{
contract: Arc<EthereumInterchainGasPaymasterInternal<M>>,
provider: Arc<M>,
finality_blocks: u32,
reorg_period: u32,
}
impl<M> EthereumInterchainGasPaymasterIndexer<M>
@ -67,14 +67,14 @@ where
M: Middleware + 'static,
{
/// Create new EthereumInterchainGasPaymasterIndexer
pub fn new(provider: Arc<M>, locator: &ContractLocator, finality_blocks: u32) -> Self {
pub fn new(provider: Arc<M>, locator: &ContractLocator, reorg_period: u32) -> Self {
Self {
contract: Arc::new(EthereumInterchainGasPaymasterInternal::new(
locator.address,
provider.clone(),
)),
provider,
finality_blocks,
reorg_period,
}
}
}
@ -103,6 +103,7 @@ where
(
InterchainGasPayment {
message_id: H256::from(log.message_id),
destination: log.destination_domain,
payment: log.payment.into(),
gas_amount: log.gas_amount.into(),
},
@ -120,7 +121,7 @@ where
.await
.map_err(ChainCommunicationError::from_other)?
.as_u32()
.saturating_sub(self.finality_blocks))
.saturating_sub(self.reorg_period))
}
}

@ -12,8 +12,8 @@ use ethers::prelude::{abi, Lazy, Middleware};
pub use self::{
aggregation_ism::*, ccip_read_ism::*, config::*, config::*, interchain_gas::*,
interchain_gas::*, interchain_security_module::*, interchain_security_module::*, mailbox::*,
mailbox::*, multisig_ism::*, provider::*, routing_ism::*, rpc_clients::*, signers::*,
singleton_signer::*, trait_builder::*, validator_announce::*,
mailbox::*, merkle_tree_hook::*, multisig_ism::*, provider::*, routing_ism::*, rpc_clients::*,
signers::*, singleton_signer::*, trait_builder::*, validator_announce::*,
};
#[cfg(not(doctest))]
@ -38,6 +38,10 @@ mod interchain_gas;
#[cfg(not(doctest))]
mod interchain_security_module;
/// Merkle tree hook abi
#[cfg(not(doctest))]
mod merkle_tree_hook;
/// MultisigIsm abi
#[cfg(not(doctest))]
mod multisig_ism;

@ -12,24 +12,19 @@ use ethers::prelude::Middleware;
use ethers_contract::builders::ContractCall;
use tracing::instrument;
use hyperlane_core::accumulator::incremental::IncrementalMerkle;
use hyperlane_core::accumulator::TREE_DEPTH;
use hyperlane_core::{
utils::fmt_bytes, ChainCommunicationError, ChainResult, Checkpoint, ContractLocator,
HyperlaneAbi, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneMessage,
HyperlaneProtocolError, HyperlaneProvider, Indexer, LogMeta, Mailbox, RawHyperlaneMessage,
SequenceIndexer, TxCostEstimate, TxOutcome, H160, H256, U256,
utils::fmt_bytes, ChainCommunicationError, ChainResult, ContractLocator, HyperlaneAbi,
HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneMessage, HyperlaneProtocolError,
HyperlaneProvider, Indexer, LogMeta, Mailbox, RawHyperlaneMessage, SequenceIndexer,
TxCostEstimate, TxOutcome, H160, H256, U256,
};
use crate::contracts::arbitrum_node_interface::ArbitrumNodeInterface;
use crate::contracts::i_mailbox::{IMailbox as EthereumMailboxInternal, ProcessCall, IMAILBOX_ABI};
use crate::trait_builder::BuildableWithProvider;
use crate::tx::{fill_tx_gas_params, report_tx};
use crate::tx::{call_with_lag, fill_tx_gas_params, report_tx};
use crate::EthereumProvider;
/// derived from `forge inspect Mailbox storage --pretty`
const MERKLE_TREE_CONTRACT_SLOT: u32 = 152;
impl<M> std::fmt::Display for EthereumMailboxInternal<M>
where
M: Middleware,
@ -40,7 +35,7 @@ where
}
pub struct SequenceIndexerBuilder {
pub finality_blocks: u32,
pub reorg_period: u32,
}
#[async_trait]
@ -55,13 +50,13 @@ impl BuildableWithProvider for SequenceIndexerBuilder {
Box::new(EthereumMailboxIndexer::new(
Arc::new(provider),
locator,
self.finality_blocks,
self.reorg_period,
))
}
}
pub struct DeliveryIndexerBuilder {
pub finality_blocks: u32,
pub reorg_period: u32,
}
#[async_trait]
@ -76,7 +71,7 @@ impl BuildableWithProvider for DeliveryIndexerBuilder {
Box::new(EthereumMailboxIndexer::new(
Arc::new(provider),
locator,
self.finality_blocks,
self.reorg_period,
))
}
}
@ -89,7 +84,7 @@ where
{
contract: Arc<EthereumMailboxInternal<M>>,
provider: Arc<M>,
finality_blocks: u32,
reorg_period: u32,
}
impl<M> EthereumMailboxIndexer<M>
@ -97,7 +92,7 @@ where
M: Middleware + 'static,
{
/// Create new EthereumMailboxIndexer
pub fn new(provider: Arc<M>, locator: &ContractLocator, finality_blocks: u32) -> Self {
pub fn new(provider: Arc<M>, locator: &ContractLocator, reorg_period: u32) -> Self {
let contract = Arc::new(EthereumMailboxInternal::new(
locator.address,
provider.clone(),
@ -105,7 +100,7 @@ where
Self {
contract,
provider,
finality_blocks,
reorg_period,
}
}
@ -117,7 +112,7 @@ where
.await
.map_err(ChainCommunicationError::from_other)?
.as_u32()
.saturating_sub(self.finality_blocks))
.saturating_sub(self.reorg_period))
}
}
@ -159,9 +154,7 @@ where
#[instrument(err, skip(self))]
async fn sequence_and_tip(&self) -> ChainResult<(Option<u32>, u32)> {
let tip = Indexer::<HyperlaneMessage>::get_finalized_block_number(self).await?;
let base_call = self.contract.count();
let call_at_tip = base_call.block(u64::from(tip));
let sequence = call_at_tip.call().await?;
let sequence = self.contract.nonce().block(u64::from(tip)).call().await?;
Ok((Some(sequence), tip))
}
}
@ -307,20 +300,9 @@ where
{
#[instrument(skip(self))]
async fn count(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<u32> {
let base_call = self.contract.count();
let call_with_lag = if let Some(lag) = maybe_lag {
let tip = self
.provider
.get_block_number()
.await
.map_err(ChainCommunicationError::from_other)?
.as_u64();
base_call.block(tip.saturating_sub(lag.get()))
} else {
base_call
};
let count = call_with_lag.call().await?;
Ok(count)
let call = call_with_lag(self.contract.nonce(), &self.provider, maybe_lag).await?;
let nonce = call.call().await?;
Ok(nonce)
}
#[instrument(skip(self))]
@ -328,92 +310,6 @@ where
Ok(self.contract.delivered(id.into()).call().await?)
}
#[instrument(skip(self))]
async fn latest_checkpoint(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<Checkpoint> {
let base_call = self.contract.latest_checkpoint();
let call_with_lag = match maybe_lag {
Some(lag) => {
let tip = self
.provider
.get_block_number()
.await
.map_err(ChainCommunicationError::from_other)?
.as_u64();
base_call.block(tip.saturating_sub(lag.get()))
}
None => base_call,
};
let (root, index) = call_with_lag.call().await?;
Ok(Checkpoint {
mailbox_address: self.address(),
mailbox_domain: self.domain.id(),
root: root.into(),
index,
})
}
#[instrument(skip(self))]
#[allow(clippy::needless_range_loop)]
async fn tree(&self, lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle> {
let lag = lag.map(|v| v.get()).unwrap_or(0).into();
// use consistent block for all storage slot or view calls to prevent
// race conditions where tree contents change between calls
let fixed_block_number = self
.provider
.get_block_number()
.await
.map_err(ChainCommunicationError::from_other)?
.saturating_sub(lag)
.into();
let expected_root = self
.contract
.root()
.block(fixed_block_number)
.call()
.await?
.into();
// TODO: migrate to single contract view call once mailbox is upgraded
// see https://github.com/hyperlane-xyz/hyperlane-monorepo/issues/2250
// let branch = self.contract.branch().block(block_number).call().await;
let mut branch = [H256::zero(); TREE_DEPTH];
for index in 0..TREE_DEPTH {
let slot = U256::from(MERKLE_TREE_CONTRACT_SLOT) + index;
let mut location = [0u8; 32];
slot.to_big_endian(&mut location);
branch[index] = self
.provider
.get_storage_at(
self.contract.address(),
location.into(),
Some(fixed_block_number),
)
.await
.map(Into::into)
.map_err(ChainCommunicationError::from_other)?;
}
let count = self
.contract
.count()
.block(fixed_block_number)
.call()
.await? as usize;
let tree = IncrementalMerkle::new(branch, count);
// validate tree built from storage slot lookups matches expected
// result from root() view call at consistent block
assert_eq!(tree.root(), expected_root);
Ok(tree)
}
#[instrument(skip(self))]
async fn default_ism(&self) -> ChainResult<H256> {
Ok(self.contract.default_ism().call().await?.into())

@ -0,0 +1,249 @@
#![allow(missing_docs)]
use std::num::NonZeroU64;
use std::ops::RangeInclusive;
use std::sync::Arc;
use async_trait::async_trait;
use ethers::prelude::Middleware;
use hyperlane_core::accumulator::incremental::IncrementalMerkle;
use tracing::instrument;
use hyperlane_core::{
ChainCommunicationError, ChainResult, Checkpoint, ContractLocator, HyperlaneChain,
HyperlaneContract, HyperlaneDomain, HyperlaneProvider, Indexer, LogMeta, MerkleTreeHook,
MerkleTreeInsertion, SequenceIndexer, H256,
};
use crate::contracts::merkle_tree_hook::{MerkleTreeHook as MerkleTreeHookContract, Tree};
use crate::trait_builder::BuildableWithProvider;
use crate::tx::call_with_lag;
use crate::EthereumProvider;
/// Convert the on-chain `Tree` struct returned by the MerkleTreeHook contract
/// into the core `IncrementalMerkle` accumulator type.
// We don't need the reverse of this impl, so it's ok to disable the clippy lint
#[allow(clippy::from_over_into)]
impl Into<IncrementalMerkle> for Tree {
    fn into(self) -> IncrementalMerkle {
        let converted: Vec<_> = self.branch.iter().map(|v| v.into()).collect();
        // The contract's branch is a fixed-size array, so collecting it back
        // into a fixed-size array of the same length (32) cannot fail.
        let branch = converted
            .try_into()
            .expect("tree branch length must match IncrementalMerkle depth");
        IncrementalMerkle::new(branch, self.count.as_usize())
    }
}
/// Builder producing a boxed [`MerkleTreeHook`] backed by an Ethereum provider.
pub struct MerkleTreeHookBuilder {}

#[async_trait]
impl BuildableWithProvider for MerkleTreeHookBuilder {
    type Output = Box<dyn MerkleTreeHook>;

    /// Wrap the given middleware in an `EthereumMerkleTreeHook` bound to the
    /// contract address/domain in `locator`, boxed as a trait object.
    async fn build_with_provider<M: Middleware + 'static>(
        &self,
        provider: M,
        locator: &ContractLocator,
    ) -> Self::Output {
        Box::new(EthereumMerkleTreeHook::new(Arc::new(provider), locator))
    }
}
/// Builder producing a boxed merkle-tree-insertion indexer for an Ethereum chain.
pub struct MerkleTreeHookIndexerBuilder {
    // Number of blocks to subtract from the chain tip when computing the
    // finalized block number (reorg safety margin).
    pub reorg_period: u32,
}

#[async_trait]
impl BuildableWithProvider for MerkleTreeHookIndexerBuilder {
    type Output = Box<dyn SequenceIndexer<MerkleTreeInsertion>>;

    /// Wrap the given middleware in an `EthereumMerkleTreeHookIndexer`
    /// configured with this builder's reorg period.
    async fn build_with_provider<M: Middleware + 'static>(
        &self,
        provider: M,
        locator: &ContractLocator,
    ) -> Self::Output {
        Box::new(EthereumMerkleTreeHookIndexer::new(
            Arc::new(provider),
            locator,
            self.reorg_period,
        ))
    }
}
#[derive(Debug)]
/// Struct that retrieves event data for an Ethereum MerkleTreeHook
pub struct EthereumMerkleTreeHookIndexer<M>
where
    M: Middleware,
{
    // Typed binding to the deployed MerkleTreeHook contract
    contract: Arc<MerkleTreeHookContract<M>>,
    // Middleware used for provider-level calls (e.g. block number queries)
    provider: Arc<M>,
    // Blocks subtracted from the tip when reporting the finalized block number
    reorg_period: u32,
}
impl<M> EthereumMerkleTreeHookIndexer<M>
where
    M: Middleware + 'static,
{
    /// Create a new `EthereumMerkleTreeHookIndexer` for the contract at
    /// `locator.address`, using `provider` for all RPC calls.
    pub fn new(provider: Arc<M>, locator: &ContractLocator, reorg_period: u32) -> Self {
        let contract = Arc::new(MerkleTreeHookContract::new(
            locator.address,
            provider.clone(),
        ));
        Self {
            contract,
            provider,
            reorg_period,
        }
    }
}
#[async_trait]
impl<M> Indexer<MerkleTreeInsertion> for EthereumMerkleTreeHookIndexer<M>
where
    M: Middleware + 'static,
{
    /// Query `InsertedIntoTree` events emitted within `range` (inclusive) and
    /// return them as `MerkleTreeInsertion`s paired with their log metadata.
    #[instrument(err, skip(self))]
    async fn fetch_logs(
        &self,
        range: RangeInclusive<u32>,
    ) -> ChainResult<Vec<(MerkleTreeInsertion, LogMeta)>> {
        let events = self
            .contract
            .inserted_into_tree_filter()
            .from_block(*range.start())
            .to_block(*range.end())
            .query_with_meta()
            .await?;

        let mut logs = Vec::with_capacity(events.len());
        for (event, meta) in events {
            let insertion = MerkleTreeInsertion::new(event.index, H256::from(event.message_id));
            logs.push((insertion, meta.into()));
        }
        Ok(logs)
    }

    /// Current chain tip minus the configured reorg period (saturating at 0).
    #[instrument(level = "debug", err, ret, skip(self))]
    async fn get_finalized_block_number(&self) -> ChainResult<u32> {
        let tip = self
            .provider
            .get_block_number()
            .await
            .map_err(ChainCommunicationError::from_other)?
            .as_u32();
        Ok(tip.saturating_sub(self.reorg_period))
    }
}
#[async_trait]
impl<M> SequenceIndexer<MerkleTreeInsertion> for EthereumMerkleTreeHookIndexer<M>
where
    M: Middleware + 'static,
{
    /// Returns `(None, tip)`: no sequence is tracked for merkle tree hook
    /// insertions on EVM chains, only the finalized block watermark.
    async fn sequence_and_tip(&self) -> ChainResult<(Option<u32>, u32)> {
        // NOTE(review): this comment references InterchainGasPaymasterIndexerBuilder,
        // which looks copy-pasted from the IGP indexer — confirm the intended builder.
        // The builder must return a `SequenceIndexer` type.
        // It's fine if only a blanket implementation is provided for EVM chains, since their
        // indexing only uses the `Index` trait, which is a supertrait of `SequenceIndexer`.
        // TODO: if `SequenceIndexer` turns out to not depend on `Indexer` at all, then the supertrait
        // dependency could be removed, even if the builder would still need to return a type that is both
        // `SequenceIndexer` and `Indexer`.
        let tip = self.get_finalized_block_number().await?;
        Ok((None, tip))
    }
}
/// A reference to a MerkleTreeHook contract on some Ethereum chain
// (doc previously said "Mailbox", a leftover from the file this was derived from)
#[derive(Debug)]
pub struct EthereumMerkleTreeHook<M>
where
    M: Middleware,
{
    // Typed binding to the deployed MerkleTreeHook contract
    contract: Arc<MerkleTreeHookContract<M>>,
    // Hyperlane domain of the chain this hook is deployed on
    domain: HyperlaneDomain,
    // Middleware used for provider-level calls (e.g. lagged block lookups)
    provider: Arc<M>,
}
impl<M> EthereumMerkleTreeHook<M>
where
    M: Middleware,
{
    /// Create a reference to a merkle tree hook at a specific Ethereum address
    /// on some chain
    pub fn new(provider: Arc<M>, locator: &ContractLocator) -> Self {
        let contract = Arc::new(MerkleTreeHookContract::new(
            locator.address,
            provider.clone(),
        ));
        Self {
            contract,
            domain: locator.domain.clone(),
            provider,
        }
    }
}
impl<M> HyperlaneChain for EthereumMerkleTreeHook<M>
where
    M: Middleware + 'static,
{
    /// The Hyperlane domain this hook is deployed on.
    fn domain(&self) -> &HyperlaneDomain {
        &self.domain
    }

    /// Build a boxed `HyperlaneProvider` sharing this hook's middleware and domain.
    fn provider(&self) -> Box<dyn HyperlaneProvider> {
        Box::new(EthereumProvider::new(
            self.provider.clone(),
            self.domain.clone(),
        ))
    }
}
impl<M> HyperlaneContract for EthereumMerkleTreeHook<M>
where
    M: Middleware + 'static,
{
    /// On-chain address of the MerkleTreeHook contract, widened to `H256`.
    fn address(&self) -> H256 {
        self.contract.address().into()
    }
}
#[async_trait]
impl<M> MerkleTreeHook for EthereumMerkleTreeHook<M>
where
    M: Middleware + 'static,
{
    /// Fetch the latest checkpoint (root + index), optionally evaluated
    /// `maybe_lag` blocks behind the current tip via `call_with_lag`.
    #[instrument(skip(self))]
    async fn latest_checkpoint(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<Checkpoint> {
        let call =
            call_with_lag(self.contract.latest_checkpoint(), &self.provider, maybe_lag).await?;
        let (root, index) = call.call().await?;
        Ok(Checkpoint {
            merkle_tree_hook_address: self.address(),
            mailbox_domain: self.domain.id(),
            root: root.into(),
            index,
        })
    }

    /// Fetch the full incremental merkle tree, optionally lagged behind the tip.
    // The previous implementation read branch slots in a range loop and carried
    // #[allow(clippy::needless_range_loop)]; that allow is stale for this body
    // (a single view call + conversion) and has been removed.
    #[instrument(skip(self))]
    async fn tree(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle> {
        let call = call_with_lag(self.contract.tree(), &self.provider, maybe_lag).await?;
        Ok(call.call().await?.into())
    }

    /// Fetch the number of leaves inserted into the tree, optionally lagged.
    #[instrument(skip(self))]
    async fn count(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<u32> {
        let call = call_with_lag(self.contract.count(), &self.provider, maybe_lag).await?;
        let count = call.call().await?;
        Ok(count)
    }
}

@ -116,7 +116,9 @@ impl From<std::convert::Infallible> for SignersError {
#[cfg(test)]
mod test {
use hyperlane_core::{Checkpoint, HyperlaneSigner, HyperlaneSignerExt, H256};
use hyperlane_core::{
Checkpoint, CheckpointWithMessageId, HyperlaneSigner, HyperlaneSignerExt, H256,
};
use crate::signers::Signers;
@ -128,11 +130,14 @@ mod test {
.parse::<ethers::signers::LocalWallet>()
.unwrap()
.into();
let message = Checkpoint {
mailbox_address: H256::repeat_byte(2),
mailbox_domain: 5,
root: H256::repeat_byte(1),
index: 123,
let message = CheckpointWithMessageId {
checkpoint: Checkpoint {
merkle_tree_hook_address: H256::repeat_byte(2),
mailbox_domain: 5,
root: H256::repeat_byte(1),
index: 123,
},
message_id: H256::repeat_byte(3),
};
let signed = signer.sign(message).await.expect("!sign");

@ -1,4 +1,6 @@
use std::{sync::Arc, time::Duration};
use std::num::NonZeroU64;
use std::sync::Arc;
use std::time::Duration;
use ethers::{
abi::Detokenize,
@ -6,6 +8,7 @@ use ethers::{
types::Eip1559TransactionRequest,
};
use ethers_contract::builders::ContractCall;
use ethers_core::types::BlockNumber;
use hyperlane_core::{
utils::fmt_bytes, ChainCommunicationError, ChainResult, KnownHyperlaneDomain, H256, U256,
};
@ -118,3 +121,25 @@ where
eip_1559_tx.tx = ethers::types::transaction::eip2718::TypedTransaction::Eip1559(request);
Ok(eip_1559_tx.gas(gas_limit))
}
/// If a lag is requested, pin `call` to the block `lag` blocks behind the
/// current tip (saturating at genesis); otherwise return the call unchanged.
pub(crate) async fn call_with_lag<M, T>(
    call: ethers::contract::builders::ContractCall<M, T>,
    provider: &M,
    maybe_lag: Option<NonZeroU64>,
) -> ChainResult<ethers::contract::builders::ContractCall<M, T>>
where
    M: Middleware + 'static,
    T: Detokenize,
{
    match maybe_lag {
        None => Ok(call),
        Some(lag) => {
            let tip = provider
                .get_block_number()
                .await
                .map_err(ChainCommunicationError::from_other)?;
            let fixed_block_number: BlockNumber = tip.saturating_sub(lag.get().into()).into();
            Ok(call.block(fixed_block_number))
        }
    }
}

@ -7,7 +7,6 @@ use std::{fs::OpenOptions, io::Write, str::FromStr};
use hex::FromHex;
use serde_json::{json, Value};
use ethers::signers::Signer;
use hyperlane_core::{
accumulator::{
merkle::{merkle_root_from_branch, MerkleTree},
@ -17,7 +16,6 @@ use hyperlane_core::{
utils::domain_hash,
Checkpoint, HyperlaneMessage, HyperlaneSignerExt, H160, H256,
};
use hyperlane_ethereum::Signers;
/// Output proof to /vector/message.json
#[test]
@ -121,57 +119,3 @@ pub fn output_domain_hashes() {
file.write_all(json.as_bytes())
.expect("Failed to write to file");
}
/// Outputs signed checkpoint test cases in /vector/signedCheckpoint.json
#[test]
pub fn output_signed_checkpoints() {
let mailbox = H256::from(H160::from_str("0x2222222222222222222222222222222222222222").unwrap());
let t = async {
let signer: Signers = "1111111111111111111111111111111111111111111111111111111111111111"
.parse::<ethers::signers::LocalWallet>()
.unwrap()
.into();
let mut test_cases: Vec<Value> = Vec::new();
// test suite
for i in 1..=3 {
let signed_checkpoint = signer
.sign(Checkpoint {
mailbox_address: mailbox,
mailbox_domain: 1000,
root: H256::repeat_byte(i + 1),
index: i as u32,
})
.await
.expect("!sign_with");
test_cases.push(json!({
"mailbox": signed_checkpoint.value.mailbox_address,
"domain": signed_checkpoint.value.mailbox_domain,
"root": signed_checkpoint.value.root,
"index": signed_checkpoint.value.index,
"signature": signed_checkpoint.signature,
"signer": signer.address(),
}))
}
let json = json!(test_cases).to_string();
let mut file = OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(test_utils::find_vector("signedCheckpoint.json"))
.expect("Failed to open/create file");
file.write_all(json.as_bytes())
.expect("Failed to write to file");
};
tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap()
.block_on(t)
}

@ -5,13 +5,12 @@ use std::ops::RangeInclusive;
use async_trait::async_trait;
use fuels::prelude::{Bech32ContractId, WalletUnlocked};
use hyperlane_core::accumulator::incremental::IncrementalMerkle;
use tracing::instrument;
use hyperlane_core::{
utils::fmt_bytes, ChainCommunicationError, ChainResult, Checkpoint, ContractLocator,
HyperlaneAbi, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneMessage,
HyperlaneProvider, Indexer, LogMeta, Mailbox, TxCostEstimate, TxOutcome, H256, U256,
utils::fmt_bytes, ChainCommunicationError, ChainResult, ContractLocator, HyperlaneAbi,
HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneMessage, HyperlaneProvider,
Indexer, LogMeta, Mailbox, TxCostEstimate, TxOutcome, H256, U256,
};
use crate::{
@ -81,39 +80,11 @@ impl Mailbox for FuelMailbox {
.map_err(ChainCommunicationError::from_other)
}
#[instrument(level = "debug", err, ret, skip(self))]
async fn tree(&self, lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle> {
todo!()
}
#[instrument(level = "debug", err, ret, skip(self))]
async fn delivered(&self, id: H256) -> ChainResult<bool> {
todo!()
}
#[instrument(level = "debug", err, ret, skip(self))]
async fn latest_checkpoint(&self, lag: Option<NonZeroU64>) -> ChainResult<Checkpoint> {
assert!(
lag.is_none(),
"Fuel does not support querying point-in-time"
);
let (root, index) = self
.contract
.methods()
.latest_checkpoint()
.simulate()
.await
.map_err(ChainCommunicationError::from_other)?
.value;
Ok(Checkpoint {
mailbox_address: self.address(),
mailbox_domain: self.domain.id(),
root: root.into_h256(),
index,
})
}
#[instrument(err, ret, skip(self))]
async fn default_ism(&self) -> ChainResult<H256> {
todo!()

@ -1,5 +1,5 @@
use fuels::{client::FuelClient, prelude::Provider};
use hyperlane_core::{config::*, ChainCommunicationError, ChainResult};
use hyperlane_core::{ChainCommunicationError, ChainResult};
use url::Url;
/// Fuel connection configuration
@ -9,12 +9,6 @@ pub struct ConnectionConf {
pub url: Url,
}
/// Raw fuel connection configuration used for better deserialization errors.
#[derive(Debug, serde::Deserialize)]
pub struct DeprecatedRawConnectionConf {
url: Option<String>,
}
/// An error type when parsing a connection configuration.
#[derive(thiserror::Error, Debug)]
pub enum ConnectionConfError {
@ -26,27 +20,6 @@ pub enum ConnectionConfError {
InvalidConnectionUrl(String, url::ParseError),
}
impl FromRawConf<DeprecatedRawConnectionConf> for ConnectionConf {
fn from_config_filtered(
raw: DeprecatedRawConnectionConf,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
use ConnectionConfError::*;
match raw {
DeprecatedRawConnectionConf { url: Some(url) } => Ok(Self {
url: url
.parse()
.map_err(|e| InvalidConnectionUrl(url, e))
.into_config_result(|| cwp.join("url"))?,
}),
DeprecatedRawConnectionConf { url: None } => {
Err(MissingConnectionUrl).into_config_result(|| cwp.join("url"))
}
}
}
}
#[derive(thiserror::Error, Debug)]
#[error(transparent)]
struct FuelNewConnectionError(#[from] anyhow::Error);

@ -218,6 +218,7 @@ impl SealevelInterchainGasPaymasterIndexer {
let igp_payment = InterchainGasPayment {
message_id: gas_payment_account.message_id,
destination: gas_payment_account.destination_domain,
payment: gas_payment_account.payment.into(),
gas_amount: gas_payment_account.gas_amount.into(),
};

@ -9,6 +9,7 @@ pub(crate) use client::RpcClientWithDebug;
pub use interchain_gas::*;
pub use interchain_security_module::*;
pub use mailbox::*;
pub use merkle_tree_hook::*;
pub use provider::*;
pub use solana_sdk::signer::keypair::Keypair;
pub use trait_builder::*;
@ -17,6 +18,7 @@ pub use validator_announce::*;
mod interchain_gas;
mod interchain_security_module;
mod mailbox;
mod merkle_tree_hook;
mod multisig_ism;
mod provider;
mod trait_builder;

@ -11,7 +11,7 @@ use hyperlane_core::{
accumulator::incremental::IncrementalMerkle, ChainCommunicationError, ChainResult, Checkpoint,
ContractLocator, Decode as _, Encode as _, HyperlaneAbi, HyperlaneChain, HyperlaneContract,
HyperlaneDomain, HyperlaneMessage, HyperlaneProvider, Indexer, LogMeta, Mailbox,
SequenceIndexer, TxCostEstimate, TxOutcome, H256, H512, U256,
MerkleTreeHook, SequenceIndexer, TxCostEstimate, TxOutcome, H256, H512, U256,
};
use hyperlane_sealevel_interchain_security_module_interface::{
InterchainSecurityModuleInstruction, VerifyInstruction,
@ -66,11 +66,11 @@ const PROCESS_COMPUTE_UNITS: u32 = 1_400_000;
/// A reference to a Mailbox contract on some Sealevel chain
pub struct SealevelMailbox {
program_id: Pubkey,
pub(crate) program_id: Pubkey,
inbox: (Pubkey, u8),
outbox: (Pubkey, u8),
rpc_client: RpcClient,
domain: HyperlaneDomain,
pub(crate) outbox: (Pubkey, u8),
pub(crate) rpc_client: RpcClient,
pub(crate) domain: HyperlaneDomain,
payer: Option<Keypair>,
}
@ -283,11 +283,7 @@ impl std::fmt::Debug for SealevelMailbox {
impl Mailbox for SealevelMailbox {
#[instrument(err, ret, skip(self))]
async fn count(&self, _maybe_lag: Option<NonZeroU64>) -> ChainResult<u32> {
let tree = self.tree(_maybe_lag).await?;
tree.count()
.try_into()
.map_err(ChainCommunicationError::from_other)
<Self as MerkleTreeHook>::count(self, _maybe_lag).await
}
#[instrument(err, ret, skip(self))]
@ -310,57 +306,6 @@ impl Mailbox for SealevelMailbox {
Ok(account.value.is_some())
}
#[instrument(err, ret, skip(self))]
async fn tree(&self, lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle> {
assert!(
lag.is_none(),
"Sealevel does not support querying point-in-time"
);
let outbox_account = self
.rpc_client
.get_account_with_commitment(&self.outbox.0, CommitmentConfig::finalized())
.await
.map_err(ChainCommunicationError::from_other)?
.value
.ok_or_else(|| {
ChainCommunicationError::from_other_str("Could not find account data")
})?;
let outbox = OutboxAccount::fetch(&mut outbox_account.data.as_ref())
.map_err(ChainCommunicationError::from_other)?
.into_inner();
Ok(outbox.tree)
}
#[instrument(err, ret, skip(self))]
async fn latest_checkpoint(&self, lag: Option<NonZeroU64>) -> ChainResult<Checkpoint> {
assert!(
lag.is_none(),
"Sealevel does not support querying point-in-time"
);
let tree = self.tree(lag).await?;
let root = tree.root();
let count: u32 = tree
.count()
.try_into()
.map_err(ChainCommunicationError::from_other)?;
let index = count.checked_sub(1).ok_or_else(|| {
ChainCommunicationError::from_contract_error_str(
"Outbox is empty, cannot compute checkpoint",
)
})?;
let checkpoint = Checkpoint {
mailbox_address: self.program_id.to_bytes().into(),
mailbox_domain: self.domain.id(),
root,
index,
};
Ok(checkpoint)
}
#[instrument(err, ret, skip(self))]
async fn default_ism(&self) -> ChainResult<H256> {
let inbox_account = self
@ -690,7 +635,7 @@ impl SequenceIndexer<HyperlaneMessage> for SealevelMailboxIndexer {
async fn sequence_and_tip(&self) -> ChainResult<(Option<u32>, u32)> {
let tip = Indexer::<HyperlaneMessage>::get_finalized_block_number(self as _).await?;
// TODO: need to make sure the call and tip are at the same height?
let count = self.mailbox.count(None).await?;
let count = Mailbox::count(&self.mailbox, None).await?;
Ok((Some(count), tip))
}
}

@ -0,0 +1,101 @@
use std::{num::NonZeroU64, ops::RangeInclusive};
use async_trait::async_trait;
use derive_new::new;
use hyperlane_core::{
accumulator::incremental::IncrementalMerkle, ChainCommunicationError, ChainResult, Checkpoint,
Indexer, LogMeta, MerkleTreeHook, MerkleTreeInsertion, SequenceIndexer,
};
use hyperlane_sealevel_mailbox::accounts::OutboxAccount;
use solana_sdk::commitment_config::CommitmentConfig;
use tracing::instrument;
use crate::SealevelMailbox;
#[async_trait]
impl MerkleTreeHook for SealevelMailbox {
    /// Fetch the incremental merkle tree stored in the outbox account.
    ///
    /// Sealevel does not support point-in-time queries, so `lag` must be
    /// `None` (asserted, consistent with the other Sealevel trait impls).
    #[instrument(err, ret, skip(self))]
    async fn tree(&self, lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle> {
        assert!(
            lag.is_none(),
            "Sealevel does not support querying point-in-time"
        );

        let outbox_account = self
            .rpc_client
            .get_account_with_commitment(&self.outbox.0, CommitmentConfig::finalized())
            .await
            .map_err(ChainCommunicationError::from_other)?
            .value
            .ok_or_else(|| {
                ChainCommunicationError::from_other_str("Could not find account data")
            })?;
        let outbox = OutboxAccount::fetch(&mut outbox_account.data.as_ref())
            .map_err(ChainCommunicationError::from_other)?
            .into_inner();

        Ok(outbox.tree)
    }

    /// Derive the latest checkpoint (root and index of the last inserted leaf)
    /// from the outbox tree. Errors if the tree is empty, since an empty
    /// outbox has no leaf to checkpoint.
    #[instrument(err, ret, skip(self))]
    async fn latest_checkpoint(&self, lag: Option<NonZeroU64>) -> ChainResult<Checkpoint> {
        assert!(
            lag.is_none(),
            "Sealevel does not support querying point-in-time"
        );

        let tree = self.tree(lag).await?;

        let root = tree.root();
        let count: u32 = tree
            .count()
            .try_into()
            .map_err(ChainCommunicationError::from_other)?;
        // The checkpoint index is the index of the most recent leaf: count - 1.
        let index = count.checked_sub(1).ok_or_else(|| {
            ChainCommunicationError::from_contract_error_str(
                "Outbox is empty, cannot compute checkpoint",
            )
        })?;
        let checkpoint = Checkpoint {
            merkle_tree_hook_address: self.program_id.to_bytes().into(),
            mailbox_domain: self.domain.id(),
            root,
            index,
        };
        Ok(checkpoint)
    }

    /// Number of leaves inserted into the tree. The lag is forwarded to
    /// `tree`, which asserts it is `None` on Sealevel.
    // The parameter was named `_maybe_lag` despite being used; renamed since
    // an underscore prefix signals an unused binding.
    #[instrument(err, ret, skip(self))]
    async fn count(&self, maybe_lag: Option<NonZeroU64>) -> ChainResult<u32> {
        let tree = self.tree(maybe_lag).await?;
        tree.count()
            .try_into()
            .map_err(ChainCommunicationError::from_other)
    }
}
/// Struct that retrieves event data for a Sealevel merkle tree hook contract
// Currently a stateless stub; see the Indexer impl below, which returns no logs.
#[derive(Debug, new)]
pub struct SealevelMerkleTreeHookIndexer {}
#[async_trait]
impl Indexer<MerkleTreeInsertion> for SealevelMerkleTreeHookIndexer {
    /// Stub: Sealevel merkle tree hook indexing is not implemented yet, so no
    /// logs are ever returned regardless of the requested range.
    async fn fetch_logs(
        &self,
        _range: RangeInclusive<u32>,
    ) -> ChainResult<Vec<(MerkleTreeInsertion, LogMeta)>> {
        Ok(vec![])
    }

    /// Stub: always reports block 0 as finalized (no real chain query).
    async fn get_finalized_block_number(&self) -> ChainResult<u32> {
        Ok(0)
    }
}
#[async_trait]
impl SequenceIndexer<MerkleTreeInsertion> for SealevelMerkleTreeHookIndexer {
    /// Stub: no sequence and a zero tip, matching the stub `Indexer` impl.
    async fn sequence_and_tip(&self) -> ChainResult<(Option<u32>, u32)> {
        Ok((None, 0))
    }
}

@ -1,7 +1,4 @@
use hyperlane_core::{
config::{ConfigErrResultExt, ConfigPath, ConfigResult, FromRawConf},
ChainCommunicationError,
};
use hyperlane_core::ChainCommunicationError;
use url::Url;
/// Sealevel connection configuration
@ -11,12 +8,6 @@ pub struct ConnectionConf {
pub url: Url,
}
/// Raw Sealevel connection configuration used for better deserialization errors.
#[derive(Debug, serde::Deserialize)]
pub struct DeprecatedRawConnectionConf {
url: Option<String>,
}
/// An error type when parsing a connection configuration.
#[derive(thiserror::Error, Debug)]
pub enum ConnectionConfError {
@ -28,27 +19,6 @@ pub enum ConnectionConfError {
InvalidConnectionUrl(String, url::ParseError),
}
impl FromRawConf<DeprecatedRawConnectionConf> for ConnectionConf {
fn from_config_filtered(
raw: DeprecatedRawConnectionConf,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
use ConnectionConfError::*;
match raw {
DeprecatedRawConnectionConf { url: Some(url) } => Ok(Self {
url: url
.parse()
.map_err(|e| InvalidConnectionUrl(url, e))
.into_config_result(|| cwp.join("url"))?,
}),
DeprecatedRawConnectionConf { url: None } => {
Err(MissingConnectionUrl).into_config_result(|| cwp.join("url"))
}
}
}
}
#[derive(thiserror::Error, Debug)]
#[error(transparent)]
struct SealevelNewConnectionError(#[from] anyhow::Error);

@ -0,0 +1,633 @@
{
"chains": {
"arbitrum": {
"chainId": 42161,
"domainId": 42161,
"name": "arbitrum",
"protocol": "ethereum",
"displayName": "Arbitrum",
"nativeToken": {
"name": "Ether",
"symbol": "ETH",
"decimals": 18
},
"rpcUrls": [
{
"http": "https://arb1.arbitrum.io/rpc"
}
],
"blockExplorers": [
{
"name": "Arbiscan",
"url": "https://arbiscan.io",
"apiUrl": "https://api.arbiscan.io/api",
"family": "etherscan"
}
],
"blocks": {
"confirmations": 1,
"reorgPeriod": 0,
"estimateBlockTime": 3
},
"gasCurrencyCoinGeckoId": "ethereum",
"gnosisSafeTransactionServiceUrl": "https://safe-transaction-arbitrum.safe.global/",
"storageGasOracle": "0xD3805207b65d99C075ceA938Fa7c0587026a5DF5",
"proxyAdmin": "0x80Cebd56A65e46c474a1A101e89E76C4c51D179c",
"merkleRootMultisigIsmFactory": "0x3C330D4A2e2b8443AFaB8E326E64ab4251B7Eae0",
"messageIdMultisigIsmFactory": "0x12Df53079d399a47e9E730df095b712B0FDFA791",
"aggregationIsmFactory": "0xD4883084389fC1Eeb4dAfB2ADcFc36B711c310EB",
"aggregationHookFactory": "0x9B5f440bBb64Fee337F37e03362b628711Ea09C7",
"routingIsmFactory": "0xC020F8A7b00178dFA0fcC75C159e14b79F8e5c63",
"merkleTreeHook": "0x748040afB89B8FdBb992799808215419d36A0930",
"interchainGasPaymaster": "0x3b6044acd6767f017e99318AA6Ef93b7B06A5a22",
"aggregationHook": "0xe0cb37cFc47296f1c4eD77EFf92Aed478644d10c",
"protocolFee": "0xD0199067DACb8526e7dc524a9a7DCBb57Cd25421",
"mailbox": "0x979Ca5202784112f4738403dBec5D0F3B9daabB9",
"validatorAnnounce": "0x1df063280C4166AF9a725e3828b4dAC6c7113B08",
"index": {
"from": 145551152
}
},
"avalanche": {
"chainId": 43114,
"domainId": 43114,
"name": "avalanche",
"protocol": "ethereum",
"displayName": "Avalanche",
"nativeToken": {
"decimals": 18,
"name": "Avalanche",
"symbol": "AVAX"
},
"rpcUrls": [
{
"http": "https://api.avax.network/ext/bc/C/rpc",
"pagination": {
"maxBlockRange": 100000,
"minBlockNumber": 6765067
}
}
],
"blockExplorers": [
{
"name": "SnowTrace",
"url": "https://snowtrace.io",
"apiUrl": "https://api.snowtrace.io/api",
"family": "other"
}
],
"blocks": {
"confirmations": 3,
"reorgPeriod": 3,
"estimateBlockTime": 2
},
"gasCurrencyCoinGeckoId": "avalanche-2",
"gnosisSafeTransactionServiceUrl": "https://safe-transaction-avalanche.safe.global/",
"storageGasOracle": "0x175821F30AdCAA4bbB72Ce98eF76C2E0De2C3f21",
"proxyAdmin": "0xd7CF8c05fd81b8cA7CfF8E6C49B08a9D63265c9B",
"merkleRootMultisigIsmFactory": "0x896cF1D1B66cD211633eDd589fF158E8Cfaf9B54",
"messageIdMultisigIsmFactory": "0x8819D653DF5b1FC0DdB32189a2704E471AF8483c",
"aggregationIsmFactory": "0xa5E13796eB7d2EDCc88012c8cfF90D69B51FcF9f",
"aggregationHookFactory": "0x3bF6Ac986C7Af9A9Ac356C0e99C0041EFd8D96e7",
"routingIsmFactory": "0xA9Ddc70f50009aF8bDB312aA757B4304b0F7BbB3",
"merkleTreeHook": "0x84eea61D679F42D92145fA052C89900CBAccE95A",
"interchainGasPaymaster": "0x95519ba800BBd0d34eeAE026fEc620AD978176C0",
"aggregationHook": "0x0165a22BA489F7DA37DAf6397781777D9FCB5708",
"protocolFee": "0xEc4AdA26E51f2685279F37C8aE62BeAd8212D597",
"mailbox": "0xFf06aFcaABaDDd1fb08371f9ccA15D73D51FeBD6",
"validatorAnnounce": "0x9Cad0eC82328CEE2386Ec14a12E81d070a27712f",
"index": {
"from": 37133307
}
},
"base": {
"chainId": 8453,
"domainId": 8453,
"name": "base",
"protocol": "ethereum",
"displayName": "Base",
"nativeToken": {
"name": "Ether",
"symbol": "ETH",
"decimals": 18
},
"rpcUrls": [
{
"http": "https://base.publicnode.com/"
},
{
"http": "https://mainnet.base.org"
},
{
"http": "https://base.blockpi.network/v1/rpc/public"
}
],
"blockExplorers": [
{
"name": "BaseScan",
"url": "https://basescan.org",
"apiUrl": "https://api.basescan.org/api",
"family": "etherscan"
}
],
"blocks": {
"confirmations": 1,
"reorgPeriod": 1,
"estimateBlockTime": 2
},
"gnosisSafeTransactionServiceUrl": "https://safe-transaction-base.safe.global/",
"merkleRootMultisigIsmFactory": "0x8b83fefd896fAa52057798f6426E9f0B080FCCcE",
"messageIdMultisigIsmFactory": "0x8F7454AC98228f3504Bb91eA3D8Adafe6406110A",
"aggregationIsmFactory": "0xEb9FcFDC9EfDC17c1EC5E1dc085B98485da213D6",
"aggregationHookFactory": "0x1052eF3419f26Bec74Ed7CEf4a4FA6812Bc09908",
"routingIsmFactory": "0x0761b0827849abbf7b0cC09CE14e1C93D87f5004",
"proxyAdmin": "0x4Ed7d626f1E96cD1C0401607Bf70D95243E3dEd1",
"mailbox": "0xeA87ae93Fa0019a82A727bfd3eBd1cFCa8f64f1D",
"merkleTreeHook": "0x19dc38aeae620380430C200a6E990D5Af5480117",
"storageGasOracle": "0xBF12ef4B9f307463D3FB59c3604F294dDCe287E2",
"interchainGasPaymaster": "0xc3F23848Ed2e04C0c6d41bd7804fa8f89F940B94",
"aggregationHook": "0x13f3d4B0Ee0a713430fded9E18f7fb6c91A6E41F",
"protocolFee": "0x99ca8c74cE7Cfa9d72A51fbb05F9821f5f826b3a",
"validatorAnnounce": "0x182E8d7c5F1B06201b102123FC7dF0EaeB445a7B",
"index": {
"from": 5959667
}
},
"bsc": {
"chainId": 56,
"domainId": 56,
"name": "bsc",
"protocol": "ethereum",
"displayName": "Binance Smart Chain",
"displayNameShort": "Binance",
"nativeToken": {
"decimals": 18,
"name": "BNB",
"symbol": "BNB"
},
"rpcUrls": [
{
"http": "https://bsc-dataseed.binance.org"
},
{
"http": "https://rpc.ankr.com/bsc"
}
],
"blockExplorers": [
{
"name": "BscScan",
"url": "https://bscscan.com",
"apiUrl": "https://api.bscscan.com/api",
"family": "etherscan"
}
],
"blocks": {
"confirmations": 1,
"reorgPeriod": 15,
"estimateBlockTime": 3
},
"gasCurrencyCoinGeckoId": "binancecoin",
"gnosisSafeTransactionServiceUrl": "https://safe-transaction-bsc.safe.global/",
"transactionOverrides": {
"gasPrice": 7000000000
},
"storageGasOracle": "0x91d23D603d60445411C06e6443d81395593B7940",
"proxyAdmin": "0x65993Af9D0D3a64ec77590db7ba362D6eB78eF70",
"merkleRootMultisigIsmFactory": "0xfADBc81Ca8A957F1Bf7c78bCc575b28DBDE042b6",
"messageIdMultisigIsmFactory": "0x4B1d8352E35e3BDE36dF5ED2e73C24E35c4a96b7",
"aggregationIsmFactory": "0x38B3878c4fb44d201DA924c4a04bae3EE728c065",
"aggregationHookFactory": "0xe70E86a7D1e001D419D71F960Cb6CaD59b6A3dB6",
"routingIsmFactory": "0xc40481D13419BC8090e6AD07074Ef39E538c09CE",
"mailbox": "0x2971b9Aec44bE4eb673DF1B88cDB57b96eefe8a4",
"merkleTreeHook": "0xFDb9Cd5f9daAA2E4474019405A328a88E7484f26",
"interchainGasPaymaster": "0x78E25e7f84416e69b9339B0A6336EB6EFfF6b451",
"aggregationHook": "0x402Fc106576462a892355d69ACF03D46A888ae88",
"protocolFee": "0xA8Aa5f14a5463a78E45CC068F11c867949F3E367",
"validatorAnnounce": "0x7024078130D9c2100fEA474DAD009C2d1703aCcd",
"index": {
"from": 33068482
}
},
"celo": {
"chainId": 42220,
"domainId": 42220,
"name": "celo",
"protocol": "ethereum",
"displayName": "Celo",
"nativeToken": {
"decimals": 18,
"name": "CELO",
"symbol": "CELO"
},
"rpcUrls": [
{
"http": "https://forno.celo.org"
}
],
"blockExplorers": [
{
"name": "CeloScan",
"url": "https://celoscan.io",
"apiUrl": "https://api.celoscan.io/api",
"family": "etherscan"
},
{
"name": "Blockscout",
"url": "https://explorer.celo.org",
"apiUrl": "https://explorer.celo.org/mainnet/api",
"family": "blockscout"
}
],
"blocks": {
"confirmations": 1,
"reorgPeriod": 0,
"estimateBlockTime": 5
},
"gnosisSafeTransactionServiceUrl": "https://mainnet-tx-svc.celo-safe-prod.celo-networks-dev.org/",
"storageGasOracle": "0xD9A9966E7dA9a7f0032bF449FB12696a638E673C",
"proxyAdmin": "0x90f9a2E9eCe93516d65FdaB726a3c62F5960a1b9",
"merkleRootMultisigIsmFactory": "0x4C96a1abc44dc846775CE702C9E9BE821D3b487c",
"messageIdMultisigIsmFactory": "0xaB402f227e892Ef37C105bf06619c0fa106a1fB2",
"aggregationIsmFactory": "0x1722dd970a1F56040712129f5Eeb76B003fd7500",
"aggregationHookFactory": "0xc3745652EFB8555A8b064A0EA78d295133d326D2",
"routingIsmFactory": "0xec748b5623f0B50E4c5eB1CFa7Bd46C3213608b6",
"merkleTreeHook": "0x04dB778f05854f26E67e0a66b740BBbE9070D366",
"interchainGasPaymaster": "0x571f1435613381208477ac5d6974310d88AC7cB7",
"aggregationHook": "0xc65890329066FB20c339Bc5C22f1756e9D3a4fF5",
"protocolFee": "0x89886d431f9c3eEE64DCD6dAbA3f7D689D98D899",
"mailbox": "0x50da3B3907A08a24fe4999F4Dcf337E8dC7954bb",
"validatorAnnounce": "0xCeF677b65FDaA6804d4403083bb12B8dB3991FE1",
"index": {
"from": 22208016
}
},
"ethereum": {
"chainId": 1,
"domainId": 1,
"name": "ethereum",
"protocol": "ethereum",
"displayName": "Ethereum",
"nativeToken": {
"name": "Ether",
"symbol": "ETH",
"decimals": 18
},
"rpcUrls": [
{
"http": "https://mainnet.infura.io/v3/9aa3d95b3bc440fa88ea12eaa4456161"
},
{
"http": "https://cloudflare-eth.com"
}
],
"blockExplorers": [
{
"name": "Etherscan",
"url": "https://etherscan.io",
"apiUrl": "https://api.etherscan.io/api",
"family": "etherscan"
},
{
"name": "Blockscout",
"url": "https://blockscout.com/eth/mainnet",
"apiUrl": "https://blockscout.com/eth/mainnet/api",
"family": "blockscout"
}
],
"blocks": {
"confirmations": 3,
"reorgPeriod": 14,
"estimateBlockTime": 13
},
"gnosisSafeTransactionServiceUrl": "https://safe-transaction-mainnet.safe.global/",
"transactionOverrides": {
"maxFeePerGas": 150000000000,
"maxPriorityFeePerGas": 5000000000
},
"storageGasOracle": "0xc9a103990A8dB11b4f627bc5CD1D0c2685484Ec5",
"proxyAdmin": "0x75EE15Ee1B4A75Fa3e2fDF5DF3253c25599cc659",
"merkleRootMultisigIsmFactory": "0x47e8aF9e30C32Ab91060ED587894288786761B45",
"messageIdMultisigIsmFactory": "0xfA21D9628ADce86531854C2B7ef00F07394B0B69",
"aggregationIsmFactory": "0x46FA191Ad972D9674Ed752B69f9659A0d7b22846",
"aggregationHookFactory": "0x6D2555A8ba483CcF4409C39013F5e9a3285D3C9E",
"routingIsmFactory": "0xCb74c6aE411236CEE6803619916694BE86cF5987",
"merkleTreeHook": "0x48e6c30B97748d1e2e03bf3e9FbE3890ca5f8CCA",
"interchainGasPaymaster": "0x9e6B1022bE9BBF5aFd152483DAD9b88911bC8611",
"aggregationHook": "0xb87AC8EA4533AE017604E44470F7c1E550AC6F10",
"protocolFee": "0x8B05BF30F6247a90006c5837eA63C7905D79e6d8",
"mailbox": "0xc005dc82818d67AF737725bD4bf75435d065D239",
"validatorAnnounce": "0xCe74905e51497b4adD3639366708b821dcBcff96",
"index": {
"from": 18466263
}
},
"gnosis": {
"chainId": 100,
"domainId": 100,
"name": "gnosis",
"protocol": "ethereum",
"displayName": "Gnosis",
"nativeToken": {
"name": "xDai",
"symbol": "xDai",
"decimals": 18
},
"rpcUrls": [
{
"http": "https://rpc.gnosischain.com",
"pagination": {
"maxBlockRange": 10000,
"minBlockNumber": 25997478
}
}
],
"blockExplorers": [
{
"name": "GnosisScan",
"url": "https://gnosisscan.io",
"apiUrl": "https://api.gnosisscan.io/api",
"family": "etherscan"
}
],
"blocks": {
"confirmations": 1,
"reorgPeriod": 14,
"estimateBlockTime": 5
},
"gasCurrencyCoinGeckoId": "xdai",
"gnosisSafeTransactionServiceUrl": "https://safe-transaction-gnosis-chain.safe.global/",
"storageGasOracle": "0x5E01d8F34b629E3f92d69546bbc4142A7Adee7e9",
"proxyAdmin": "0x81a92A1a272cb09d7b4970b07548463dC7aE0cB7",
"merkleRootMultisigIsmFactory": "0x8E273260EAd8B72A085B19346A676d355740e875",
"messageIdMultisigIsmFactory": "0x603f46cc520d2fc22957b81e206408590808F02F",
"aggregationIsmFactory": "0x11EF91d17c5ad3330DbCa709a8841743d3Af6819",
"aggregationHookFactory": "0xbC8AA096dabDf4A0200BB9f8D4Cbb644C3D86d7B",
"mailbox": "0xaD09d78f4c6b9dA2Ae82b1D34107802d380Bb74f",
"routingIsmFactory": "0xd9Cc2e652A162bb93173d1c44d46cd2c0bbDA59D",
"merkleTreeHook": "0x2684C6F89E901987E1FdB7649dC5Be0c57C61645",
"interchainGasPaymaster": "0xDd260B99d302f0A3fF885728c086f729c06f227f",
"aggregationHook": "0xdD1FA1C12496474c1dDC67a658Ba81437F818861",
"protocolFee": "0x9c2214467Daf9e2e1F45b36d08ce0b9C65BFeA88",
"validatorAnnounce": "0x87ED6926abc9E38b9C7C19f835B41943b622663c",
"index": {
"from": 30715963
}
},
"moonbeam": {
"chainId": 1284,
"domainId": 1284,
"name": "moonbeam",
"protocol": "ethereum",
"displayName": "Moonbeam",
"nativeToken": {
"decimals": 18,
"name": "GLMR",
"symbol": "GLMR"
},
"rpcUrls": [
{
"http": "https://rpc.api.moonbeam.network"
}
],
"blockExplorers": [
{
"name": "MoonScan",
"url": "https://moonscan.io",
"apiUrl": "https://api-moonbeam.moonscan.io/api",
"family": "etherscan"
}
],
"blocks": {
"confirmations": 2,
"reorgPeriod": 2,
"estimateBlockTime": 12
},
"gnosisSafeTransactionServiceUrl": "https://transaction.multisig.moonbeam.network",
"storageGasOracle": "0x448b7ADB0dA36d41AA2AfDc9d63b97541A7b3819",
"proxyAdmin": "0x6A9cdA3dd1F593983BFd142Eb35e6ce4137bd5ce",
"merkleRootMultisigIsmFactory": "0xE2f485bc031Feb5a4C41C1967bf028653d75f0C3",
"messageIdMultisigIsmFactory": "0x84Df48F8f241f11d0fA302d09d73030429Bd9C73",
"aggregationIsmFactory": "0x40c6Abcb6A2CdC8882d4bEcaC47927005c7Bb8c2",
"aggregationHookFactory": "0x59cC3E7A49DdC4893eB8754c7908f96072A7DbE8",
"routingIsmFactory": "0x98Aa6239FfCcEc73A662a5e5e26Bc3fD7c7291B7",
"mailbox": "0x094d03E751f49908080EFf000Dd6FD177fd44CC3",
"merkleTreeHook": "0x87403b85f6f316e7ba91ba1fa6C3Fb7dD4095547",
"interchainGasPaymaster": "0x14760E32C0746094cF14D97124865BC7F0F7368F",
"aggregationHook": "0x23cca255aE83F57F39EAf9D14fB9FdaDF22D5863",
"protocolFee": "0xCd3e29A9D293DcC7341295996a118913F7c582c0",
"validatorAnnounce": "0x8c1001eBee6F25b31863A55EadfF149aF88B356F",
"index": {
"from": 4763137
}
},
"optimism": {
"chainId": 10,
"domainId": 10,
"name": "optimism",
"protocol": "ethereum",
"displayName": "Optimism",
"nativeToken": {
"name": "Ether",
"symbol": "ETH",
"decimals": 18
},
"rpcUrls": [
{
"http": "https://mainnet.optimism.io"
}
],
"blockExplorers": [
{
"name": "Etherscan",
"url": "https://optimistic.etherscan.io",
"apiUrl": "https://api-optimistic.etherscan.io/api",
"family": "etherscan"
}
],
"blocks": {
"confirmations": 1,
"reorgPeriod": 0,
"estimateBlockTime": 3
},
"gasCurrencyCoinGeckoId": "ethereum",
"gnosisSafeTransactionServiceUrl": "https://safe-transaction-optimism.safe.global/",
"storageGasOracle": "0x27e88AeB8EA4B159d81df06355Ea3d20bEB1de38",
"proxyAdmin": "0xE047cb95FB3b7117989e911c6afb34771183fC35",
"merkleRootMultisigIsmFactory": "0xCA6Cb9Bc3cfF9E11003A06617cF934B684Bc78BC",
"messageIdMultisigIsmFactory": "0xAa4Be20E9957fE21602c74d7C3cF5CB1112EA9Ef",
"aggregationIsmFactory": "0x7491843F3A5Ba24E0f17a22645bDa04A1Ae2c584",
"aggregationHookFactory": "0x15DEeAB8dECDe553bb0B1F9C00984cbcae1af3D7",
"routingIsmFactory": "0x89E3530137aD51743536443a3EC838b502E72eb7",
"merkleTreeHook": "0x68eE9bec9B4dbB61f69D9D293Ae26a5AACb2e28f",
"interchainGasPaymaster": "0xD8A76C4D91fCbB7Cc8eA795DFDF870E48368995C",
"aggregationHook": "0x4ccC6d8eB79f2a1EC9bcb0f211fef7907631F91f",
"protocolFee": "0xD71Ff941120e8f935b8b1E2C1eD72F5d140FF458",
"mailbox": "0xd4C1905BB1D26BC93DAC913e13CaCC278CdCC80D",
"validatorAnnounce": "0x30f5b08e01808643221528BB2f7953bf2830Ef38",
"index": {
"from": 111554952
}
},
"polygon": {
"chainId": 137,
"domainId": 137,
"name": "polygon",
"protocol": "ethereum",
"displayName": "Polygon",
"nativeToken": {
"name": "Ether",
"symbol": "ETH",
"decimals": 18
},
"rpcUrls": [
{
"http": "https://rpc-mainnet.matic.quiknode.pro",
"pagination": {
"maxBlockRange": 10000,
"minBlockNumber": 19657100
}
},
{
"http": "https://polygon-rpc.com"
}
],
"blockExplorers": [
{
"name": "PolygonScan",
"url": "https://polygonscan.com",
"apiUrl": "https://api.polygonscan.com/api",
"family": "etherscan"
}
],
"blocks": {
"confirmations": 3,
"reorgPeriod": 256,
"estimateBlockTime": 2
},
"gasCurrencyCoinGeckoId": "matic-network",
"gnosisSafeTransactionServiceUrl": "https://safe-transaction-polygon.safe.global/",
"transactionOverrides": {
"maxFeePerGas": 500000000000,
"maxPriorityFeePerGas": 100000000000
},
"storageGasOracle": "0xA3a24EC5670F1F416AB9fD554FcE2f226AE9D7eB",
"proxyAdmin": "0xC4F7590C5d30BE959225dC75640657954A86b980",
"merkleRootMultisigIsmFactory": "0xa9E0E18E78b098c2DE36c42E4DDEA13ce214c592",
"messageIdMultisigIsmFactory": "0xEa5Be2AD66BB1BA321B7aCf0A079fBE304B09Ca0",
"aggregationIsmFactory": "0x81AdDD9Ca89105063DaDEBd5B4408551Ce850E22",
"aggregationHookFactory": "0xFeeB86e70e4a640cDd29636CCE19BD6fe8628135",
"routingIsmFactory": "0xF0752A65ffB2153EaE53F6a70c858a87022d5c56",
"mailbox": "0x5d934f4e2f797775e53561bB72aca21ba36B96BB",
"merkleTreeHook": "0x73FbD25c3e817DC4B4Cd9d00eff6D83dcde2DfF6",
"interchainGasPaymaster": "0x0071740Bf129b05C4684abfbBeD248D80971cce2",
"aggregationHook": "0x34dAb05650Cf590088bA18aF9d597f3e081bCc47",
"protocolFee": "0xF8F3629e308b4758F8396606405989F8D8C9c578",
"validatorAnnounce": "0x454E1a1E1CA8B51506090f1b5399083658eA4Fc5",
"index": {
"from": 49352047
}
},
"polygonzkevm": {
"protocol": "ethereum",
"chainId": 1101,
"domainId": 1101,
"name": "polygonzkevm",
"displayName": "Polygon zkEVM",
"nativeToken": {
"name": "Ether",
"symbol": "ETH",
"decimals": 18
},
"rpcUrls": [
{
"http": "https://polygonzkevm-mainnet.g.alchemy.com/v2/demo"
},
{
"http": "https://rpc.ankr.com/polygon_zkevm"
},
{
"http": "https://zkevm.polygonscan.com/"
}
],
"blockExplorers": [
{
"name": "PolygonScan",
"url": "https://zkevm.polygonscan.com/",
"apiUrl": "https://api-zkevm.polygonscan.com/api",
"family": "etherscan"
}
],
"blocks": {
"confirmations": 1,
"reorgPeriod": 1,
"estimateBlockTime": 10
},
"merkleRootMultisigIsmFactory": "0x8F7454AC98228f3504Bb91eA3D8Adafe6406110A",
"messageIdMultisigIsmFactory": "0xEb9FcFDC9EfDC17c1EC5E1dc085B98485da213D6",
"aggregationIsmFactory": "0x1052eF3419f26Bec74Ed7CEf4a4FA6812Bc09908",
"aggregationHookFactory": "0x0761b0827849abbf7b0cC09CE14e1C93D87f5004",
"routingIsmFactory": "0x4Ed7d626f1E96cD1C0401607Bf70D95243E3dEd1",
"merkleTreeHook": "0x149db7afD694722747035d5AEC7007ccb6F8f112",
"proxyAdmin": "0x2f2aFaE1139Ce54feFC03593FeE8AB2aDF4a85A7",
"storageGasOracle": "0x19dc38aeae620380430C200a6E990D5Af5480117",
"interchainGasPaymaster": "0x0D63128D887159d63De29497dfa45AFc7C699AE4",
"aggregationHook": "0x8464aF853363B8d6844070F68b0AB34Cb6523d0F",
"protocolFee": "0xd83A4F747fE80Ed98839e05079B1B7Fe037b1638",
"mailbox": "0x3a464f746D23Ab22155710f44dB16dcA53e0775E",
"validatorAnnounce": "0x2fa5F5C96419C222cDbCeC797D696e6cE428A7A9",
"index": {
"from": 6789061
}
},
"scroll": {
"chainId": 534352,
"domainId": 534352,
"name": "scroll",
"protocol": "ethereum",
"displayName": "Scroll",
"nativeToken": {
"name": "Ether",
"symbol": "ETH",
"decimals": 18
},
"rpcUrls": [
{
"http": "https://scroll.blockpi.network/v1/rpc/public"
},
{
"http": "https://scroll-mainnet.public.blastapi.io"
}
],
"blockExplorers": [
{
"name": "Scroll Explorer",
"url": "https://scrollscan.com/",
"apiUrl": "https://api.scrollscan.com/api",
"family": "etherscan"
}
],
"blocks": {
"confirmations": 1,
"reorgPeriod": 1,
"estimateBlockTime": 3
},
"merkleRootMultisigIsmFactory": "0x2C1FAbEcd7bFBdEBF27CcdB67baADB38b6Df90fC",
"messageIdMultisigIsmFactory": "0x8b83fefd896fAa52057798f6426E9f0B080FCCcE",
"aggregationIsmFactory": "0x8F7454AC98228f3504Bb91eA3D8Adafe6406110A",
"aggregationHookFactory": "0xEb9FcFDC9EfDC17c1EC5E1dc085B98485da213D6",
"routingIsmFactory": "0x1052eF3419f26Bec74Ed7CEf4a4FA6812Bc09908",
"merkleTreeHook": "0x6119E37Bd66406A1Db74920aC79C15fB8411Ba76",
"proxyAdmin": "0x0761b0827849abbf7b0cC09CE14e1C93D87f5004",
"storageGasOracle": "0x481171eb1aad17eDE6a56005B7F1aB00C581ef13",
"interchainGasPaymaster": "0xBF12ef4B9f307463D3FB59c3604F294dDCe287E2",
"aggregationHook": "0x9Bc0FAf446E128a618A88a2F28960Fb2Ca169faE",
"protocolFee": "0xc3F23848Ed2e04C0c6d41bd7804fa8f89F940B94",
"mailbox": "0x2f2aFaE1139Ce54feFC03593FeE8AB2aDF4a85A7",
"validatorAnnounce": "0xd83A4F747fE80Ed98839e05079B1B7Fe037b1638",
"index": {
"from": 426670
}
}
},
"defaultRpcConsensusType": "fallback"
}

@ -1,166 +0,0 @@
{
"chains": {
"celo": {
"name": "celo",
"domain": 42220,
"addresses": {
"mailbox": "0x35231d4c2D8B8ADcB5617A638A0c4548684c7C70",
"interchainGasPaymaster": "0x6cA0B6D22da47f091B7613223cD4BB03a2d77918",
"validatorAnnounce": "0x9bBdef63594D5FFc2f370Fe52115DdFFe97Bc524"
},
"protocol": "ethereum",
"finalityBlocks": 0,
"index": {
"from": 16884144
}
},
"ethereum": {
"name": "ethereum",
"domain": 1,
"addresses": {
"mailbox": "0x35231d4c2D8B8ADcB5617A638A0c4548684c7C70",
"interchainGasPaymaster": "0x6cA0B6D22da47f091B7613223cD4BB03a2d77918",
"validatorAnnounce": "0x9bBdef63594D5FFc2f370Fe52115DdFFe97Bc524"
},
"protocol": "ethereum",
"finalityBlocks": 20,
"index": {
"from": 16271503
}
},
"avalanche": {
"name": "avalanche",
"domain": 43114,
"addresses": {
"mailbox": "0x35231d4c2D8B8ADcB5617A638A0c4548684c7C70",
"interchainGasPaymaster": "0x6cA0B6D22da47f091B7613223cD4BB03a2d77918",
"validatorAnnounce": "0x9bBdef63594D5FFc2f370Fe52115DdFFe97Bc524"
},
"protocol": "ethereum",
"finalityBlocks": 3,
"index": {
"from": 24145479
}
},
"polygon": {
"name": "polygon",
"domain": 137,
"addresses": {
"mailbox": "0x35231d4c2D8B8ADcB5617A638A0c4548684c7C70",
"interchainGasPaymaster": "0x6cA0B6D22da47f091B7613223cD4BB03a2d77918",
"validatorAnnounce": "0x9bBdef63594D5FFc2f370Fe52115DdFFe97Bc524"
},
"protocol": "ethereum",
"finalityBlocks": 256,
"index": {
"from": 37313389
}
},
"bsc": {
"name": "bsc",
"domain": 56,
"addresses": {
"mailbox": "0x35231d4c2D8B8ADcB5617A638A0c4548684c7C70",
"interchainGasPaymaster": "0x6cA0B6D22da47f091B7613223cD4BB03a2d77918",
"validatorAnnounce": "0x9bBdef63594D5FFc2f370Fe52115DdFFe97Bc524"
},
"protocol": "ethereum",
"finalityBlocks": 15,
"index": {
"from": 24248037
}
},
"arbitrum": {
"name": "arbitrum",
"domain": 42161,
"addresses": {
"mailbox": "0x35231d4c2D8B8ADcB5617A638A0c4548684c7C70",
"interchainGasPaymaster": "0x6cA0B6D22da47f091B7613223cD4BB03a2d77918",
"validatorAnnounce": "0x9bBdef63594D5FFc2f370Fe52115DdFFe97Bc524"
},
"protocol": "ethereum",
"finalityBlocks": 0,
"index": {
"from": 49073182
}
},
"optimism": {
"name": "optimism",
"domain": 10,
"addresses": {
"mailbox": "0x35231d4c2D8B8ADcB5617A638A0c4548684c7C70",
"interchainGasPaymaster": "0x6cA0B6D22da47f091B7613223cD4BB03a2d77918",
"validatorAnnounce": "0x9bBdef63594D5FFc2f370Fe52115DdFFe97Bc524"
},
"protocol": "ethereum",
"finalityBlocks": 0,
"index": {
"from": 55698988
}
},
"moonbeam": {
"name": "moonbeam",
"domain": 1284,
"addresses": {
"mailbox": "0x35231d4c2D8B8ADcB5617A638A0c4548684c7C70",
"interchainGasPaymaster": "0x6cA0B6D22da47f091B7613223cD4BB03a2d77918",
"validatorAnnounce": "0x9bBdef63594D5FFc2f370Fe52115DdFFe97Bc524"
},
"protocol": "ethereum",
"finalityBlocks": 2,
"connection": {
"type": "http"
},
"index": {
"from": 2595747
}
},
"gnosis": {
"name": "gnosis",
"domain": 100,
"addresses": {
"mailbox": "0x35231d4c2D8B8ADcB5617A638A0c4548684c7C70",
"interchainGasPaymaster": "0x6cA0B6D22da47f091B7613223cD4BB03a2d77918",
"validatorAnnounce": "0x9bBdef63594D5FFc2f370Fe52115DdFFe97Bc524"
},
"protocol": "ethereum",
"finalityBlocks": 14,
"index": {
"from": 25900000
}
},
"nautilus": {
"name": "nautilus",
"domain": 22222,
"addresses": {
"mailbox": "0xF59557dfacDc5a1cb8A36Af43aA4819a6A891e88",
"interchainGasPaymaster": "0x3a464f746D23Ab22155710f44dB16dcA53e0775E",
"validatorAnnounce": "0x23ce76645EC601148fa451e751eeB75785b97A00"
},
"protocol": "ethereum",
"finalityBlocks": 1,
"index": {
"from": 216377
}
},
"solana": {
"name": "solana",
"domain": 1399811149,
"addresses": {
"mailbox": "Ge9atjAc3Ltu91VTbNpJDCjZ9CFxFyck4h3YBcTF9XPq",
"interchainGasPaymaster": "FCNfmLSZLo5x7oNYmkYU8WdPUu7pj636P9CaMxkmaCp7",
"validatorAnnounce": "C88Lk5GR6cPxYoJxPbNDDEwsx5Kxn1wZEomvQ2So333g"
},
"protocol": "sealevel",
"finalityBlocks": 0,
"connection": {
"type": "http",
"url": "https://api.mainnet-beta.solana.com"
},
"index": {
"from": 1,
"mode": "sequence"
}
}
}
}

@ -2,18 +2,21 @@
"chains": {
"sealeveltest1": {
"name": "sealeveltest1",
"domain": 13375,
"addresses": {
"mailbox": "692KZJaoe2KRcD6uhCQDLLXnLNA5ZLnfvdqjE4aX9iu1",
"interchainGasPaymaster": "DrFtxirPPsfdY4HQiNZj2A9o4Ux7JaL3gELANgAoihhp",
"validatorAnnounce": "DH43ae1LwemXAboWwSh8zc9pG8j72gKUEXNi57w8fEnn"
},
"chainId": 13375,
"domainId": 13375,
"mailbox": "692KZJaoe2KRcD6uhCQDLLXnLNA5ZLnfvdqjE4aX9iu1",
"interchainGasPaymaster": "DrFtxirPPsfdY4HQiNZj2A9o4Ux7JaL3gELANgAoihhp",
"validatorAnnounce": "DH43ae1LwemXAboWwSh8zc9pG8j72gKUEXNi57w8fEnn",
"protocol": "sealevel",
"finalityBlocks": 0,
"connection": {
"type": "http",
"url": "http://localhost:8899"
"blocks": {
"reorgPeriod": 0,
"confirmations": 0
},
"rpcUrls": [
{
"http": "http://localhost:8899"
}
],
"index": {
"from": 1,
"mode": "sequence"
@ -21,18 +24,21 @@
},
"sealeveltest2": {
"name": "sealeveltest2",
"domain": 13376,
"addresses": {
"mailbox": "9tCUWNjpqcf3NUSrtp7vquYVCwbEByvLjZUrhG5dgvhj",
"interchainGasPaymaster": "G5rGigZBL8NmxCaukK2CAKr9Jq4SUfAhsjzeri7GUraK",
"validatorAnnounce": "3Uo5j2Bti9aZtrDqJmAyuwiFaJFPFoNL5yxTpVCNcUhb"
},
"chainId": 13376,
"domainId": 13376,
"mailbox": "9tCUWNjpqcf3NUSrtp7vquYVCwbEByvLjZUrhG5dgvhj",
"interchainGasPaymaster": "G5rGigZBL8NmxCaukK2CAKr9Jq4SUfAhsjzeri7GUraK",
"validatorAnnounce": "3Uo5j2Bti9aZtrDqJmAyuwiFaJFPFoNL5yxTpVCNcUhb",
"protocol": "sealevel",
"finalityBlocks": 0,
"connection": {
"type": "http",
"url": "http://localhost:8899"
"blocks": {
"reorgPeriod": 0,
"confirmations": 0
},
"rpcUrls": [
{
"http": "http://localhost:8899"
}
],
"index": {
"from": 1,
"mode": "sequence"

File diff suppressed because it is too large Load Diff

@ -1,163 +0,0 @@
{
"chains": {
"alfajores": {
"name": "alfajores",
"domain": 44787,
"addresses": {
"mailbox": "0xCC737a94FecaeC165AbCf12dED095BB13F037685",
"interchainGasPaymaster": "0x8f9C3888bFC8a5B25AED115A82eCbb788b196d2a",
"validatorAnnounce": "0x3Fc742696D5dc9846e04f7A1823D92cb51695f9a"
},
"protocol": "ethereum",
"finalityBlocks": 0,
"index": {
"from": 14863532
}
},
"fuji": {
"name": "fuji",
"domain": 43113,
"addresses": {
"mailbox": "0xCC737a94FecaeC165AbCf12dED095BB13F037685",
"interchainGasPaymaster": "0x8f9C3888bFC8a5B25AED115A82eCbb788b196d2a",
"validatorAnnounce": "0x3Fc742696D5dc9846e04f7A1823D92cb51695f9a"
},
"protocol": "ethereum",
"finalityBlocks": 3,
"index": {
"from": 16330615
}
},
"mumbai": {
"name": "mumbai",
"domain": 80001,
"addresses": {
"mailbox": "0xCC737a94FecaeC165AbCf12dED095BB13F037685",
"interchainGasPaymaster": "0x8f9C3888bFC8a5B25AED115A82eCbb788b196d2a",
"validatorAnnounce": "0x3Fc742696D5dc9846e04f7A1823D92cb51695f9a"
},
"protocol": "ethereum",
"finalityBlocks": 32,
"index": {
"from": 29390033
}
},
"bsctestnet": {
"name": "bsctestnet",
"domain": 97,
"addresses": {
"mailbox": "0xCC737a94FecaeC165AbCf12dED095BB13F037685",
"interchainGasPaymaster": "0x8f9C3888bFC8a5B25AED115A82eCbb788b196d2a",
"validatorAnnounce": "0x3Fc742696D5dc9846e04f7A1823D92cb51695f9a"
},
"protocol": "ethereum",
"finalityBlocks": 9,
"index": {
"from": 25001629
}
},
"goerli": {
"name": "goerli",
"domain": 5,
"addresses": {
"mailbox": "0xCC737a94FecaeC165AbCf12dED095BB13F037685",
"interchainGasPaymaster": "0x8f9C3888bFC8a5B25AED115A82eCbb788b196d2a",
"validatorAnnounce": "0x3Fc742696D5dc9846e04f7A1823D92cb51695f9a"
},
"protocol": "ethereum",
"finalityBlocks": 2,
"index": {
"from": 8039005
}
},
"sepolia": {
"name": "sepolia",
"domain": 11155111,
"addresses": {
"mailbox": "0xCC737a94FecaeC165AbCf12dED095BB13F037685",
"interchainGasPaymaster": "0x8f9C3888bFC8a5B25AED115A82eCbb788b196d2a",
"validatorAnnounce": "0x3Fc742696D5dc9846e04f7A1823D92cb51695f9a"
},
"protocol": "ethereum",
"finalityBlocks": 2,
"index": {
"from": 3082913
}
},
"moonbasealpha": {
"name": "moonbasealpha",
"domain": 1287,
"addresses": {
"mailbox": "0xCC737a94FecaeC165AbCf12dED095BB13F037685",
"interchainGasPaymaster": "0x8f9C3888bFC8a5B25AED115A82eCbb788b196d2a",
"validatorAnnounce": "0x3Fc742696D5dc9846e04f7A1823D92cb51695f9a"
},
"protocol": "ethereum",
"finalityBlocks": 1,
"index": {
"from": 3310405
}
},
"optimismgoerli": {
"name": "optimismgoerli",
"domain": 420,
"addresses": {
"mailbox": "0xCC737a94FecaeC165AbCf12dED095BB13F037685",
"interchainGasPaymaster": "0x8f9C3888bFC8a5B25AED115A82eCbb788b196d2a",
"validatorAnnounce": "0x3Fc742696D5dc9846e04f7A1823D92cb51695f9a"
},
"protocol": "ethereum",
"finalityBlocks": 1,
"index": {
"from": 3055263
}
},
"arbitrumgoerli": {
"name": "arbitrumgoerli",
"domain": 421613,
"addresses": {
"mailbox": "0xCC737a94FecaeC165AbCf12dED095BB13F037685",
"interchainGasPaymaster": "0x8f9C3888bFC8a5B25AED115A82eCbb788b196d2a",
"validatorAnnounce": "0x3Fc742696D5dc9846e04f7A1823D92cb51695f9a"
},
"protocol": "ethereum",
"finalityBlocks": 1,
"index": {
"from": 1941997
}
},
"solanadevnet": {
"name": "solanadevnet",
"domain": 1399811151,
"addresses": {
"mailbox": "4v25Dz9RccqUrTzmfHzJMsjd1iVoNrWzeJ4o6GYuJrVn",
"interchainGasPaymaster": "7hMPEGdgBQFsjEz3aaNwZp8WMFHs615zAM3erXBDJuJR",
"validatorAnnounce": "CMHKvdq4CopDf7qXnDCaTybS15QekQeRt4oUB219yxsp"
},
"protocol": "sealevel",
"finalityBlocks": 0,
"connection": {
"type": "http",
"url": "https://api.devnet.solana.com"
},
"index": {
"from": 1,
"mode": "sequence"
}
},
"proteustestnet": {
"name": "proteustestnet",
"domain": 88002,
"addresses": {
"mailbox": "0x918D3924Fad8F71551D9081172e9Bb169745461e",
"interchainGasPaymaster": "0x06b62A9F5AEcc1E601D0E02732b4E1D0705DE7Db",
"validatorAnnounce": "0xEEea93d0d0287c71e47B3f62AFB0a92b9E8429a1"
},
"protocol": "ethereum",
"finalityBlocks": 1,
"index": {
"from": 8609588
}
}
}
}

@ -7,7 +7,7 @@ Expand the name of the chart.
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
We truncate at 63 chars - 11 because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "agent-common.fullname" -}}
@ -49,7 +49,7 @@ app.kubernetes.io/managed-by: {{ .Release.Service }}
Selector labels
*/}}
{{- define "agent-common.selectorLabels" -}}
app.kubernetes.io/name: {{ include "agent-common.name" . }}
app.kubernetes.io/name: {{ include "agent-common.name" . | trunc 63 | trimSuffix "-"}}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
@ -73,30 +73,30 @@ The name of the ClusterSecretStore/SecretStore
{{/*
Recursively converts a config object into environment variables than can
be parsed by rust. For example, a config of { foo: { bar: { baz: 420 }, boo: 421 } } will
be: HYP_FOO_BAR_BAZ=420 and HYP_FOO_BOO=421
be parsed by rust. For example, a config of { foo: { bar: { baz: 420 }, booGo: 421 } } will
be: HYP_FOO_BAR_BAZ=420 and HYP_FOO_BOOGO=421
Env vars can be formatted in FOO="BAR" format if .format is "dot_env",
FOO: "BAR" format if .format is "config_map", or otherwise
they will be formatted as spec YAML-friendly environment variables
*/}}
{{- define "agent-common.config-env-vars" -}}
{{- range $key, $value := .config }}
{{- $key_name := printf "%s%s" (default "" $.key_name_prefix) $key }}
{{- if typeIs "map[string]interface {}" $value }}
{{- include "agent-common.config-env-vars" (dict "config" $value "agent_name" $.agent_name "format" $.format "key_name_prefix" (printf "%s_" $key_name)) }}
{{- range $key_or_idx, $value := .config }}
{{- $key_name := printf "%s%v" (default "" $.key_name_prefix) $key_or_idx }}
{{- if or (typeIs "map[string]interface {}" $value) (typeIs "[]interface {}" $value) }}
{{- include "agent-common.config-env-vars" (dict "config" $value "format" $.format "key_name_prefix" (printf "%s_" $key_name)) }}
{{- else }}
{{- include "agent-common.config-env-var" (dict "agent_name" $.agent_name "key" $key_name "value" $value "format" $.format ) }}
{{- include "agent-common.config-env-var" (dict "key" $key_name "value" $value "format" $.format ) }}
{{- end }}
{{- end }}
{{- end }}
{{- define "agent-common.config-env-var" }}
{{- if (eq .format "dot_env") }}
HYP_{{ .agent_name | upper }}_{{ .key | upper }}={{ .value | quote }}
HYP_{{ .key | upper }}={{ .value | quote }}
{{- else if (eq .format "config_map") }}
HYP_{{ .agent_name | upper }}_{{ .key | upper }}: {{ .value | quote }}
HYP_{{ .key | upper }}: {{ .value | quote }}
{{- else }}
- name: HYP_{{ .agent_name | upper }}_{{ .key | upper }}
- name: HYP_{{ .key | upper }}
value: {{ .value | quote }}
{{- end }}
{{- end }}

@ -1,44 +0,0 @@
# Hyperlane-Agent Helm Chart
![Version: 0.1.0](https://img.shields.io/badge/Version-0.1.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 0.1.0](https://img.shields.io/badge/AppVersion-0.1.0-informational?style=flat-square)
A Helm Chart that encapsulates the deployment of the Hyperlane Rust Agent(s). It is currently designed to be deployed against a Google Kubernetes Engine cluster, but specification of another PVC Storage Class should be sufficient to make it compatible with other cloud providers.
Additional documentation is present in comments in `values.yaml`.
## Values
| Key | Type | Default | Description |
| -------------------------------------- | ------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| affinity | object | `{}` | |
| fullnameOverride | string | `""` | |
| image.pullPolicy | string | `"Always"` | |
| image.repository | string | `"gcr.io/clabs-optics/optics-agent"` | Main repository for Hyperlane Agent binaries, provided by cLabs |
| image.tag | string | `"latest"` | Overrides the image tag whose default is the chart appVersion. |
| imagePullSecrets | list | `[]` | |
| nameOverride | string | `""` | |
| nodeSelector | object | `{}` | |
| hyperlane | object | `{"outboxChain":{"address":null,"connectionType":null,"connectionUrl":null,"domain":null,"name":"goerli","protocol":null},"kathy":{"enabled":false,"messageInterval":null,"signers":[{"key":"","name":"goerli"},{"key":"","name":"alfajores"}]},"processor":{"enabled":false,"pollingInterval":null,"signers":[{"key":"","name":"goerli"},{"key":"","name":"alfajores"}]},"relayer":{"enabled":false,"pollingInterval":null,"signers":[{"key":"","name":"goerli"},{"key":"","name":"alfajores"}]},"inboxChains":[{"address":null,"connectionType":null,"connectionUrl":null,"domain":null,"name":"alfajores","protocol":null}],"runEnv":"default","validator":{"signer":"","enabled":false,"pollingInterval":null,"signers":[{"key":"","name":"goerli"},{"key":"","name":"alfajores"}],"updatePause":null}}` | Hyperlane Overrides. By default, Hyperlane Agents load the config baked into the Docker image; pass values here in order to override the values in the config. Note: For successful operation, one _must_ pass signer keys as they are not baked into the image for security reasons. |
| hyperlane.outboxChain.address | string | `nil` | The contract address for the home contract |
| hyperlane.outboxChain.connectionUrl | string | `nil` | Connection string pointing to an RPC endpoint for the home chain |
| hyperlane.outboxChain.domain | string | `nil` | The hard-coded domain corresponding to this blockchain |
| hyperlane.outboxChain.protocol | string | `nil` | RPC Style |
| hyperlane.relayer.enabled | bool | `false` | Enables or disables the relayer |
| hyperlane.inboxChains | list | `[{"address":null,"connectionType":null,"connectionUrl":null,"domain":null,"name":"alfajores","protocol":null}]` | Replica chain overrides, a sequence |
| hyperlane.inboxChains[0].address | string | `nil` | The contract address for the replica contract |
| hyperlane.inboxChains[0].connectionUrl | string | `nil` | Connection string pointing to an RPC endpoint for the replica chain |
| hyperlane.validator.signer | string | `""` | Specialized key used by validator and watcher used to sign attestations, separate from validator.keys |
| hyperlane.validator.enabled | bool | `false` | Enables or disables the validator |
| hyperlane.validator.pollingInterval | string | `nil` | How long to wait between checking for updates |
| hyperlane.validator.signers | list | `[{"key":"","name":"goerli"},{"key":"","name":"alfajores"}]` | Transaction Signing keys for home and replica(s) |
| podAnnotations | object | `{}` | |
| podSecurityContext | object | `{}` | |
| replicaCount | int | `1` | |
| resources | object | `{}` | |
| securityContext | object | `{}` | |
| tolerations | list | `[]` | |
| volumeStorageClass | string | `"standard"` | Default to standard storageclass provided by GKE |
---
Autogenerated from chart metadata using [helm-docs v1.5.0](https://github.com/norwoodj/helm-docs/releases/v1.5.0)

@ -0,0 +1,9 @@
{{/*
We truncate at 63 chars - (11 + (len $suffix)) because the controller-revision-hash label adds an 11 character suffix
to the pod name, and we want the -validator suffix to still be present, but are happy to truncate the preceding name.
See https://github.com/kubernetes/kubernetes/issues/64023 for controller-revision-hash details.
*/}}
{{- define "validator.fullname" -}}
{{- $suffix := "-validator" }}
{{- include "agent-common.fullname" . | trunc (int (sub 63 (add 11 (len $suffix)))) | trimSuffix "-" }}{{ print $suffix }}
{{- end }}

@ -7,10 +7,10 @@ metadata:
data:
ONELINE_BACKTRACES: "true"
RUST_BACKTRACE: {{ .Values.hyperlane.rustBacktrace }}
HYP_BASE_DB: {{ .Values.hyperlane.dbPath }}
HYP_BASE_TRACING_FMT: {{ .Values.hyperlane.tracing.format }}
HYP_BASE_TRACING_LEVEL: {{ .Values.hyperlane.tracing.level }}
HYP_DB: {{ .Values.hyperlane.dbPath }}
HYP_LOG_FORMAT: {{ .Values.hyperlane.tracing.format }}
HYP_LOG_LEVEL: {{ .Values.hyperlane.tracing.level }}
{{- range .Values.hyperlane.chains }}
{{- include "agent-common.config-env-vars" (dict "config" . "agent_name" "base" "key_name_prefix" (printf "CHAINS_%s_" (.name | upper)) "format" "config_map") | indent 2 }}
{{- include "agent-common.config-env-vars" (dict "config" . "key_name_prefix" (printf "chains_%s_" .name) "format" "config_map") | indent 2 }}
{{- end }}
HYP_BASE_METRICS: {{ .Values.hyperlane.metrics.port | quote }}
HYP_METRICSPORT: {{ .Values.hyperlane.metrics.port | quote }}

@ -27,11 +27,7 @@ spec:
*/}}
{{- range .Values.hyperlane.chains }}
{{- if not .disabled }}
{{- if or (eq .connection.type "httpQuorum") (eq .connection.type "httpFallback") }}
HYP_BASE_CHAINS_{{ .name | upper }}_CONNECTION_URLS: {{ printf "'{{ .%s_rpcs | fromJson | join \",\" }}'" .name }}
{{- else }}
HYP_BASE_CHAINS_{{ .name | upper }}_CONNECTION_URL: {{ printf "'{{ .%s_rpc | toString }}'" .name }}
{{- end }}
HYP_CHAINS_{{ .name | upper }}_CUSTOMRPCURLS: {{ printf "'{{ .%s_rpcs | mustFromJson | join \",\" }}'" .name }}
{{- end }}
{{- end }}
data:
@ -41,14 +37,8 @@ spec:
*/}}
{{- range .Values.hyperlane.chains }}
{{- if not .disabled }}
{{- if or (eq .connection.type "httpQuorum") (eq .connection.type "httpFallback") }}
- secretKey: {{ printf "%s_rpcs" .name }}
remoteRef:
key: {{ printf "%s-rpc-endpoints-%s" $.Values.hyperlane.runEnv .name }}
{{- else }}
- secretKey: {{ printf "%s_rpc" .name }}
remoteRef:
key: {{ printf "%s-rpc-endpoint-%s" $.Values.hyperlane.runEnv .name }}
{{- end }}
{{- end }}
{{- end }}

@ -23,13 +23,16 @@ spec:
data:
{{- range .Values.hyperlane.relayerChains }}
{{- if eq .signer.type "hexKey" }}
HYP_BASE_CHAINS_{{ .name | upper }}_SIGNER_KEY: {{ printf "'{{ .%s_signer_key | toString }}'" .name }}
HYP_CHAINS_{{ .name | upper }}_SIGNER_KEY: {{ printf "'{{ .%s_signer_key | toString }}'" .name }}
{{- end }}
{{- end }}
{{- if .Values.hyperlane.relayer.aws }}
{{- if and (eq .signer.type "aws") $.Values.hyperlane.relayer.aws }}
HYP_CHAINS_{{ .name | upper }}_SIGNER_TYPE: aws
HYP_CHAINS_{{ .name | upper }}_SIGNER_ID: {{ .signer.id }}
HYP_CHAINS_{{ .name | upper }}_SIGNER_REGION: {{ .signer.region}}
AWS_ACCESS_KEY_ID: {{ print "'{{ .aws_access_key_id | toString }}'" }}
AWS_SECRET_ACCESS_KEY: {{ print "'{{ .aws_secret_access_key | toString }}'" }}
{{- end }}
{{- end }}
data:
{{- range .Values.hyperlane.relayerChains }}
{{- if eq .signer.type "hexKey" }}

@ -55,14 +55,7 @@ spec:
- secretRef:
name: {{ include "agent-common.fullname" . }}-relayer-secret
env:
{{- include "agent-common.config-env-vars" (dict "config" .Values.hyperlane.relayer.config "agent_name" "relayer") | indent 10 }}
{{- $relayerChainNames := list }}
{{- range .Values.hyperlane.relayerChains }}
{{- include "agent-common.config-env-vars" (dict "config" .signer "agent_name" "base" "key_name_prefix" (printf "CHAINS_%s_SIGNER_" (.name | upper))) | indent 10 }}
{{- $relayerChainNames = append $relayerChainNames .name }}
{{- end }}
- name: HYP_BASE_RELAYCHAINS
value: {{ $relayerChainNames | join "," }}
{{- include "agent-common.config-env-vars" (dict "config" .Values.hyperlane.relayer.config) | nindent 10 }}
resources:
{{- toYaml .Values.hyperlane.relayer.resources | nindent 10 }}
volumeMounts:

@ -21,7 +21,7 @@ spec:
labels:
{{- include "agent-common.labels" . | nindent 10 }}
data:
HYP_BASE_DB: {{ print "'{{ .db | toString }}'" }}
HYP_DB: {{ print "'{{ .db | toString }}'" }}
data:
- secretKey: db
remoteRef:

@ -55,14 +55,7 @@ spec:
- secretRef:
name: {{ include "agent-common.fullname" . }}-scraper3-secret
env:
{{- $scraperChainNames := list }}
{{- range .Values.hyperlane.chains }}
{{- if not .disabled }}
{{- $scraperChainNames = append $scraperChainNames .name }}
{{- end }}
{{- end }}
- name: HYP_SCRAPER_CHAINSTOSCRAPE
value: {{ $scraperChainNames | join "," }}
{{- include "agent-common.config-env-vars" (dict "config" .Values.hyperlane.scraper.config) | nindent 8 }}
resources:
{{- toYaml .Values.hyperlane.scraper.resources | nindent 10 }}
ports:

@ -2,14 +2,12 @@
apiVersion: v1
kind: ConfigMap
metadata:
name: {{ include "agent-common.fullname" . }}-validator
name: {{ include "validator.fullname" . }}
labels:
{{- include "agent-common.labels" . | nindent 4 }}
data:
{{ $index := 0 }}
{{- range .Values.hyperlane.validator.configs }}
{{- range $index, $config := .Values.hyperlane.validator.configs }}
validator-{{ $index }}.env: |
{{- include "agent-common.config-env-vars" (dict "config" . "agent_name" "validator" "format" "dot_env") | indent 4 }}
{{ $index = add1 $index }}
{{- include "agent-common.config-env-vars" (dict "config" $config "format" "dot_env") | nindent 4 }}
{{- end }}
{{- end }}

@ -2,7 +2,7 @@
apiVersion: external-secrets.io/v1beta1
kind: ExternalSecret
metadata:
name: {{ include "agent-common.fullname" . }}-validator-external-secret
name: {{ include "validator.fullname" . }}-external-secret
labels:
{{- include "agent-common.labels" . | nindent 4 }}
annotations:
@ -14,7 +14,7 @@ spec:
refreshInterval: "1h"
# The secret that will be created
target:
name: {{ include "agent-common.fullname" . }}-validator-secret
name: {{ include "validator.fullname" . }}-secret
template:
type: Opaque
metadata:
@ -24,18 +24,18 @@ spec:
{{ $index := 0 }}
{{- range .Values.hyperlane.validator.configs }}
validator-{{ $index }}.env: |
{{- if eq .validator.type "hexKey" }}
HYP_VALIDATOR_VALIDATOR_KEY={{ printf "'{{ .signer_key_%d | toString }}'" $index }}
HYP_BASE_CHAINS_{{ .originChainName | upper }}_SIGNER_KEY={{ printf "'{{ .signer_key_%d | toString }}'" $index }}
{{- end }}
{{- if or (eq .checkpointSyncer.type "s3") $.Values.hyperlane.aws }}
{{- if eq .validator.type "hexKey" }}
HYP_VALIDATOR_KEY={{ printf "'{{ .signer_key_%d | toString }}'" $index }}
HYP_CHAINS_{{ .originChainName | upper }}_SIGNER_KEY={{ printf "'{{ .signer_key_%d | toString }}'" $index }}
{{- end }}
{{- if or (eq .checkpointSyncer.type "s3") $.Values.hyperlane.aws }}
AWS_ACCESS_KEY_ID={{ printf "'{{ .aws_access_key_id_%d | toString }}'" $index }}
AWS_SECRET_ACCESS_KEY={{ printf "'{{ .aws_secret_access_key_%d | toString }}'" $index }}
{{- end }}
{{- end }}
{{ $index = add1 $index }}
{{- end }}
data:
{{ $index := 0 }}
{{ $index = 0 }}
{{- range .Values.hyperlane.validator.configs }}
{{- if eq .validator.type "hexKey" }}
- secretKey: signer_key_{{ $index }}

@ -2,7 +2,7 @@
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: {{ include "agent-common.fullname" . }}-validator
name: {{ include "validator.fullname" . }}
labels:
{{- include "agent-common.labels" . | nindent 4 }}
app.kubernetes.io/component: validator
@ -12,7 +12,7 @@ spec:
{{- include "agent-common.selectorLabels" . | nindent 6 }}
app.kubernetes.io/component: validator
replicas: {{ len .Values.hyperlane.validator.configs }}
serviceName: {{ include "agent-common.fullname" . }}-validator
serviceName: {{ include "validator.fullname" . }}
template:
metadata:
annotations:
@ -58,7 +58,7 @@ spec:
- secretRef:
name: {{ include "agent-common.fullname" . }}-secret
- secretRef:
name: {{ include "agent-common.fullname" . }}-validator-secret
name: {{ include "validator.fullname" . }}-secret
env:
- name: REPLICA_NAME
valueFrom:
@ -79,10 +79,10 @@ spec:
volumes:
- name: config-env-vars
configMap:
name: {{ include "agent-common.fullname" . }}-validator
name: {{ include "validator.fullname" . }}
- name: secret-env-vars
secret:
secretName: {{ include "agent-common.fullname" . }}-validator-secret
secretName: {{ include "validator.fullname" . }}-secret
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}

@ -47,19 +47,33 @@ hyperlane:
aws: # true | false
# -- Chain overrides, a sequence
# This should mirror @hyperlane-xyz/sdk AgentChainMetadata
chains:
- name: 'alfajores'
- name: examplechain
disabled: false
rpcConsensusType: fallback
signer:
# aws:
addresses:
mailbox:
multisigIsm:
interchainGasPaymaster:
domain:
protocol: # "ethereum"
connection:
type: # "http"
type: # aws
index:
from:
chunk:
mode:
mailbox:
multisigIsm:
interchainGasPaymaster:
interchainSecurityModule:
protocol: ethereum
chainId:
domainId:
customRpcUrls:
- example:
url: https://example.com
priority: 1
blocks:
confirmations:
reorgPeriod:
estimatedBlockTime:
isTestnet: false
# Hyperlane Agent Roles
# Individually Switchable via <role>.enabled
@ -81,7 +95,6 @@ hyperlane:
# -- How long to wait between checking for updates
configs: []
# - interval:
# reorgPeriod:
# checkpointSyncers:
# originChainName:
# type: # "hexKey"
@ -103,6 +116,7 @@ hyperlane:
cpu: 500m
memory: 256Mi
config:
relayChains: ''
multisigCheckpointSyncer:
checkpointSyncers:
# -- Specify whether a default signer key is used for all chains in Values.hyperlane.relayerChains list.
@ -130,6 +144,7 @@ hyperlane:
cpu: 250m
memory: 256Mi
config:
chainsToScrape: ''
kathy:
enabled: false

@ -1,16 +1,12 @@
use std::future::Future;
use std::time::Duration;
use async_trait::async_trait;
use eyre::Result;
use paste::paste;
use tokio::time::sleep;
use tracing::{debug, instrument, trace};
use hyperlane_core::{
HyperlaneDomain, HyperlaneLogStore, HyperlaneMessage, HyperlaneMessageStore,
GasPaymentKey, HyperlaneDomain, HyperlaneLogStore, HyperlaneMessage, HyperlaneMessageStore,
HyperlaneWatermarkedLogStore, InterchainGasExpenditure, InterchainGasPayment,
InterchainGasPaymentMeta, LogMeta, H256,
InterchainGasPaymentMeta, LogMeta, MerkleTreeInsertion, H256,
};
use super::{
@ -30,6 +26,8 @@ const GAS_PAYMENT_META_PROCESSED: &str = "gas_payment_meta_processed_v3_";
const GAS_EXPENDITURE_FOR_MESSAGE_ID: &str = "gas_expenditure_for_message_id_v2_";
const PENDING_MESSAGE_RETRY_COUNT_FOR_MESSAGE_ID: &str =
"pending_message_retry_count_for_message_id_";
const MERKLE_TREE_INSERTION: &str = "merkle_tree_insertion_";
const MERKLE_LEAF_INDEX_BY_MESSAGE_ID: &str = "merkle_leaf_index_by_message_id_";
const LATEST_INDEXED_GAS_PAYMENT_BLOCK: &str = "latest_indexed_gas_payment_block";
type DbResult<T> = std::result::Result<T, DbError>;
@ -106,20 +104,6 @@ impl HyperlaneRocksDB {
}
}
// TODO(james): this is a quick-fix for the prover_sync and I don't like it
/// Poll the db every 100 milliseconds until the message id for `nonce` has
/// been stored, then resolve with it. The returned future never times out on
/// its own; it loops until `retrieve_message_id_by_nonce` yields `Some`, and
/// propagates any db error via `?`.
pub fn wait_for_message_nonce(&self, nonce: u32) -> impl Future<Output = DbResult<H256>> {
// Clone so the returned future is self-contained and does not borrow `self`.
let slf = self.clone();
async move {
loop {
if let Some(id) = slf.retrieve_message_id_by_nonce(&nonce)? {
return Ok(id);
}
sleep(Duration::from_millis(100)).await
}
}
}
/// If the provided gas payment, identified by its metadata, has not been
/// processed, processes the gas payment and records it as processed.
/// Returns whether the gas payment was processed for the first time.
@ -146,12 +130,28 @@ impl HyperlaneRocksDB {
self.store_processed_by_gas_payment_meta(&payment_meta, &true)?;
// Update the total gas payment for the message to include the payment
self.update_gas_payment_by_message_id(payment)?;
self.update_gas_payment_by_gas_payment_key(payment)?;
// Return true to indicate the gas payment was processed for the first time
Ok(true)
}
/// Store the merkle tree insertion event, and also store a mapping from message_id to leaf_index.
///
/// Idempotent: returns `Ok(false)` without writing when this leaf index is
/// already in the db, and `Ok(true)` when the insertion was stored for the
/// first time.
pub fn process_tree_insertion(&self, insertion: &MerkleTreeInsertion) -> DbResult<bool> {
if let Ok(Some(_)) = self.retrieve_merkle_tree_insertion_by_leaf_index(&insertion.index()) {
debug!(insertion=?insertion, "Tree insertion already stored in db");
return Ok(false);
}
// even if double insertions are ok, store the leaf by `leaf_index` (guaranteed to be unique)
// rather than by `message_id` (not guaranteed to be unique), so that leaves can be retrieved
// based on insertion order.
self.store_merkle_tree_insertion_by_leaf_index(&insertion.index(), insertion)?;
self.store_merkle_leaf_index_by_message_id(&insertion.message_id(), &insertion.index())?;
// Return true to indicate the tree insertion was processed
Ok(true)
}
/// Processes the gas expenditure and store the total expenditure for the
/// message.
pub fn process_gas_expenditure(&self, expenditure: InterchainGasExpenditure) -> DbResult<()> {
@ -160,12 +160,16 @@ impl HyperlaneRocksDB {
}
/// Update the total gas payment for a message to include gas_payment
fn update_gas_payment_by_message_id(&self, event: InterchainGasPayment) -> DbResult<()> {
let existing_payment = self.retrieve_gas_payment_by_message_id(event.message_id)?;
fn update_gas_payment_by_gas_payment_key(&self, event: InterchainGasPayment) -> DbResult<()> {
let gas_payment_key = GasPaymentKey {
message_id: event.message_id,
destination: event.destination,
};
let existing_payment = self.retrieve_gas_payment_by_gas_payment_key(gas_payment_key)?;
let total = existing_payment + event;
debug!(?event, new_total_gas_payment=?total, "Storing gas payment");
self.store_interchain_gas_payment_data_by_message_id(&total.message_id, &total.into())?;
self.store_interchain_gas_payment_data_by_gas_payment_key(&gas_payment_key, &total.into())?;
Ok(())
}
@ -190,14 +194,14 @@ impl HyperlaneRocksDB {
}
/// Retrieve the total gas payment for a message
pub fn retrieve_gas_payment_by_message_id(
pub fn retrieve_gas_payment_by_gas_payment_key(
&self,
message_id: H256,
gas_payment_key: GasPaymentKey,
) -> DbResult<InterchainGasPayment> {
Ok(self
.retrieve_interchain_gas_payment_data_by_message_id(&message_id)?
.retrieve_interchain_gas_payment_data_by_gas_payment_key(&gas_payment_key)?
.unwrap_or_default()
.complete(message_id))
.complete(gas_payment_key.message_id, gas_payment_key.destination))
}
/// Retrieve the total gas payment for a message
@ -249,6 +253,21 @@ impl HyperlaneLogStore<InterchainGasPayment> for HyperlaneRocksDB {
}
}
#[async_trait]
impl HyperlaneLogStore<MerkleTreeInsertion> for HyperlaneRocksDB {
    /// Persist every merkle tree insertion event, returning how many were
    /// newly stored (leaf indices already in the db are not counted).
    #[instrument(skip_all)]
    async fn store_logs(&self, leaves: &[(MerkleTreeInsertion, LogMeta)]) -> Result<u32> {
        let mut stored: u32 = 0;
        for (event, _) in leaves.iter() {
            // `process_tree_insertion` is idempotent and reports whether the
            // write was a first-time insert; only those count.
            stored += u32::from(self.process_tree_insertion(event)?);
        }
        Ok(stored)
    }
}
#[async_trait]
impl HyperlaneMessageStore for HyperlaneRocksDB {
/// Gets a message by nonce.
@ -315,7 +334,7 @@ make_store_and_retrieve!(pub(self), dispatched_block_number_by_nonce, MESSAGE_DI
make_store_and_retrieve!(pub, processed_by_nonce, NONCE_PROCESSED, u32, bool);
make_store_and_retrieve!(pub(self), processed_by_gas_payment_meta, GAS_PAYMENT_META_PROCESSED, InterchainGasPaymentMeta, bool);
make_store_and_retrieve!(pub(self), interchain_gas_expenditure_data_by_message_id, GAS_EXPENDITURE_FOR_MESSAGE_ID, H256, InterchainGasExpenditureData);
make_store_and_retrieve!(pub(self), interchain_gas_payment_data_by_message_id, GAS_PAYMENT_FOR_MESSAGE_ID, H256, InterchainGasPaymentData);
make_store_and_retrieve!(pub(self), interchain_gas_payment_data_by_gas_payment_key, GAS_PAYMENT_FOR_MESSAGE_ID, GasPaymentKey, InterchainGasPaymentData);
make_store_and_retrieve!(
pub,
pending_message_retry_count_by_message_id,
@ -323,3 +342,17 @@ make_store_and_retrieve!(
H256,
u32
);
make_store_and_retrieve!(
pub,
merkle_tree_insertion_by_leaf_index,
MERKLE_TREE_INSERTION,
u32,
MerkleTreeInsertion
);
make_store_and_retrieve!(
pub,
merkle_leaf_index_by_message_id,
MERKLE_LEAF_INDEX_BY_MESSAGE_ID,
H256,
u32
);

@ -31,9 +31,10 @@ impl Default for InterchainGasPaymentData {
}
impl InterchainGasPaymentData {
pub fn complete(self, message_id: H256) -> InterchainGasPayment {
pub fn complete(self, message_id: H256, destination: u32) -> InterchainGasPayment {
InterchainGasPayment {
message_id,
destination,
payment: self.payment,
gas_amount: self.gas_amount,
}

@ -5,7 +5,7 @@ use futures_util::future::try_join_all;
use hyperlane_core::{
Delivery, HyperlaneChain, HyperlaneDomain, HyperlaneMessageStore, HyperlaneProvider,
HyperlaneWatermarkedLogStore, InterchainGasPaymaster, InterchainGasPayment, Mailbox,
MultisigIsm, ValidatorAnnounce, H256,
MerkleTreeHook, MerkleTreeInsertion, MultisigIsm, ValidatorAnnounce, H256,
};
use crate::{
@ -179,9 +179,11 @@ macro_rules! build_indexer_fns {
impl Settings {
build_contract_fns!(build_interchain_gas_paymaster, build_interchain_gas_paymasters -> dyn InterchainGasPaymaster);
build_contract_fns!(build_mailbox, build_mailboxes -> dyn Mailbox);
build_contract_fns!(build_merkle_tree_hook, build_merkle_tree_hooks -> dyn MerkleTreeHook);
build_contract_fns!(build_validator_announce, build_validator_announces -> dyn ValidatorAnnounce);
build_contract_fns!(build_provider, build_providers -> dyn HyperlaneProvider);
build_indexer_fns!(build_delivery_indexer, build_delivery_indexers -> dyn HyperlaneWatermarkedLogStore<Delivery>, WatermarkContractSync<Delivery>);
build_indexer_fns!(build_message_indexer, build_message_indexers -> dyn HyperlaneMessageStore, MessageContractSync);
build_indexer_fns!(build_interchain_gas_payment_indexer, build_interchain_gas_payment_indexers -> dyn HyperlaneWatermarkedLogStore<InterchainGasPayment>, WatermarkContractSync<InterchainGasPayment>);
build_indexer_fns!(build_merkle_tree_hook_indexer, build_merkle_tree_hook_indexers -> dyn HyperlaneWatermarkedLogStore<MerkleTreeInsertion>, WatermarkContractSync<MerkleTreeInsertion>);
}

@ -8,8 +8,9 @@ use eyre::{eyre, Context, Result};
use hyperlane_core::{
AggregationIsm, CcipReadIsm, ContractLocator, HyperlaneAbi, HyperlaneDomain,
HyperlaneDomainProtocol, HyperlaneMessage, HyperlaneProvider, HyperlaneSigner, IndexMode,
InterchainGasPaymaster, InterchainGasPayment, InterchainSecurityModule, Mailbox, MultisigIsm,
RoutingIsm, SequenceIndexer, ValidatorAnnounce, H256,
InterchainGasPaymaster, InterchainGasPayment, InterchainSecurityModule, Mailbox,
MerkleTreeHook, MerkleTreeInsertion, MultisigIsm, RoutingIsm, SequenceIndexer,
ValidatorAnnounce, H256,
};
use hyperlane_ethereum::{
self as h_eth, BuildableWithProvider, EthereumInterchainGasPaymasterAbi, EthereumMailboxAbi,
@ -31,8 +32,8 @@ pub struct ChainConf {
pub domain: HyperlaneDomain,
/// Signer configuration for this chain
pub signer: Option<SignerConf>,
/// Number of blocks until finality
pub finality_blocks: u32,
/// The reorg period of the chain, i.e. the number of blocks until finality
pub reorg_period: u32,
/// Addresses of contracts on the chain
pub addresses: CoreContractAddresses,
/// The chain connection details
@ -76,6 +77,8 @@ pub struct CoreContractAddresses {
pub interchain_gas_paymaster: H256,
/// Address of the ValidatorAnnounce contract
pub validator_announce: H256,
/// Address of the MerkleTreeHook contract
pub merkle_tree_hook: Option<H256>,
}
/// Indexing settings
@ -115,7 +118,7 @@ impl ChainConf {
/// Try to convert the chain setting into a Mailbox contract
pub async fn build_mailbox(&self, metrics: &CoreMetrics) -> Result<Box<dyn Mailbox>> {
let ctx = "Building provider";
let ctx = "Building mailbox";
let locator = self.locator(self.addresses.mailbox);
match &self.connection {
@ -123,7 +126,6 @@ impl ChainConf {
self.build_ethereum(conf, &locator, metrics, h_eth::MailboxBuilder {})
.await
}
ChainConnectionConf::Fuel(conf) => {
let wallet = self.fuel_signer().await.context(ctx)?;
hyperlane_fuel::FuelMailbox::new(conf, locator, wallet)
@ -140,6 +142,37 @@ impl ChainConf {
.context(ctx)
}
/// Try to convert the chain setting into a Merkle Tree Hook contract
///
/// Falls back to the mailbox address when no dedicated `merkle_tree_hook`
/// address is configured. Panics (`todo!`) for Fuel chains, which do not
/// support merkle tree hooks yet.
pub async fn build_merkle_tree_hook(
&self,
metrics: &CoreMetrics,
) -> Result<Box<dyn MerkleTreeHook>> {
let ctx = "Building merkle tree hook";
// TODO: if the merkle tree hook is set for sealevel, it's still a mailbox program
// that the connection is made to using the pda seeds, which will not be usable.
let address = self
.addresses
.merkle_tree_hook
.unwrap_or(self.addresses.mailbox);
let locator = self.locator(address);
match &self.connection {
ChainConnectionConf::Ethereum(conf) => {
self.build_ethereum(conf, &locator, metrics, h_eth::MerkleTreeHookBuilder {})
.await
}
ChainConnectionConf::Fuel(_conf) => {
todo!("Fuel does not support merkle tree hooks yet")
}
ChainConnectionConf::Sealevel(conf) => {
// On sealevel the mailbox program itself serves as the hook.
h_sealevel::SealevelMailbox::new(conf, locator, None)
.map(|m| Box::new(m) as Box<dyn MerkleTreeHook>)
.map_err(Into::into)
}
}
.context(ctx)
}
/// Try to convert the chain settings into a message indexer
pub async fn build_message_indexer(
&self,
@ -155,7 +188,7 @@ impl ChainConf {
&locator,
metrics,
h_eth::SequenceIndexerBuilder {
finality_blocks: self.finality_blocks,
reorg_period: self.reorg_period,
},
)
.await
@ -185,7 +218,7 @@ impl ChainConf {
&locator,
metrics,
h_eth::DeliveryIndexerBuilder {
finality_blocks: self.finality_blocks,
reorg_period: self.reorg_period,
},
)
.await
@ -247,7 +280,7 @@ impl ChainConf {
metrics,
h_eth::InterchainGasPaymasterIndexerBuilder {
mailbox_address: self.addresses.mailbox.into(),
finality_blocks: self.finality_blocks,
reorg_period: self.reorg_period,
},
)
.await
@ -264,6 +297,39 @@ impl ChainConf {
.context(ctx)
}
/// Try to convert the chain settings into a merkle tree hook indexer
///
/// Falls back to the mailbox address when no dedicated `merkle_tree_hook`
/// address is configured. Panics (`todo!`) for Fuel chains, which are not
/// supported yet.
pub async fn build_merkle_tree_hook_indexer(
&self,
metrics: &CoreMetrics,
) -> Result<Box<dyn SequenceIndexer<MerkleTreeInsertion>>> {
let ctx = "Building merkle tree hook indexer";
let address = self
.addresses
.merkle_tree_hook
.unwrap_or(self.addresses.mailbox);
let locator = self.locator(address);
match &self.connection {
ChainConnectionConf::Ethereum(conf) => {
self.build_ethereum(
conf,
&locator,
metrics,
h_eth::MerkleTreeHookIndexerBuilder {
// How many blocks to wait for finality before indexing.
reorg_period: self.reorg_period,
},
)
.await
}
ChainConnectionConf::Fuel(_) => todo!(),
ChainConnectionConf::Sealevel(_) => {
let indexer = Box::new(h_sealevel::SealevelMerkleTreeHookIndexer::new());
Ok(indexer as Box<dyn SequenceIndexer<MerkleTreeInsertion>>)
}
}
.context(ctx)
}
/// Try to convert the chain settings into a ValidatorAnnounce
pub async fn build_validator_announce(
&self,
@ -493,6 +559,13 @@ impl ChainConf {
self.addresses.interchain_gas_paymaster,
EthereumInterchainGasPaymasterAbi::fn_map_owned(),
);
if let Some(address) = self.addresses.merkle_tree_hook {
register_contract(
"merkle_tree_hook",
address,
EthereumInterchainGasPaymasterAbi::fn_map_owned(),
);
}
cfg
}

@ -1,435 +0,0 @@
//! This module is responsible for parsing the agent's settings using the old config format.
// TODO: Remove this module once we have finished migrating to the new format.
use std::{
collections::{HashMap, HashSet},
path::PathBuf,
};
use ethers_prometheus::middleware::PrometheusMiddlewareConf;
use eyre::{eyre, Context};
use hyperlane_core::{cfg_unwrap_all, config::*, utils::hex_or_base58_to_h256, HyperlaneDomain};
use serde::Deserialize;
use super::envs::*;
use crate::settings::{
chains::IndexSettings, trace::TracingConfig, ChainConf, ChainConnectionConf,
CheckpointSyncerConf, CoreContractAddresses, Settings, SignerConf,
};
/// Raw base settings.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DeprecatedRawSettings {
// Per-chain configuration, keyed by chain name.
chains: Option<HashMap<String, DeprecatedRawChainConf>>,
// Fallback signer applied to any chain that does not define its own.
defaultsigner: Option<DeprecatedRawSignerConf>,
// Metrics port, accepted as string or int; defaults to 9090 when parsed.
metrics: Option<StrOrInt>,
// Tracing/log configuration; defaults when unset.
tracing: Option<TracingConfig>,
}
impl FromRawConf<DeprecatedRawSettings, Option<&HashSet<&str>>> for Settings {
/// Parse raw (legacy-format) agent settings into typed `Settings`.
///
/// `filter`, when present, restricts parsing to the named chains; all
/// other chain entries are dropped before parsing. Per-chain errors are
/// accumulated into one `ConfigParsingError` so every misconfigured chain
/// is reported at once rather than failing on the first.
fn from_config_filtered(
raw: DeprecatedRawSettings,
cwp: &ConfigPath,
filter: Option<&HashSet<&str>>,
) -> Result<Self, ConfigParsingError> {
let mut err = ConfigParsingError::default();
let chains: HashMap<String, ChainConf> = if let Some(mut chains) = raw.chains {
// Parse the default signer first so it can be applied to any chain
// that does not define its own signer.
let default_signer: Option<SignerConf> = raw.defaultsigner.and_then(|r| {
r.parse_config(&cwp.join("defaultsigner"))
.take_config_err(&mut err)
});
if let Some(filter) = filter {
chains.retain(|k, _| filter.contains(&k.as_str()));
}
let chains_path = cwp + "chains";
chains
.into_iter()
.map(|(k, v)| {
// The config path keeps the original key casing; the map key
// is lowercased for lookups.
let cwp = &chains_path + &k;
let k = k.to_ascii_lowercase();
let mut parsed: ChainConf = v.parse_config(&cwp)?;
if let Some(default_signer) = &default_signer {
parsed.signer.get_or_insert_with(|| default_signer.clone());
}
Ok((k, parsed))
})
// Keep successes; merge each chain's failure into the accumulator.
.filter_map(|res| match res {
Ok((k, v)) => Some((k, v)),
Err(e) => {
err.merge(e);
None
}
})
.collect()
} else {
Default::default()
};
let tracing = raw.tracing.unwrap_or_default();
// Metrics port defaults to 9090 when unset or unparseable.
let metrics = raw
.metrics
.and_then(|port| port.try_into().take_err(&mut err, || cwp + "metrics"))
.unwrap_or(9090);
err.into_result(Self {
chains,
metrics_port: metrics,
tracing,
})
}
}
/// Legacy chain-connection config: the `protocol` field selects the variant
/// and the `connection` field carries its payload.
#[derive(Deserialize, Debug)]
#[serde(tag = "protocol", content = "connection", rename_all = "camelCase")]
enum DeprecatedRawChainConnectionConf {
Ethereum(h_eth::RawConnectionConf),
Fuel(h_fuel::DeprecatedRawConnectionConf),
Sealevel(h_sealevel::DeprecatedRawConnectionConf),
// Catch-all so an unrecognized protocol still deserializes; it is rejected
// later during parsing with a proper config error.
#[serde(other)]
Unknown,
}
impl FromRawConf<DeprecatedRawChainConnectionConf> for ChainConnectionConf {
fn from_config_filtered(
raw: DeprecatedRawChainConnectionConf,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
use DeprecatedRawChainConnectionConf::*;
match raw {
Ethereum(r) => Ok(Self::Ethereum(r.parse_config(&cwp.join("connection"))?)),
Fuel(r) => Ok(Self::Fuel(r.parse_config(&cwp.join("connection"))?)),
Sealevel(r) => Ok(Self::Sealevel(r.parse_config(&cwp.join("connection"))?)),
Unknown => {
Err(eyre!("Unknown chain protocol")).into_config_result(|| cwp.join("protocol"))
}
}
}
}
/// Raw (string) forms of the core contract addresses; each field is required
/// and is parsed as hex or base58 into an `H256` during conversion.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct DeprecatedRawCoreContractAddresses {
mailbox: Option<String>,
interchain_gas_paymaster: Option<String>,
validator_announce: Option<String>,
}
impl FromRawConf<DeprecatedRawCoreContractAddresses> for CoreContractAddresses {
/// Parse the three required core contract addresses, accumulating one
/// "missing" or "unparseable" error per field before failing, so all
/// problems are reported together.
fn from_config_filtered(
raw: DeprecatedRawCoreContractAddresses,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
let mut err = ConfigParsingError::default();
// For each named field: require presence, then parse the hex/base58
// string into an H256, recording any failure against the field's path.
macro_rules! parse_addr {
($name:ident) => {
let $name = raw
.$name
.ok_or_else(|| {
eyre!(
"Missing {} core contract address",
stringify!($name).replace('_', " ")
)
})
.take_err(&mut err, || cwp + stringify!($name))
.and_then(|v| {
hex_or_base58_to_h256(&v).take_err(&mut err, || cwp + stringify!($name))
});
};
}
parse_addr!(mailbox);
parse_addr!(interchain_gas_paymaster);
parse_addr!(validator_announce);
// Unwrap all three Options at once, or bail with the accumulated errors.
cfg_unwrap_all!(cwp, err: [mailbox, interchain_gas_paymaster, validator_announce]);
err.into_result(Self {
mailbox,
interchain_gas_paymaster,
validator_announce,
})
}
}
/// Raw indexing settings: where to start, query chunk size, and index mode.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct DeprecatedRawIndexSettings {
// First block to index from; defaults to 0 when unset.
from: Option<StrOrInt>,
// Blocks per query chunk; defaults to 1999 when unset.
chunk: Option<StrOrInt>,
// Index mode name, deserialized via serde_json during conversion.
mode: Option<String>,
}
impl FromRawConf<DeprecatedRawIndexSettings> for IndexSettings {
    /// Convert the raw (stringly-typed) index settings into typed
    /// `IndexSettings`, accumulating parse errors instead of failing fast.
    fn from_config_filtered(
        raw: DeprecatedRawIndexSettings,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        let mut err = ConfigParsingError::default();

        // Starting block: default 0 when unset or unparseable.
        let from = match raw.from {
            Some(v) => v
                .try_into()
                .take_err(&mut err, || cwp + "from")
                .unwrap_or_default(),
            None => Default::default(),
        };

        // Chunk size: default 1999 when unset or unparseable.
        let chunk_size = match raw.chunk {
            Some(v) => v
                .try_into()
                .take_err(&mut err, || cwp + "chunk")
                .unwrap_or(1999),
            None => 1999,
        };

        // Index mode: round-trip through serde_json to reuse its enum parsing.
        let mode = match raw.mode {
            Some(m) => serde_json::from_value(serde_json::Value::from(m))
                .context("Invalid mode")
                .take_err(&mut err, || cwp + "mode")
                .unwrap_or_default(),
            None => Default::default(),
        };

        err.into_result(Self {
            from,
            chunk_size,
            mode,
        })
    }
}
/// A raw chain setup is a domain ID, an address on that chain (where the
/// mailbox is deployed) and details for connecting to the chain API.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DeprecatedRawChainConf {
// Human-readable chain name, e.g. "ethereum".
name: Option<String>,
// Numeric domain id, accepted as string or int.
domain: Option<StrOrInt>,
// Chain-specific signer; falls back to the settings-level default signer.
pub(super) signer: Option<DeprecatedRawSignerConf>,
// Blocks until finality; defaults to 0 when unset.
finality_blocks: Option<StrOrInt>,
// Core contract addresses (mailbox, IGP, validator announce).
addresses: Option<DeprecatedRawCoreContractAddresses>,
// Flattened protocol/connection pair (see DeprecatedRawChainConnectionConf).
#[serde(flatten, default)]
connection: Option<DeprecatedRawChainConnectionConf>,
// TODO: if people actually use the metrics conf we should also add a raw form.
#[serde(default)]
metrics_conf: Option<PrometheusMiddlewareConf>,
// Indexing settings; defaults applied during conversion.
#[serde(default)]
index: Option<DeprecatedRawIndexSettings>,
}
impl FromRawConf<DeprecatedRawChainConf> for ChainConf {
/// Parse a full raw chain config into a typed `ChainConf`, accumulating
/// errors across all sub-sections (connection, domain, addresses, signer,
/// finality, index) so they are reported together.
fn from_config_filtered(
raw: DeprecatedRawChainConf,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
let mut err = ConfigParsingError::default();
// Parse the connection first: the domain below needs its protocol.
let connection = raw
.connection
.ok_or_else(|| eyre!("Missing `connection` configuration"))
.take_err(&mut err, || cwp + "connection")
.and_then(|r| r.parse_config(cwp).take_config_err(&mut err));
// Domain parsing is skipped entirely when the connection failed, since
// the protocol would be unknown.
let domain = connection.as_ref().and_then(|c: &ChainConnectionConf| {
let protocol = c.protocol();
let domain_id = raw
.domain
.ok_or_else(|| eyre!("Missing `domain` configuration"))
.take_err(&mut err, || cwp + "domain")
.and_then(|r| {
r.try_into()
.context("Invalid domain id, expected integer")
.take_err(&mut err, || cwp + "domain")
});
let name = raw
.name
.as_deref()
.ok_or_else(|| eyre!("Missing domain `name` configuration"))
.take_err(&mut err, || cwp + "name");
// Validate (id, name, protocol) as a known Hyperlane domain.
HyperlaneDomain::from_config(domain_id?, name?, protocol)
.take_err(&mut err, || cwp.clone())
});
let addresses = raw
.addresses
.ok_or_else(|| eyre!("Missing `addresses` configuration for core contracts"))
.take_err(&mut err, || cwp + "addresses")
.and_then(|v| {
v.parse_config(&cwp.join("addresses"))
.take_config_err(&mut err)
});
// Signer is optional; a default may be applied by the caller.
let signer = raw.signer.and_then(|v| -> Option<SignerConf> {
v.parse_config(&cwp.join("signer"))
.take_config_err(&mut err)
});
// Finality defaults to 0 (no confirmation depth) when unset.
let finality_blocks = raw
.finality_blocks
.and_then(|v| {
v.try_into()
.context("Invalid `finalityBlocks`, expected integer")
.take_err(&mut err, || cwp + "finality_blocks")
})
.unwrap_or(0);
let index = raw
.index
.and_then(|v| v.parse_config(&cwp.join("index")).take_config_err(&mut err))
.unwrap_or_default();
let metrics_conf = raw.metrics_conf.unwrap_or_default();
// Bail with all accumulated errors if any required piece is missing.
cfg_unwrap_all!(cwp, err: [connection, domain, addresses]);
err.into_result(Self {
connection,
domain,
addresses,
signer,
finality_blocks,
index,
metrics_conf,
})
}
}
/// Raw signer types
#[derive(Debug, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct DeprecatedRawSignerConf {
// Signer kind: "hexKey" or "aws". When absent, the kind is inferred from
// which of the other fields are set (see the SignerConf parse impl).
#[serde(rename = "type")]
signer_type: Option<String>,
// Hex-encoded private key (hexKey signer).
key: Option<String>,
// AWS KMS key id (aws signer).
id: Option<String>,
// AWS region (aws signer).
region: Option<String>,
}
/// Raw checkpoint syncer types
///
/// The `type` field selects the variant; unrecognized values land on
/// `Unknown` and are rejected during parsing.
#[derive(Debug, Deserialize)]
#[serde(tag = "type", rename_all = "camelCase")]
pub enum DeprecatedRawCheckpointSyncerConf {
/// A local checkpoint syncer
LocalStorage {
/// Path to the storage directory (created on parse if missing)
path: Option<String>,
},
/// A checkpoint syncer on S3
S3 {
/// Bucket name
bucket: Option<String>,
/// S3 Region
region: Option<String>,
/// Folder name inside bucket - defaults to the root of the bucket
folder: Option<String>,
},
/// Unknown checkpoint syncer type was specified
#[serde(other)]
Unknown,
}
impl FromRawConf<DeprecatedRawSignerConf> for SignerConf {
fn from_config_filtered(
raw: DeprecatedRawSignerConf,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
let key_path = || cwp + "key";
let region_path = || cwp + "region";
match raw.signer_type.as_deref() {
Some("hexKey") => Ok(Self::HexKey {
key: raw
.key
.ok_or_else(|| eyre!("Missing `key` for HexKey signer"))
.into_config_result(key_path)?
.parse()
.into_config_result(key_path)?,
}),
Some("aws") => Ok(Self::Aws {
id: raw
.id
.ok_or_else(|| eyre!("Missing `id` for Aws signer"))
.into_config_result(|| cwp + "id")?,
region: raw
.region
.ok_or_else(|| eyre!("Missing `region` for Aws signer"))
.into_config_result(region_path)?
.parse()
.into_config_result(region_path)?,
}),
Some(t) => Err(eyre!("Unknown signer type `{t}`")).into_config_result(|| cwp + "type"),
None if raw.key.is_some() => Ok(Self::HexKey {
key: raw.key.unwrap().parse().into_config_result(key_path)?,
}),
None if raw.id.is_some() | raw.region.is_some() => Ok(Self::Aws {
id: raw
.id
.ok_or_else(|| eyre!("Missing `id` for Aws signer"))
.into_config_result(|| cwp + "id")?,
region: raw
.region
.ok_or_else(|| eyre!("Missing `region` for Aws signer"))
.into_config_result(region_path)?
.parse()
.into_config_result(region_path)?,
}),
None => Ok(Self::Node),
}
}
}
impl FromRawConf<DeprecatedRawCheckpointSyncerConf> for CheckpointSyncerConf {
/// Parse a raw checkpoint syncer config into a typed one.
///
/// NOTE: for `LocalStorage` this has a filesystem side effect — the storage
/// directory is created (with parents) if it does not already exist.
fn from_config_filtered(
raw: DeprecatedRawCheckpointSyncerConf,
cwp: &ConfigPath,
_filter: (),
) -> ConfigResult<Self> {
match raw {
DeprecatedRawCheckpointSyncerConf::LocalStorage { path } => {
let path: PathBuf = path
.ok_or_else(|| eyre!("Missing `path` for LocalStorage checkpoint syncer"))
.into_config_result(|| cwp + "path")?
.parse()
.into_config_result(|| cwp + "path")?;
if !path.exists() {
// Create the directory tree so the syncer can write to it.
std::fs::create_dir_all(&path)
.with_context(|| {
format!(
"Failed to create local checkpoint syncer storage directory at {:?}",
path
)
})
.into_config_result(|| cwp + "path")?;
} else if !path.is_dir() {
// The path exists but is a file (or other non-directory).
Err(eyre!(
"LocalStorage checkpoint syncer path is not a directory"
))
.into_config_result(|| cwp + "path")?;
}
Ok(Self::LocalStorage { path })
}
DeprecatedRawCheckpointSyncerConf::S3 {
bucket,
folder,
region,
} => Ok(Self::S3 {
bucket: bucket
.ok_or_else(|| eyre!("Missing `bucket` for S3 checkpoint syncer"))
.into_config_result(|| cwp + "bucket")?,
folder,
region: region
.ok_or_else(|| eyre!("Missing `region` for S3 checkpoint syncer"))
.into_config_result(|| cwp + "region")?
.parse()
.into_config_result(|| cwp + "region")?,
}),
DeprecatedRawCheckpointSyncerConf::Unknown => {
// `#[serde(other)]` absorbed an unrecognized or missing type tag.
Err(eyre!("Missing `type` for checkpoint syncer"))
.into_config_result(|| cwp + "type")
}
}
}
}

@ -1,9 +1,8 @@
use std::ffi::{OsStr, OsString};
use config::{ConfigError, Map, Source, Value, ValueKind};
use convert_case::Case;
use crate::settings::loader::split_and_recase_key;
use hyperlane_core::unwrap_or_none_result;
use itertools::Itertools;
/// A source for loading configuration from command line arguments.
///
@ -24,10 +23,6 @@ pub struct CommandLineArguments {
/// Ignore empty env values (treat as unset).
ignore_empty: bool,
/// What casing to use for the keys in the environment. By default it will not mutate the key
/// value.
casing: Option<Case>,
/// Alternate source for the environment. This can be used when you want to
/// test your own code using this source, without the need to change the
/// actual system environment variables.
@ -46,11 +41,6 @@ impl CommandLineArguments {
self
}
pub fn casing(mut self, casing: Case) -> Self {
self.casing = Some(casing);
self
}
pub fn source<I, S>(mut self, source: I) -> Self
where
I: IntoIterator<Item = S>,
@ -87,7 +77,7 @@ impl Source for CommandLineArguments {
continue;
}
let key = split_and_recase_key(separator, self.casing, key);
let key = key.split(separator).join(".");
m.insert(key, Value::new(Some(&uri), ValueKind::String(value)));
}
@ -165,9 +155,7 @@ impl Iterator for ArgumentParser {
impl ArgumentParser {
#[inline(never)]
fn find_next_kv_pair(&mut self) -> Result<Option<(String, String, PairKind, usize)>, Error> {
let Some(idx) = self.index_of_next_key() else {
return Ok(None);
};
unwrap_or_none_result!(idx, self.index_of_next_key());
// full term without leading '--'
let term = &os_to_str(&self.0[idx])?[2..];
if term.is_empty() {

@ -0,0 +1,66 @@
use std::fmt::Debug;
use config::{ConfigError, Map, Source, Value, ValueKind};
use convert_case::{Case, Casing};
use derive_new::new;
use itertools::Itertools;
/// A configuration [`Source`] adapter that re-cases every key produced by the
/// wrapped source into a single target case before it is handed to `config`.
#[derive(Clone, Debug, new)]
pub struct CaseAdapter<S> {
    /// The wrapped source whose collected keys are re-cased on `collect`.
    inner: S,
    /// The case each `.`-separated key component is converted to.
    casing: Case,
}
impl<S> Source for CaseAdapter<S>
where
    S: Source + Clone + Send + Sync + 'static,
{
    fn clone_into_box(&self) -> Box<dyn Source + Send + Sync> {
        let duplicate = self.clone();
        Box::new(duplicate)
    }
    /// Collect the inner source's map, then re-case every key (recursing into
    /// nested tables and arrays) into `self.casing`.
    fn collect(&self) -> Result<Map<String, Value>, ConfigError> {
        let raw = self.inner.collect()?;
        let recased = raw
            .into_iter()
            .map(|(key, value)| recase_pair(key, value, self.casing))
            .collect();
        Ok(recased)
    }
}
/// Re-case a single (key, value) pair, recursing into nested tables and arrays
/// so every key in the subtree ends up in `case`.
fn recase_pair(key: String, mut val: Value, case: Case) -> (String, Value) {
    let recased_key = split_and_recase_key(".", Some(case), key);
    match &mut val.kind {
        ValueKind::Table(table) => {
            // Drain and re-insert so nested keys are re-cased in place.
            let entries: Vec<_> = table
                .drain()
                .map(|(k, v)| recase_pair(k, v, case))
                .collect();
            table.extend(entries);
        }
        ValueKind::Array(ary) => {
            // Array elements carry no keys of their own; only recurse into the
            // values and discard the placeholder key.
            let elements: Vec<_> = ary
                .drain(..)
                .map(|elem| recase_pair(String::new(), elem, case).1)
                .collect();
            ary.extend(elements)
        }
        _ => {}
    }
    (recased_key, val)
}
/// Split `key` on `sep`, optionally convert each component to `case`, and
/// re-join the components with the standard `config` crate separator `.`.
fn split_and_recase_key(sep: &str, case: Option<Case>, key: String) -> String {
    match case {
        // Re-case every component and join them back together with `.`.
        Some(case) => key.split(sep).map(|part| part.to_case(case)).join("."),
        // No re-casing requested: just standardize the separator to `.`.
        None if !sep.is_empty() && sep != "." => key.replace(sep, "."),
        // Nothing to do: no usable separator and the case is preserved.
        None => key,
    }
}

@ -1,343 +0,0 @@
// TODO: Remove this file after deprecated config parsing has been removed.
use std::ffi::{OsStr, OsString};
use config::{ConfigError, Map, Source, Value, ValueKind};
use convert_case::Case;
use crate::settings::loader::split_and_recase_key;
/// A source for loading configuration from command line arguments.
/// Command line argument keys are case-insensitive, and the following forms are
/// supported:
///
/// * `--key=value`
/// * `--key="value"`
/// * `--key='value'`
/// * `--key value`
/// * `--key` (value is an empty string)
#[must_use]
#[derive(Clone, Debug, Default)]
pub struct DeprecatedCommandLineArguments {
    /// Optional character sequence that separates each key segment in an
    /// argument key pattern. Consider a nested configuration such as
    /// `redis.password`: a separator of `-` would allow an argument key
    /// of `redis-password` to match. Defaults to `-` when unset.
    separator: Option<String>,
    /// Ignore empty argument values (treat as unset).
    ignore_empty: bool,
    /// Alternate source for the arguments. This can be used when you want to
    /// test your own code using this source, without the need to change the
    /// actual process arguments.
    source: Option<Vec<OsString>>,
}
#[allow(unused)]
impl DeprecatedCommandLineArguments {
pub fn separator(mut self, s: &str) -> Self {
self.separator = Some(s.into());
self
}
pub fn ignore_empty(mut self, ignore: bool) -> Self {
self.ignore_empty = ignore;
self
}
pub fn source<I, S>(mut self, source: I) -> Self
where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
self.source = Some(source.into_iter().map(|s| s.as_ref().to_owned()).collect());
self
}
}
impl Source for DeprecatedCommandLineArguments {
    fn clone_into_box(&self) -> Box<dyn Source + Send + Sync> {
        Box::new((*self).clone())
    }
    /// Parse `--key value` style program arguments into a flat config map.
    fn collect(&self) -> Result<Map<String, Value>, ConfigError> {
        let mut m = Map::new();
        let uri: String = "program argument".into();
        // Key segments are separated by `-` unless configured otherwise.
        let separator = self.separator.as_deref().unwrap_or("-");
        // Parse either the injected test arguments or the real process args.
        let mut args = if let Some(source) = &self.source {
            ArgumentParser::from_vec(source.clone())
        } else {
            ArgumentParser::from_env()
        };
        while let Some((key, value)) = args
            .next()
            .transpose()
            .map_err(|e| ConfigError::Foreign(Box::new(e)))?
        {
            if self.ignore_empty && value.is_empty() {
                continue;
            }
            // Keys are lower-cased to flat case and re-joined with `.`; the two
            // special cases below restore camelCase names that flattening would
            // otherwise destroy.
            let mut key = split_and_recase_key(separator, Some(Case::Flat), key);
            if key.ends_with("interchaingaspaymaster") {
                key = key.replace("interchaingaspaymaster", "interchainGasPaymaster");
            } else if key.ends_with("validatorannounce") {
                key = key.replace("validatorannounce", "validatorAnnounce");
            }
            m.insert(key, Value::new(Some(&uri), ValueKind::String(value)));
        }
        // Any tokens the parser did not consume as `--key value` pairs are an
        // error rather than being silently dropped.
        let remaining = args.finish();
        if remaining.is_empty() {
            Ok(m)
        } else {
            Err(ConfigError::Message("Could not parse all arguments".into()))
        }
    }
}
/// An ultra simple CLI arguments parser.
/// Adapted from pico-args 0.5.0.
///
/// Holds the not-yet-consumed argument tokens; parsed pairs are removed as the
/// iterator advances, so whatever remains at the end is unparsed input.
#[derive(Clone, Debug)]
pub struct ArgumentParser(Vec<OsString>);
impl ArgumentParser {
/// Creates a parser from a vector of arguments.
///
/// The executable path **must** be removed.
///
/// This can be used for supporting `--` arguments to forward to another
/// program.
fn from_vec(args: Vec<OsString>) -> Self {
ArgumentParser(args)
}
/// Creates a parser from [`env::args_os`].
///
/// The executable path will be removed.
///
/// [`env::args_os`]: https://doc.rust-lang.org/stable/std/env/fn.args_os.html
fn from_env() -> Self {
let mut args: Vec<_> = std::env::args_os().collect();
args.remove(0);
ArgumentParser(args)
}
/// Returns a list of remaining arguments.
///
/// It's up to the caller what to do with them.
/// One can report an error about unused arguments,
/// other can use them for further processing.
fn finish(self) -> Vec<OsString> {
self.0
}
}
impl Iterator for ArgumentParser {
    type Item = Result<(String, String), Error>;
    /// Yield the next parsed `(key, value)` pair, removing the consumed
    /// token(s) from the backing vector.
    fn next(&mut self) -> Option<Self::Item> {
        let (key, value, kind, idx) = match self.find_next_kv_pair() {
            Err(e) => return Some(Err(e)),
            Ok(None) => return None,
            Ok(Some(found)) => found,
        };
        // Remove the consumed tokens; for a two-token pair the later index is
        // removed first so `idx` stays valid.
        if kind == PairKind::TwoArguments {
            self.0.remove(idx + 1);
        }
        self.0.remove(idx);
        Some(Ok((key, value)))
    }
}
// internal workings
impl ArgumentParser {
    #[inline(never)]
    /// Scan for the next `--key…` token and return `(key, value, kind, index)`,
    /// where `kind` records how many tokens the pair spans and `index` is the
    /// position of the key token. Returns `Ok(None)` when no keys remain.
    fn find_next_kv_pair(&mut self) -> Result<Option<(String, String, PairKind, usize)>, Error> {
        let Some(idx) = self.index_of_next_key() else {
            return Ok(None);
        };
        // full term without leading '--'
        let term = &os_to_str(&self.0[idx])?[2..];
        if term.is_empty() {
            return Err(Error::EmptyKey);
        }
        if let Some((key, value)) = term.split_once('=') {
            // Parse a `--key=value` pair.
            let key = key.to_owned();
            // Check for quoted value.
            let value = if starts_with(value, b'"') {
                if !ends_with(value, b'"') {
                    // A closing quote must be the same as an opening one.
                    return Err(Error::UnmatchedQuote(key));
                }
                // Strip the surrounding double quotes.
                &value[1..value.len() - 1]
            } else if starts_with(value, b'\'') {
                if !ends_with(value, b'\'') {
                    // A closing quote must be the same as an opening one.
                    return Err(Error::UnmatchedQuote(key));
                }
                // Strip the surrounding single quotes.
                &value[1..value.len() - 1]
            } else {
                value
            };
            Ok(Some((key, value.to_owned(), PairKind::SingleArgument, idx)))
        } else {
            // Parse a `--key value` pair.
            let key = term.to_owned();
            // Peek at the following token; absent means an empty value.
            let value = self
                .0
                .get(idx + 1)
                .map(|v| os_to_str(v))
                .transpose()?
                .unwrap_or("");
            if value.is_empty() || value.starts_with('-') {
                // the next value is another key
                Ok(Some((key, "".to_owned(), PairKind::SingleArgument, idx)))
            } else {
                Ok(Some((key, value.to_owned(), PairKind::TwoArguments, idx)))
            }
        }
    }
    /// Index of the first remaining token that begins with `--`, if any.
    fn index_of_next_key(&self) -> Option<usize> {
        self.0.iter().position(|v| {
            #[cfg(unix)]
            {
                // On unix, OsStr exposes raw bytes, so no UTF-8 check is needed.
                use std::os::unix::ffi::OsStrExt;
                v.len() >= 2 && &v.as_bytes()[0..2] == b"--"
            }
            #[cfg(not(unix))]
            {
                // Elsewhere, check via a UTF-8 view; non-UTF-8 tokens never match.
                v.len() >= 2 && v.to_str().map(|v| v.starts_with("--")).unwrap_or(false)
            }
        })
    }
}
/// Does `text` begin with byte `c`? Empty input yields `false`.
#[inline]
fn starts_with(text: &str, c: u8) -> bool {
    text.as_bytes().first() == Some(&c)
}
/// Does `text` end with byte `c`? Empty input yields `false`.
#[inline]
fn ends_with(text: &str, c: u8) -> bool {
    text.as_bytes().last() == Some(&c)
}
#[inline]
fn os_to_str(text: &OsStr) -> Result<&str, Error> {
text.to_str().ok_or(Error::NonUtf8Argument)
}
/// A list of possible errors.
#[derive(Clone, Debug, thiserror::Error)]
pub enum Error {
    /// Arguments must be valid UTF-8 strings.
    #[error("argument is not a UTF-8 string")]
    NonUtf8Argument,
    /// Found `--` alone, or a key with nothing after the prefix.
    #[error("key name is empty (possibly after removing prefix)")]
    EmptyKey,
    /// Could not find a closing quote for a value.
    #[error("unmatched quote in `{0}`")]
    UnmatchedQuote(String),
}
/// How many command-line tokens a parsed key/value pair consumed.
#[derive(Clone, Copy, PartialEq, Eq)]
enum PairKind {
    /// `--key=value` or a bare `--key`: one token.
    SingleArgument,
    /// `--key value`: the key token plus the following value token.
    TwoArguments,
}
#[cfg(test)]
mod test {
    use super::*;
    // Asserts that `$key` maps to `$value` in the collected config and removes
    // the entry, so the trailing `is_empty` assertion catches leftover keys.
    macro_rules! assert_arg {
        ($config:expr, $key:literal, $value:literal) => {
            let origin = "program argument".to_owned();
            assert_eq!(
                $config.remove($key),
                Some(Value::new(
                    Some(&origin),
                    ValueKind::String($value.to_owned())
                ))
            );
        };
    }
    // Covers every supported form: `--key value`, `--key=value`, quoted values
    // (double and single), an empty quoted value, and bare keys with no value.
    const ARGUMENTS: &[&str] = &[
        "--key-a",
        "value-a",
        "--keY-b=value-b",
        "--key-c=\"value c\"",
        "--KEY-d='valUE d'",
        "--key-e=''",
        "--key-F",
        "--key-g=value-g",
        "--key-h",
    ];
    // Keys are flattened to lowercase and `-` separators become `.`.
    #[test]
    fn default_case() {
        let mut config = DeprecatedCommandLineArguments::default()
            .source(ARGUMENTS)
            .collect()
            .unwrap();
        assert_arg!(config, "key.a", "value-a");
        assert_arg!(config, "key.b", "value-b");
        assert_arg!(config, "key.c", "value c");
        assert_arg!(config, "key.d", "valUE d");
        assert_arg!(config, "key.e", "");
        assert_arg!(config, "key.f", "");
        assert_arg!(config, "key.g", "value-g");
        assert_arg!(config, "key.h", "");
        assert!(config.is_empty());
    }
    // With `ignore_empty`, entries whose value is the empty string are dropped.
    #[test]
    fn ignore_empty() {
        let mut config = DeprecatedCommandLineArguments::default()
            .source(ARGUMENTS)
            .ignore_empty(true)
            .collect()
            .unwrap();
        assert_arg!(config, "key.a", "value-a");
        assert_arg!(config, "key.b", "value-b");
        assert_arg!(config, "key.c", "value c");
        assert_arg!(config, "key.d", "valUE d");
        assert_arg!(config, "key.g", "value-g");
        assert!(config.is_empty());
    }
}

@ -1,9 +1,7 @@
use std::env;
use config::{ConfigError, Map, Source, Value, ValueKind};
use convert_case::Case;
use crate::settings::loader::split_and_recase_key;
use itertools::Itertools;
#[must_use]
#[derive(Clone, Debug, Default)]
@ -21,11 +19,6 @@ pub struct Environment {
/// an environment key of `REDIS_PASSWORD` to match. Defaults to `_`.
separator: Option<String>,
/// What casing to use for the keys in the environment. By default it will not mutate the key
/// value. Case conversion will be performed after the prefix has been removed on each of the
/// seperated path components individually.
casing: Option<Case>,
/// Ignore empty env values (treat as unset).
ignore_empty: bool,
@ -51,14 +44,9 @@ impl Environment {
self
}
pub fn casing(mut self, casing: Case) -> Self {
self.casing = Some(casing);
self
}
pub fn source<'a, I, S>(mut self, source: I) -> Self
where
I: IntoIterator<Item = &'a (S, S)>,
I: IntoIterator<Item = (S, S)>,
S: AsRef<str> + 'a,
{
self.source = Some(
@ -98,7 +86,7 @@ impl Source for Environment {
return None;
}
let key = split_and_recase_key(separator, self.casing, key);
let key = key.split(separator).join(".");
Some((key, Value::new(Some(&uri), ValueKind::String(value))))
};
@ -138,17 +126,16 @@ mod test {
#[test]
fn default_case() {
let mut config = Environment::default()
.source(ENVS)
.source(ENVS.iter().cloned())
.prefix("PRE__")
.separator("__")
.casing(Case::Camel)
.collect()
.unwrap();
assert_env!(config, "key.a", "value-a");
assert_env!(config, "KEY.A", "value-a");
assert_env!(config, "key.b", "");
assert_env!(config, "key.c.partA", "value c a");
assert_env!(config, "key.cPartB", "value c b");
assert_env!(config, "KEY.C.PART_A", "value c a");
assert_env!(config, "KEY.C_PART_B", "value c b");
assert!(config.is_empty());
}
@ -156,18 +143,17 @@ mod test {
#[test]
fn ignore_empty() {
let mut config = Environment::default()
.source(ENVS)
.source(ENVS.iter().cloned())
.ignore_empty(true)
.source(ENVS)
.source(ENVS.iter().cloned())
.prefix("PRE__")
.separator("__")
.casing(Case::Snake)
.collect()
.unwrap();
assert_env!(config, "key.a", "value-a");
assert_env!(config, "key.c.part_a", "value c a");
assert_env!(config, "key.c_part_b", "value c b");
assert_env!(config, "KEY.A", "value-a");
assert_env!(config, "KEY.C.PART_A", "value c a");
assert_env!(config, "KEY.C_PART_B", "value c b");
assert!(config.is_empty());
}

@ -1,49 +1,28 @@
//! Load a settings object from the config locations.
use std::{collections::HashMap, env, error::Error, fmt::Debug, path::PathBuf};
use std::{env, error::Error, fmt::Debug, path::PathBuf};
use config::{Config, Environment as DeprecatedEnvironment, File};
use convert_case::{Case, Casing};
use eyre::{bail, Context, Result};
use config::{Config, File};
use convert_case::Case;
use eyre::{eyre, Context, Result};
use hyperlane_core::config::*;
use itertools::Itertools;
use serde::de::DeserializeOwned;
use crate::settings::loader::deprecated_arguments::DeprecatedCommandLineArguments;
use crate::settings::loader::{
arguments::CommandLineArguments, case_adapter::CaseAdapter, environment::Environment,
};
mod arguments;
mod deprecated_arguments;
mod case_adapter;
mod environment;
/// Deserialize a settings object from the configs.
pub fn load_settings<T, R>(name: &str) -> ConfigResult<R>
pub fn load_settings<T, R>() -> ConfigResult<R>
where
T: DeserializeOwned + Debug,
R: FromRawConf<T>,
{
let root_path = ConfigPath::default();
let raw =
load_settings_object::<T, &str>(name, &[]).into_config_result(|| root_path.clone())?;
raw.parse_config(&root_path)
}
/// Load a settings object from the config locations.
/// Further documentation can be found in the `settings` module.
fn load_settings_object<T, S>(agent_prefix: &str, ignore_prefixes: &[S]) -> Result<T>
where
T: DeserializeOwned,
S: AsRef<str>,
{
// Derive additional prefix from agent name
let prefix = format!("HYP_{}", agent_prefix).to_ascii_uppercase();
let filtered_env: HashMap<String, String> = env::vars()
.filter(|(k, _v)| {
!ignore_prefixes
.iter()
.any(|prefix| k.starts_with(prefix.as_ref()))
})
.collect();
let mut base_config_sources = vec![];
let mut builder = Config::builder();
@ -51,7 +30,8 @@ where
// Always load the default config files (`rust/config/*.json`)
for entry in PathBuf::from("./config")
.read_dir()
.expect("Failed to open config directory")
.context("Failed to open config directory")
.into_config_result(|| root_path.clone())?
.map(Result::unwrap)
{
if !entry.file_type().unwrap().is_file() {
@ -62,7 +42,7 @@ where
let ext = fname.to_str().unwrap().split('.').last().unwrap_or("");
if ext == "json" {
base_config_sources.push(format!("{:?}", entry.path()));
builder = builder.add_source(File::from(entry.path()));
builder = builder.add_source(CaseAdapter::new(File::from(entry.path()), Case::Flat));
}
}
@ -75,31 +55,41 @@ where
let p = PathBuf::from(path);
if p.is_file() {
if p.extension() == Some("json".as_ref()) {
builder = builder.add_source(File::from(p));
let config_file = File::from(p);
let re_cased_config_file = CaseAdapter::new(config_file, Case::Flat);
builder = builder.add_source(re_cased_config_file);
} else {
bail!("Provided config path via CONFIG_FILES is of an unsupported type ({p:?})")
return Err(eyre!(
"Provided config path via CONFIG_FILES is of an unsupported type ({p:?})"
))
.into_config_result(|| root_path.clone());
}
} else if !p.exists() {
bail!("Provided config path via CONFIG_FILES does not exist ({p:?})")
return Err(eyre!(
"Provided config path via CONFIG_FILES does not exist ({p:?})"
))
.into_config_result(|| root_path.clone());
} else {
bail!("Provided config path via CONFIG_FILES is not a file ({p:?})")
return Err(eyre!(
"Provided config path via CONFIG_FILES is not a file ({p:?})"
))
.into_config_result(|| root_path.clone());
}
}
let config_deserializer = builder
// Use a base configuration env variable prefix
.add_source(
DeprecatedEnvironment::with_prefix("HYP_BASE")
.separator("_")
.source(Some(filtered_env.clone())),
)
.add_source(
DeprecatedEnvironment::with_prefix(&prefix)
.separator("_")
.source(Some(filtered_env)),
)
.add_source(DeprecatedCommandLineArguments::default().separator("."))
.build()?;
.add_source(CaseAdapter::new(
Environment::default().prefix("HYP_").separator("_"),
Case::Flat,
))
.add_source(CaseAdapter::new(
CommandLineArguments::default().separator("."),
Case::Flat,
))
.build()
.context("Failed to load config sources")
.into_config_result(|| root_path.clone())?;
let formatted_config = {
let f = format!("{config_deserializer:#?}");
@ -114,34 +104,26 @@ where
}
};
Config::try_deserialize::<T>(config_deserializer).or_else(|err| {
let mut err = if let Some(source_err) = err.source() {
let source = format!("Config error source: {source_err}");
Err(err).context(source)
} else {
Err(err.into())
};
for cfg_path in base_config_sources.iter().chain(config_file_paths.iter()) {
err = err.with_context(|| format!("Config loaded: {cfg_path}"));
}
let raw_config = Config::try_deserialize::<T>(config_deserializer)
.or_else(|err| {
let mut err = if let Some(source_err) = err.source() {
let source = format!("Config error source: {source_err}");
Err(err).context(source)
} else {
Err(err.into())
};
println!("Error during deserialization, showing the config for debugging: {formatted_config}");
err.context("Config deserialization error, please check the config reference (https://docs.hyperlane.xyz/docs/operators/agent-configuration/configuration-reference)")
})
}
for cfg_path in base_config_sources.iter().chain(config_file_paths.iter()) {
err = err.with_context(|| format!("Config loaded: {cfg_path}"));
}
eprintln!("Loaded config for debugging: {formatted_config}");
err.context("Config deserialization error, please check the config reference (https://docs.hyperlane.xyz/docs/operators/agent-configuration/configuration-reference)")
})
.into_config_result(|| root_path.clone())?;
/// Load a settings object from the config locations and re-join the components with the standard
/// `config` crate separator `.`.
fn split_and_recase_key(sep: &str, case: Option<Case>, key: String) -> String {
if let Some(case) = case {
// if case is given, replace case of each key component and separate them with `.`
key.split(sep).map(|s| s.to_case(case)).join(".")
} else if !sep.is_empty() && sep != "." {
// Just standardize the separator to `.`
key.replace(sep, ".")
} else {
// no changes needed if there was no separator defined and we are preserving case.
key
let res = raw_config.parse_config(&root_path);
if res.is_err() {
eprintln!("Loaded config for debugging: {formatted_config}");
}
res
}

@ -25,14 +25,7 @@
//! #### N.B.: Environment variable names correspond 1:1 with cfg file's JSON object hierarchy.
//!
//! In particular, note that any environment variables whose names are prefixed
//! with:
//!
//! * `HYP_BASE`
//!
//! * `HYP_[agentname]`, where `[agentmame]` is agent-specific, e.g.
//! `HYP_VALIDATOR` or `HYP_RELAYER`.
//!
//! will be read as an override to be applied against the hierarchical structure
//! with `HYP_` will be read as an override to be applied against the hierarchical structure
//! of the configuration provided by the json config file at
//! `./config/<env>/<config>.json`.
//!
@ -40,11 +33,10 @@
//!
//! ```json
//! {
//! "environment": "test",
//! "signers": {},
//! "chains": {
//! "test2": {
//! "domain": "13372",
//! "domainId": "13372",
//! ...
//! },
//! ...
@ -53,11 +45,9 @@
//! ```
//!
//! and an environment variable is supplied which defines
//! `HYP_BASE_CHAINS_TEST2_DOMAIN=1`, then the `decl_settings` macro in
//! `rust/hyperlane-base/src/macros.rs` will directly override the 'domain'
//! field found in the json config to be `1`, since the fields in the
//! environment variable name describe the path traversal to arrive at this
//! field in the JSON config object.
//! `HYP_CHAINS_TEST2_DOMAINID=1`, then the config parser will directly override the value of
//! the field found in config to be `1`, since the fields in the environment variable name describe
//! the path traversal to arrive at this field in the JSON config object.
//!
//! ### Configuration value precedence
//!
@ -67,12 +57,9 @@
//! 1. The files matching `config/<env>/<config>.json`.
//! 2. The order of configs in `CONFIG_FILES` with each sequential one
//! overwriting previous ones as appropriate.
//! 3. Configuration env vars with the prefix `HYP_BASE` intended
//! 3. Configuration env vars with the prefix `HYP` intended
//! to be shared by multiple agents in the same environment
//! E.g. `export HYP_BASE_INBOXES_KOVAN_DOMAIN=3000`
//! 4. Configuration env vars with the prefix `HYP_<agent_prefix>`
//! intended to be used by a specific agent.
//! E.g. `export HYP_RELAYER_ORIGINCHAIN="ethereum"`
//! E.g. `export HYP_CHAINS_ARBITRUM_DOMAINID=3000`
//! 5. Arguments passed to the agent on the command line.
//! E.g. `--originChainName ethereum`
@ -103,7 +90,6 @@ mod signers;
mod trace;
mod checkpoint_syncer;
pub mod deprecated_parser;
pub mod parser;
/// Declare that an agent can be constructed from settings.
@ -117,9 +103,7 @@ macro_rules! impl_loadable_from_settings {
($agent:ident, $settingsparser:ident -> $settingsobj:ident) => {
impl hyperlane_base::LoadableFromSettings for $settingsobj {
fn load() -> hyperlane_core::config::ConfigResult<Self> {
hyperlane_base::settings::loader::load_settings::<$settingsparser, Self>(
stringify!($agent),
)
hyperlane_base::settings::loader::load_settings::<$settingsparser, Self>()
}
}
};

@ -4,6 +4,7 @@ use convert_case::{Case, Casing};
use derive_new::new;
use eyre::{eyre, Context};
use hyperlane_core::{config::*, utils::hex_or_base58_to_h256, H256, U256};
use itertools::Itertools;
use serde::de::{DeserializeOwned, StdError};
use serde_json::Value;
@ -26,7 +27,7 @@ impl<'v> ValueParser<'v> {
/// Get a value at the given key and verify that it is present.
pub fn get_key(&self, key: &str) -> ConfigResult<ValueParser<'v>> {
self.get_opt_key(key)?
self.get_opt_key(&key.to_case(Case::Flat))?
.ok_or_else(|| eyre!("Expected key `{key}` to be defined"))
.into_config_result(|| &self.cwp + key.to_case(Case::Snake))
}
@ -35,7 +36,7 @@ impl<'v> ValueParser<'v> {
pub fn get_opt_key(&self, key: &str) -> ConfigResult<Option<ValueParser<'v>>> {
let cwp = &self.cwp + key.to_case(Case::Snake);
match self.val {
Value::Object(obj) => Ok(obj.get(key).map(|val| Self {
Value::Object(obj) => Ok(obj.get(&key.to_case(Case::Flat)).map(|val| Self {
val,
cwp: cwp.clone(),
})),
@ -45,6 +46,7 @@ impl<'v> ValueParser<'v> {
}
/// Create an iterator over all (key, value) tuples.
/// Be warned that keys will be in flat case.
pub fn into_obj_iter(
self,
) -> ConfigResult<impl Iterator<Item = (String, ValueParser<'v>)> + 'v> {
@ -67,11 +69,40 @@ impl<'v> ValueParser<'v> {
/// Create an iterator over all array elements.
pub fn into_array_iter(self) -> ConfigResult<impl Iterator<Item = ValueParser<'v>>> {
let cwp = self.cwp.clone();
match self.val {
Value::Array(arr) => Ok(arr.iter().enumerate().map(move |(i, v)| Self {
val: v,
cwp: &cwp + i.to_string(),
})),
}))
.map(|itr| Box::new(itr) as Box<dyn Iterator<Item = ValueParser<'v>>>),
Value::Object(obj) => obj
.iter()
// convert all keys to a usize index of their position in the array
.map(|(k, v)| k.parse().map(|k| (k, v)))
// handle any errors during index parsing
.collect::<Result<Vec<(usize, &'v Value)>, _>>()
.context("Expected array or array-like object where all keys are indexes; some keys are not indexes")
// sort by index
.map(|arr| arr.into_iter().sorted_unstable_by_key(|(k, _)| *k))
// check that all indexes are present
.and_then(|itr| {
itr.clone()
.enumerate()
.all(|(expected, (actual, _))| expected == actual)
.then_some(itr)
.ok_or(eyre!(
"Expected array or array-like object where all keys are indexes; some indexes are missing"
))
})
// convert to an iterator of value parsers over the values
.map(|itr| {
itr.map(move |(i, v)| Self {
val: v,
cwp: &cwp + i.to_string(),
})
})
.map(|itr| Box::new(itr) as Box<dyn Iterator<Item = ValueParser<'v>>>),
_ => Err(eyre!("Expected an array type")),
}
.into_config_result(|| self.cwp)

@ -4,14 +4,12 @@
//! and validations it defines are not applied here, we should mirror them.
//! ANY CHANGES HERE NEED TO BE REFLECTED IN THE TYPESCRIPT SDK.
#![allow(dead_code)] // TODO(2214): remove before PR merge
use std::{
cmp::Reverse,
collections::{HashMap, HashSet},
default::Default,
};
use convert_case::{Case, Casing};
use eyre::{eyre, Context};
use hyperlane_core::{
cfg_unwrap_all, config::*, HyperlaneDomain, HyperlaneDomainProtocol, IndexMode,
@ -23,8 +21,8 @@ use serde_json::Value;
pub use self::json_value_parser::ValueParser;
pub use super::envs::*;
use crate::settings::{
chains::IndexSettings, parser::json_value_parser::ParseChain, trace::TracingConfig, ChainConf,
ChainConnectionConf, CoreContractAddresses, Settings, SignerConf,
chains::IndexSettings, trace::TracingConfig, ChainConf, ChainConnectionConf,
CoreContractAddresses, Settings, SignerConf,
};
mod json_value_parser;
@ -83,10 +81,16 @@ impl FromRawConf<RawAgentConf, Option<&HashSet<&str>>> for Settings {
.and_then(parse_signer)
.end();
let default_rpc_consensus_type = p
.chain(&mut err)
.get_opt_key("defaultRpcConsensusType")
.parse_string()
.unwrap_or("fallback");
let chains: HashMap<String, ChainConf> = raw_chains
.into_iter()
.filter_map(|(name, chain)| {
parse_chain(chain, &name)
parse_chain(chain, &name, default_rpc_consensus_type)
.take_config_err(&mut err)
.map(|v| (name, v))
})
@ -107,7 +111,11 @@ impl FromRawConf<RawAgentConf, Option<&HashSet<&str>>> for Settings {
}
/// The chain name and ChainMetadata
fn parse_chain(chain: ValueParser, name: &str) -> ConfigResult<ChainConf> {
fn parse_chain(
chain: ValueParser,
name: &str,
default_rpc_consensus_type: &str,
) -> ConfigResult<ChainConf> {
let mut err = ConfigParsingError::default();
let domain = parse_domain(chain.clone(), name).take_config_err(&mut err);
@ -117,42 +125,44 @@ fn parse_chain(chain: ValueParser, name: &str) -> ConfigResult<ChainConf> {
.and_then(parse_signer)
.end();
// TODO(2214): is it correct to define finality blocks as `confirmations` and not `reorgPeriod`?
// TODO(2214): should we rename `finalityBlocks` in ChainConf?
let finality_blocks = chain
let reorg_period = chain
.chain(&mut err)
.get_opt_key("blocks")
.get_key("confirmations")
.get_key("reorgPeriod")
.parse_u32()
.unwrap_or(1);
let rpcs: Vec<ValueParser> =
if let Some(custom_rpc_urls) = chain.get_opt_key("customRpcUrls").unwrap_or_default() {
// use the custom defined urls, sorted by highest prio first
custom_rpc_urls.chain(&mut err).into_obj_iter().map(|itr| {
itr.map(|(_, url)| {
(
url.chain(&mut err)
.get_opt_key("priority")
.parse_i32()
.unwrap_or(0),
url,
)
})
.sorted_unstable_by_key(|(p, _)| Reverse(*p))
.map(|(_, url)| url)
.collect()
let rpcs_base = chain
.chain(&mut err)
.get_key("rpcUrls")
.into_array_iter()
.map(|urls| {
urls.filter_map(|v| {
v.chain(&mut err)
.get_key("http")
.parse_from_str("Invalid http url")
.end()
})
} else {
// if no custom rpc urls are set, use the default rpc urls
chain
.chain(&mut err)
.get_key("rpcUrls")
.into_array_iter()
.map(Iterator::collect)
}
.collect_vec()
})
.unwrap_or_default();
let rpc_overrides = chain
.chain(&mut err)
.get_opt_key("customRpcUrls")
.parse_string()
.end()
.map(|urls| {
urls.split(',')
.filter_map(|url| {
url.parse()
.take_err(&mut err, || &chain.cwp + "customRpcUrls")
})
.collect_vec()
});
let rpcs = rpc_overrides.unwrap_or(rpcs_base);
if rpcs.is_empty() {
err.push(
&chain.cwp + "rpc_urls",
@ -207,69 +217,58 @@ fn parse_chain(chain: ValueParser, name: &str) -> ConfigResult<ChainConf> {
.get_key("validatorAnnounce")
.parse_address_hash()
.end();
let merkle_tree_hook = chain
.chain(&mut err)
.get_opt_key("merkleTreeHook")
.parse_address_hash()
.end();
cfg_unwrap_all!(&chain.cwp, err: [domain]);
let connection: Option<ChainConnectionConf> = match domain.domain_protocol() {
HyperlaneDomainProtocol::Ethereum => {
if rpcs.len() <= 1 {
let into_connection =
|url| ChainConnectionConf::Ethereum(h_eth::ConnectionConf::Http { url });
rpcs.into_iter().next().and_then(|rpc| {
rpc.chain(&mut err)
.get_key("http")
.parse_from_str("Invalid http url")
.end()
.map(into_connection)
})
rpcs.into_iter()
.next()
.map(|url| ChainConnectionConf::Ethereum(h_eth::ConnectionConf::Http { url }))
} else {
let urls = rpcs
.into_iter()
.filter_map(|rpc| {
rpc.chain(&mut err)
.get_key("http")
.parse_from_str("Invalid http url")
.end()
})
.collect_vec();
let rpc_consensus_type = chain
.chain(&mut err)
.get_opt_key("rpcConsensusType")
.parse_string()
.unwrap_or("fallback");
.unwrap_or(default_rpc_consensus_type);
match rpc_consensus_type {
"fallback" => Some(h_eth::ConnectionConf::HttpFallback { urls }),
"quorum" => Some(h_eth::ConnectionConf::HttpQuorum { urls }),
"single" => Some(h_eth::ConnectionConf::Http {
url: rpcs.into_iter().next().unwrap(),
}),
"fallback" => Some(h_eth::ConnectionConf::HttpFallback { urls: rpcs }),
"quorum" => Some(h_eth::ConnectionConf::HttpQuorum { urls: rpcs }),
ty => Err(eyre!("unknown rpc consensus type `{ty}`"))
.take_err(&mut err, || &chain.cwp + "rpc_consensus_type"),
}
.map(ChainConnectionConf::Ethereum)
}
}
HyperlaneDomainProtocol::Fuel => ParseChain::from_option(rpcs.into_iter().next(), &mut err)
.get_key("http")
.parse_from_str("Invalid http url")
.end()
HyperlaneDomainProtocol::Fuel => rpcs
.into_iter()
.next()
.map(|url| ChainConnectionConf::Fuel(h_fuel::ConnectionConf { url })),
HyperlaneDomainProtocol::Sealevel => {
ParseChain::from_option(rpcs.into_iter().next(), &mut err)
.get_key("http")
.parse_from_str("Invalod http url")
.end()
.map(|url| ChainConnectionConf::Sealevel(h_sealevel::ConnectionConf { url }))
}
HyperlaneDomainProtocol::Sealevel => rpcs
.into_iter()
.next()
.map(|url| ChainConnectionConf::Sealevel(h_sealevel::ConnectionConf { url })),
};
cfg_unwrap_all!(&chain.cwp, err: [connection, mailbox, interchain_gas_paymaster, validator_announce]);
err.into_result(ChainConf {
domain,
signer,
finality_blocks,
reorg_period,
addresses: CoreContractAddresses {
mailbox,
interchain_gas_paymaster,
validator_announce,
merkle_tree_hook,
},
connection,
metrics_conf: Default::default(),
@ -387,3 +386,24 @@ impl FromRawConf<RawAgentSignerConf> for SignerConf {
parse_signer(ValueParser::new(cwp.clone(), &raw.0))
}
}
/// Recursively re-case every object key in a JSON value to `case`.
pub fn recase_json_value(mut val: Value, case: Case) -> Value {
    match &mut val {
        Value::Array(ary) => {
            // Arrays carry no keys; just recurse into each element in place.
            for item in ary.iter_mut() {
                *item = recase_json_value(item.take(), case);
            }
        }
        Value::Object(obj) => {
            // Snapshot the key list first since the map is mutated while walking.
            let keys: Vec<String> = obj.keys().cloned().collect();
            for key in keys {
                let inner = obj.remove(&key).unwrap();
                obj.insert(key.to_case(case), recase_json_value(inner, case));
            }
        }
        _ => {}
    }
    val
}

@ -69,6 +69,9 @@ impl TracingConfig {
.with_target("hyper", Level::Info)
.with_target("rusoto_core", Level::Info)
.with_target("reqwest", Level::Info)
.with_target("h2", Level::Info)
.with_target("tower", Level::Info)
.with_target("tendermint", Level::Info)
.with_target("tokio", Level::Debug)
.with_target("tokio_util", Level::Debug)
.with_target("ethers_providers", Level::Debug);

@ -3,19 +3,25 @@ use std::fmt::Debug;
use async_trait::async_trait;
use eyre::Result;
use hyperlane_core::{SignedAnnouncement, SignedCheckpoint, SignedCheckpointWithMessageId};
use hyperlane_core::{SignedAnnouncement, SignedCheckpointWithMessageId};
/// A generic trait to read/write Checkpoints offchain
#[async_trait]
pub trait CheckpointSyncer: Debug + Send + Sync {
/// Read the highest index of this Syncer
async fn latest_index(&self) -> Result<Option<u32>>;
/// Attempt to fetch the signed checkpoint at this index
async fn legacy_fetch_checkpoint(&self, index: u32) -> Result<Option<SignedCheckpoint>>;
/// Writes the highest index of this Syncer
async fn write_latest_index(&self, index: u32) -> Result<()>;
/// Update the latest index of this syncer if necessary
async fn update_latest_index(&self, index: u32) -> Result<()> {
let curr = self.latest_index().await?.unwrap_or(0);
if index > curr {
self.write_latest_index(index).await?;
}
Ok(())
}
/// Attempt to fetch the signed (checkpoint, messageId) tuple at this index
async fn fetch_checkpoint(&self, index: u32) -> Result<Option<SignedCheckpointWithMessageId>>;
/// Write the signed checkpoint to this syncer
async fn legacy_write_checkpoint(&self, signed_checkpoint: &SignedCheckpoint) -> Result<()>;
/// Write the signed (checkpoint, messageId) tuple to this syncer
async fn write_checkpoint(
&self,

@@ -2,7 +2,7 @@ use std::path::PathBuf;
use async_trait::async_trait;
use eyre::{Context, Result};
use hyperlane_core::{SignedAnnouncement, SignedCheckpoint, SignedCheckpointWithMessageId};
use hyperlane_core::{SignedAnnouncement, SignedCheckpointWithMessageId};
use prometheus::IntGauge;
use crate::traits::CheckpointSyncer;
@@ -29,10 +29,6 @@ impl LocalStorage {
Ok(Self { path, latest_index })
}
/// Path of the legacy checkpoint file (no message id) for `index`.
fn legacy_checkpoint_file_path(&self, index: u32) -> PathBuf {
    let file_name = format!("{index}.json");
    self.path.join(file_name)
}
/// Path of the checkpoint-with-message-id file for `index`.
fn checkpoint_file_path(&self, index: u32) -> PathBuf {
    self.path.join(format!("{index}_with_id.json"))
}
@@ -41,14 +37,6 @@ impl LocalStorage {
self.path.join("index.json")
}
/// Persist `index` (as decimal text) to the latest-index file.
async fn write_index(&self, index: u32) -> Result<()> {
    let path = self.latest_index_file_path();
    let contents = index.to_string();
    // Surface the target path in the error so a failed write is traceable.
    tokio::fs::write(&path, contents)
        .await
        .with_context(|| format!("Writing index to {path:?}"))
}
/// Path of the stored signed announcement (`announcement.json`).
fn announcement_file_path(&self) -> PathBuf {
    let mut path = self.path.clone();
    path.push("announcement.json");
    path
}
@@ -74,14 +62,12 @@ impl CheckpointSyncer for LocalStorage {
}
}
async fn legacy_fetch_checkpoint(&self, index: u32) -> Result<Option<SignedCheckpoint>> {
match tokio::fs::read(self.legacy_checkpoint_file_path(index)).await {
Ok(data) => {
let checkpoint = serde_json::from_slice(&data)?;
Ok(Some(checkpoint))
}
_ => Ok(None),
}
async fn write_latest_index(&self, index: u32) -> Result<()> {
let path = self.latest_index_file_path();
tokio::fs::write(&path, index.to_string())
.await
.with_context(|| format!("Writing index to {path:?}"))?;
Ok(())
}
async fn fetch_checkpoint(&self, index: u32) -> Result<Option<SignedCheckpointWithMessageId>> {
@@ -92,25 +78,6 @@ impl CheckpointSyncer for LocalStorage {
Ok(Some(checkpoint))
}
/// Serialize a legacy `SignedCheckpoint` to its index-named file, then
/// advance the stored latest index if this checkpoint is newer than it.
async fn legacy_write_checkpoint(&self, signed_checkpoint: &SignedCheckpoint) -> Result<()> {
    let index = signed_checkpoint.value.index;
    let path = self.legacy_checkpoint_file_path(index);
    let serialized = serde_json::to_string_pretty(signed_checkpoint)?;
    tokio::fs::write(&path, &serialized)
        .await
        .with_context(|| format!("Writing checkpoint to {path:?}"))?;
    // Update the recorded latest index when it is absent or behind this one.
    if self.latest_index().await?.map_or(true, |current| current < index) {
        self.write_index(index).await?;
    }
    Ok(())
}
async fn write_checkpoint(
&self,
signed_checkpoint: &SignedCheckpointWithMessageId,

Some files were not shown because too many files have changed in this diff. Show More

Loading…
Cancel
Save