chore: Merge branch 'main' into cli-2.0 (#3863)

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
Co-authored-by: Kunal Arora <55632507+aroralanuk@users.noreply.github.com>
Co-authored-by: Connor McEwen <connor.mcewen@gmail.com>
Co-authored-by: Trevor Porter <tkporter4@gmail.com>
Co-authored-by: byeongsu-hong <hong@byeongsu.dev>
Co-authored-by: Avi Atkin <103125634+avious00@users.noreply.github.com>
Co-authored-by: Daniel Savu <23065004+daniel-savu@users.noreply.github.com>
Co-authored-by: J M Rossy <jm.rossy@gmail.com>
Co-authored-by: Ali Alaoui <aalaoui2001@gmail.com>
Co-authored-by: Nam Chu Hoai <nambrot@googlemail.com>
Co-authored-by: Paul Balaji <paul@hyperlane.xyz>
Co-authored-by: omahs <73983677+omahs@users.noreply.github.com>
Branch: pull/3856/head
Committed by Yorke Rhodes via GitHub
Parent: eb23e77296
Commit: 49f41d9759
Files changed:
1. .github/CODEOWNERS (8)
2. .github/workflows/monorepo-docker.yml (5)
3. .github/workflows/test.yml (93)
4. Dockerfile (7)
5. README.md (4)
6. rust/.cargo/config.toml (2)
7. rust/Cargo.lock (109)
8. rust/Cargo.toml (6)
9. rust/Dockerfile (2)
10. rust/agents/relayer/.cargo/config.toml (2)
11. rust/agents/relayer/Cargo.toml (3)
12. rust/agents/relayer/src/msg/op_submitter.rs (65)
13. rust/agents/relayer/src/msg/processor.rs (345)
14. rust/agents/relayer/src/processor.rs (7)
15. rust/agents/relayer/src/relayer.rs (83)
16. rust/agents/relayer/src/settings/matching_list.rs (6)
17. rust/agents/relayer/src/settings/mod.rs (2)
18. rust/agents/scraper/Cargo.toml (1)
19. rust/agents/scraper/src/agent.rs (1)
20. rust/agents/scraper/src/settings.rs (2)
21. rust/agents/validator/Cargo.toml (1)
22. rust/agents/validator/src/settings.rs (2)
23. rust/agents/validator/src/validator.rs (1)
24. rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs (2)
25. rust/config/mainnet_config.json (216)
26. rust/hyperlane-base/Cargo.toml (1)
27. rust/hyperlane-base/src/agent.rs (12)
28. rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/backward.rs (26)
29. rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/forward.rs (22)
30. rust/hyperlane-base/src/contract_sync/mod.rs (2)
31. rust/hyperlane-base/src/db/rocks/hyperlane_db.rs (58)
32. rust/hyperlane-base/src/settings/mod.rs (2)
33. rust/hyperlane-base/src/settings/parser/mod.rs (2)
34. rust/hyperlane-base/src/settings/trace/mod.rs (7)
35. solidity/.solhint.json (1)
36. solidity/.solhintignore (1)
37. solidity/CHANGELOG.md (12)
38. solidity/README.md (2)
39. solidity/contracts/avs/ECDSAStakeRegistry.sol (148)
40. solidity/contracts/avs/ECDSAStakeRegistryStorage.sol (6)
41. solidity/contracts/interfaces/avs/vendored/IECDSAStakeRegistryEventsAndErrors.sol (17)
42. solidity/contracts/test/ERC20Test.sol (12)
43. solidity/contracts/token/extensions/HypFiatToken.sol (2)
44. solidity/contracts/token/extensions/HypXERC20.sol (2)
45. solidity/contracts/token/extensions/HypXERC20Lockbox.sol (54)
46. solidity/contracts/token/interfaces/IXERC20.sol (15)
47. solidity/contracts/token/interfaces/IXERC20Lockbox.sol (61)
48. solidity/package.json (4)
49. solidity/script/avs/DeployAVS.s.sol (30)
50. solidity/script/avs/eigenlayer_addresses.json (2)
51. solidity/test/avs/HyperlaneServiceManager.t.sol (17)
52. solidity/test/token/HypERC20.t.sol (22)
53. typescript/ccip-server/CHANGELOG.md (2)
54. typescript/ccip-server/package.json (2)
55. typescript/cli/CHANGELOG.md (18)
56. typescript/cli/README.md (2)
57. typescript/cli/cli.ts (4)
58. typescript/cli/package.json (11)
59. typescript/cli/src/avs/config.ts (19)
60. typescript/cli/src/avs/stakeRegistry.ts (164)
61. typescript/cli/src/commands/avs.ts (84)
62. typescript/cli/src/commands/config.ts (2)
63. typescript/cli/src/commands/options.ts (32)
64. typescript/cli/src/commands/validator.ts (51)
65. typescript/cli/src/config/chain.ts (6)
66. typescript/cli/src/config/warp.ts (185)
67. typescript/cli/src/consts.ts (1)
68. typescript/cli/src/context/context.ts (35)
69. typescript/cli/src/deploy/core.ts (12)
70. typescript/cli/src/deploy/utils.ts (30)
71. typescript/cli/src/deploy/warp.ts (236)
72. typescript/cli/src/registry/MergedRegistry.ts (156)
73. typescript/cli/src/send/message.ts (9)
74. typescript/cli/src/send/transfer.ts (9)
75. typescript/cli/src/tests/deployTestErc20.ts (9)
76. typescript/cli/src/utils/chains.ts (16)
77. typescript/cli/src/utils/env.ts (3)
78. typescript/cli/src/utils/files.ts (9)
79. typescript/cli/src/validator/address.ts (166)
80. typescript/cli/src/version.ts (2)
81. typescript/helloworld/CHANGELOG.md (12)
82. typescript/helloworld/package.json (8)
83. typescript/infra/CHANGELOG.md (18)
84. typescript/infra/config/environments/mainnet3/agent.ts (46)
85. typescript/infra/config/environments/mainnet3/aw-validators/hyperlane.json (6)
86. typescript/infra/config/environments/mainnet3/aw-validators/rc.json (6)
87. typescript/infra/config/environments/mainnet3/chains.ts (2)
88. typescript/infra/config/environments/mainnet3/core/verification.json (2120)
89. typescript/infra/config/environments/mainnet3/funding.ts (26)
90. typescript/infra/config/environments/mainnet3/gasPrices.json (10)
91. typescript/infra/config/environments/mainnet3/helloworld.ts (4)
92. typescript/infra/config/environments/mainnet3/infrastructure.ts (4)
93. typescript/infra/config/environments/mainnet3/ism/verification.json (124)
94. typescript/infra/config/environments/mainnet3/liquidityLayer.ts (2)
95. typescript/infra/config/environments/mainnet3/owners.ts (1)
96. typescript/infra/config/environments/mainnet3/supportedChainNames.ts (14)
97. typescript/infra/config/environments/mainnet3/tokenPrices.json (42)
98. typescript/infra/config/environments/mainnet3/validators.ts (28)
99. typescript/infra/config/environments/testnet4/funding.ts (12)
100. typescript/infra/config/environments/testnet4/helloworld.ts (4)
Some files were not shown because too many files have changed in this diff.

@ -1,9 +1,9 @@
# File extension owners
*.sol @yorhodes @tkporter @aroralanuk @nbayindirli
*.ts @yorhodes @jmrossy @nambrot @nbayindirli
*.ts @yorhodes @jmrossy @nbayindirli
*.rs @tkporter @daniel-savu
*.md @Skunkchain @nambrot @avious00
*.md @Skunkchain @avious00
# Package owners
@ -20,10 +20,10 @@ typescript/sdk @yorhodes @jmrossy
typescript/token @yorhodes @jmrossy @tkporter @aroralanuk @nbayindirli
## Hello World
typescript/helloworld @yorhodes @nambrot
typescript/helloworld @yorhodes
## CLI
typescript/cli @jmrossy @yorhodes @aroralanuk @nbayindirli
## Infra
typescript/infra @tkporter @nambrot
typescript/infra @tkporter

@ -8,6 +8,8 @@ on:
paths:
# For now, because this image is only used to run `infra`, we just build for infra changes
- 'typescript/infra/**'
- 'Dockerfile'
- '.dockerignore'
concurrency:
group: build-push-monorepo-${{ github.ref }}
cancel-in-progress: true
@ -74,3 +76,6 @@ jobs:
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max
# To always fetch the latest registry, we use the date as the cache key
build-args: |
REGISTRY_CACHE=${{ steps.taggen.outputs.TAG_DATE }}

@ -19,7 +19,8 @@ env:
LOG_FORMAT: PRETTY
CARGO_TERM_COLOR: always
RUST_BACKTRACE: full
REGISTRY_URI: ../../node_modules/@hyperlane-xyz/registry/dist
# Alongside the monorepo in the directory above the $GITHUB_WORKSPACE.
REGISTRY_URI: ${{ github.workspace }}/../hyperlane-registry
jobs:
yarn-install:
@ -81,6 +82,31 @@ jobs:
- name: build
run: yarn build
checkout-registry:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
repository: hyperlane-xyz/hyperlane-registry
ref: main
path: ./hyperlane-registry
# Put alongside the monorepo in the directory above the $GITHUB_WORKSPACE.
# actions/checkout doesn't allow you to checkout a repository outside of the workspace.
# See https://github.com/actions/checkout/issues/197.
- run: mv ./hyperlane-registry ../
# A workaround for relative paths not being supported by actions/cache.
# See https://github.com/actions/upload-artifact/issues/176#issuecomment-1367855630.
- run: echo "REGISTRY_URI_ABSOLUTE=$(realpath $REGISTRY_URI)" >> $GITHUB_ENV
- name: registry-cache
uses: actions/cache@v3
with:
path: |
${{ env.REGISTRY_URI_ABSOLUTE }}
key: hyperlane-registry-${{ github.event.pull_request.head.sha || github.sha }}
lint-prettier:
runs-on: ubuntu-latest
needs: [yarn-install]
@ -113,7 +139,7 @@ jobs:
yarn-test:
runs-on: ubuntu-latest
needs: [yarn-build]
needs: [yarn-build, checkout-registry]
steps:
- uses: actions/checkout@v3
with:
@ -132,12 +158,23 @@ jobs:
!./rust
key: ${{ github.event.pull_request.head.sha || github.sha }}
# A workaround for relative paths not being supported by actions/cache.
# See https://github.com/actions/upload-artifact/issues/176#issuecomment-1367855630.
- run: echo "REGISTRY_URI_ABSOLUTE=$(realpath $REGISTRY_URI)" >> $GITHUB_ENV
- name: registry-cache
uses: actions/cache@v3
with:
path: |
${{ env.REGISTRY_URI_ABSOLUTE }}
key: hyperlane-registry-${{ github.event.pull_request.head.sha || github.sha }}
- name: Unit Tests
run: yarn test:ci
agent-configs:
runs-on: ubuntu-latest
needs: [yarn-build]
needs: [yarn-build, checkout-registry]
strategy:
fail-fast: false
matrix:
@ -164,6 +201,17 @@ jobs:
!./rust
key: ${{ github.event.pull_request.head.sha || github.sha }}
# A workaround for relative paths not being supported by actions/cache.
# See https://github.com/actions/upload-artifact/issues/176#issuecomment-1367855630.
- run: echo "REGISTRY_URI_ABSOLUTE=$(realpath $REGISTRY_URI)" >> $GITHUB_ENV
- name: registry-cache
uses: actions/cache@v3
with:
path: |
${{ env.REGISTRY_URI_ABSOLUTE }}
key: hyperlane-registry-${{ github.event.pull_request.head.sha || github.sha }}
- name: Generate ${{ matrix.environment }} agent config
run: |
cd typescript/infra
@ -177,7 +225,7 @@ jobs:
e2e-matrix:
runs-on: larger-runner
if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.base_ref == 'main')
needs: [yarn-build]
needs: [yarn-build, checkout-registry]
strategy:
matrix:
e2e-type: [cosmwasm, non-cosmwasm]
@ -230,6 +278,17 @@ jobs:
!./rust
key: ${{ github.event.pull_request.head.sha || github.sha }}
# A workaround for relative paths not being supported by actions/cache.
# See https://github.com/actions/upload-artifact/issues/176#issuecomment-1367855630.
- run: echo "REGISTRY_URI_ABSOLUTE=$(realpath $REGISTRY_URI)" >> $GITHUB_ENV
- name: registry-cache
uses: actions/cache@v3
with:
path: |
${{ env.REGISTRY_URI_ABSOLUTE }}
key: hyperlane-registry-${{ github.event.pull_request.head.sha || github.sha }}
- name: cargo-cache
uses: actions/cache@v3
with:
@ -267,7 +326,7 @@ jobs:
cli-e2e:
runs-on: larger-runner
if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.base_ref == 'main')
needs: [yarn-build]
needs: [yarn-build, checkout-registry]
strategy:
matrix:
include:
@ -323,6 +382,17 @@ jobs:
!./rust
key: ${{ github.event.pull_request.head.sha || github.sha }}
# A workaround for relative paths not being supported by actions/cache.
# See https://github.com/actions/upload-artifact/issues/176#issuecomment-1367855630.
- run: echo "REGISTRY_URI_ABSOLUTE=$(realpath $REGISTRY_URI)" >> $GITHUB_ENV
- name: registry-cache
uses: actions/cache@v3
with:
path: |
${{ env.REGISTRY_URI_ABSOLUTE }}
key: hyperlane-registry-${{ github.event.pull_request.head.sha || github.sha }}
- name: cargo-cache
uses: actions/cache@v3
with:
@ -335,7 +405,7 @@ jobs:
env-test:
runs-on: ubuntu-latest
needs: [yarn-build]
needs: [yarn-build, checkout-registry]
strategy:
fail-fast: false
matrix:
@ -363,6 +433,17 @@ jobs:
!./rust
key: ${{ github.event.pull_request.head.sha || github.sha }}
# A workaround for relative paths not being supported by actions/cache.
# See https://github.com/actions/upload-artifact/issues/176#issuecomment-1367855630.
- run: echo "REGISTRY_URI_ABSOLUTE=$(realpath $REGISTRY_URI)" >> $GITHUB_ENV
- name: registry-cache
uses: actions/cache@v3
with:
path: |
${{ env.REGISTRY_URI_ABSOLUTE }}
key: hyperlane-registry-${{ github.event.pull_request.head.sha || github.sha }}
- name: Fork test ${{ matrix.environment }} ${{ matrix.module }} ${{ matrix.chain }} deployment
run: cd typescript/infra && ./fork.sh ${{ matrix.environment }} ${{ matrix.module }} ${{ matrix.chain }}

@ -27,3 +27,10 @@ COPY typescript ./typescript
COPY solidity ./solidity
RUN yarn build
ENV REGISTRY_URI="/hyperlane-registry"
# To allow us to avoid caching the registry clone, we use a build-time arg to force
# the below steps to be re-run if this arg is changed.
ARG REGISTRY_CACHE="default"
RUN git clone https://github.com/hyperlane-xyz/hyperlane-registry.git "$REGISTRY_URI"

@ -14,9 +14,7 @@
Note this is the branch for Hyperlane v3.
V2 is still in operation but is not being actively developed. The code for V2 can be found in the [v2](https://github.com/hyperlane-xyz/hyperlane-monorepo/tree/v2) branch.
V1 has since been deprecated in favor of V2, but if you are looking for code relating to the existing V1 deployments, refer to the [v1](https://github.com/hyperlane-xyz/hyperlane-monorepo/tree/v1) branch.
V2 is deprecated in favor of V3. The code for V2 can be found in the [v2](https://github.com/hyperlane-xyz/hyperlane-monorepo/tree/v2) branch. For V1 code, refer to the [v1](https://github.com/hyperlane-xyz/hyperlane-monorepo/tree/v1) branch.
## Overview

@ -0,0 +1,2 @@
[build]
rustflags = ["--cfg", "tokio_unstable"]

rust/Cargo.lock (generated, 109)

@ -1347,6 +1347,43 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "console-api"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd326812b3fd01da5bb1af7d340d0d555fd3d4b641e7f1dfcf5962a902952787"
dependencies = [
"futures-core",
"prost 0.12.4",
"prost-types 0.12.4",
"tonic 0.10.2",
"tracing-core",
]
[[package]]
name = "console-subscriber"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7481d4c57092cd1c19dd541b92bdce883de840df30aa5d03fd48a3935c01842e"
dependencies = [
"console-api",
"crossbeam-channel",
"crossbeam-utils",
"futures-task",
"hdrhistogram",
"humantime",
"prost-types 0.12.4",
"serde",
"serde_json",
"thread_local",
"tokio",
"tokio-stream",
"tonic 0.10.2",
"tracing",
"tracing-core",
"tracing-subscriber",
]
[[package]]
name = "console_error_panic_hook"
version = "0.1.7"
@ -1451,7 +1488,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73c9d2043a9e617b0d602fbc0a0ecd621568edbf3a9774890a6d562389bd8e1c"
dependencies = [
"prost 0.11.9",
"prost-types",
"prost-types 0.11.9",
"tendermint-proto 0.32.2 (registry+https://github.com/rust-lang/crates.io-index)",
"tonic 0.9.2",
]
@ -3793,6 +3830,19 @@ dependencies = [
"hashbrown 0.14.3",
]
[[package]]
name = "hdrhistogram"
version = "7.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d"
dependencies = [
"base64 0.21.7",
"byteorder",
"flate2",
"nom",
"num-traits",
]
[[package]]
name = "headers"
version = "0.3.9"
@ -4116,6 +4166,7 @@ dependencies = [
"bs58 0.5.0",
"color-eyre",
"config",
"console-subscriber",
"convert_case 0.6.0",
"derive-new",
"derive_builder",
@ -4946,7 +4997,7 @@ dependencies = [
"cosmwasm-std",
"osmosis-std-derive",
"prost 0.11.9",
"prost-types",
"prost-types 0.11.9",
"schemars",
"serde",
"serde-cw-value",
@ -6534,16 +6585,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd"
dependencies = [
"bytes",
"prost-derive",
"prost-derive 0.11.9",
]
[[package]]
name = "prost"
version = "0.12.3"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a"
checksum = "d0f5d036824e4761737860779c906171497f6d55681139d8312388f8fe398922"
dependencies = [
"bytes",
"prost-derive 0.12.5",
]
[[package]]
@ -6559,6 +6611,19 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "prost-derive"
version = "0.12.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9554e3ab233f0a932403704f1a1d08c30d5ccd931adfdfa1e8b5a19b52c1d55a"
dependencies = [
"anyhow",
"itertools 0.12.0",
"proc-macro2 1.0.76",
"quote 1.0.35",
"syn 2.0.48",
]
[[package]]
name = "prost-types"
version = "0.11.9"
@ -6568,6 +6633,15 @@ dependencies = [
"prost 0.11.9",
]
[[package]]
name = "prost-types"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3235c33eb02c1f1e212abdbe34c78b264b038fb58ca612664343271e36e55ffe"
dependencies = [
"prost 0.12.4",
]
[[package]]
name = "protobuf"
version = "2.28.0"
@ -6944,6 +7018,7 @@ dependencies = [
"async-trait",
"axum",
"config",
"console-subscriber",
"convert_case 0.6.0",
"derive-new",
"derive_more",
@ -6957,6 +7032,7 @@ dependencies = [
"hyperlane-ethereum",
"hyperlane-test",
"itertools 0.12.0",
"mockall",
"num-derive 0.4.1",
"num-traits",
"once_cell",
@ -6968,6 +7044,7 @@ dependencies = [
"strum 0.25.0",
"thiserror",
"tokio",
"tokio-metrics",
"tokio-test",
"tracing",
"tracing-futures",
@ -7630,6 +7707,7 @@ version = "0.1.0"
dependencies = [
"async-trait",
"config",
"console-subscriber",
"derive_more",
"ethers",
"eyre",
@ -9700,7 +9778,7 @@ dependencies = [
"num-traits",
"once_cell",
"prost 0.11.9",
"prost-types",
"prost-types 0.11.9",
"ripemd",
"serde",
"serde_bytes",
@ -9739,7 +9817,7 @@ dependencies = [
"num-derive 0.3.3",
"num-traits",
"prost 0.11.9",
"prost-types",
"prost-types 0.11.9",
"serde",
"serde_bytes",
"subtle-encoding",
@ -9756,7 +9834,7 @@ dependencies = [
"num-derive 0.3.3",
"num-traits",
"prost 0.11.9",
"prost-types",
"prost-types 0.11.9",
"serde",
"serde_bytes",
"subtle-encoding",
@ -9956,6 +10034,7 @@ dependencies = [
"signal-hook-registry",
"socket2 0.5.5",
"tokio-macros",
"tracing",
"windows-sys 0.48.0",
]
@ -9980,6 +10059,17 @@ dependencies = [
"syn 2.0.48",
]
[[package]]
name = "tokio-metrics"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eace09241d62c98b7eeb1107d4c5c64ca3bd7da92e8c218c153ab3a78f9be112"
dependencies = [
"futures-util",
"pin-project-lite",
"tokio-stream",
]
[[package]]
name = "tokio-native-tls"
version = "0.3.1"
@ -10217,7 +10307,7 @@ dependencies = [
"hyper-timeout",
"percent-encoding",
"pin-project",
"prost 0.12.3",
"prost 0.12.4",
"rustls 0.21.10",
"rustls-native-certs 0.6.3",
"rustls-pemfile 1.0.4",
@ -10638,6 +10728,7 @@ dependencies = [
"async-trait",
"axum",
"config",
"console-subscriber",
"derive-new",
"derive_more",
"ethers",

@ -65,6 +65,7 @@ bytes = "1"
clap = "4"
color-eyre = "0.6"
config = "0.13.3"
console-subscriber = "0.2.0"
convert_case = "0.6"
cosmrs = { version = "0.14", default-features = false, features = [
"cosmwasm",
@ -170,11 +171,12 @@ tendermint-rpc = { version = "0.32.0", features = ["http-client", "tokio"] }
thiserror = "1.0"
time = "0.3"
tiny-keccak = "2.0.2"
tokio = { version = "1", features = ["parking_lot"] }
tokio = { version = "1", features = ["parking_lot", "tracing"] }
tokio-metrics = { version = "0.3.1", default-features = false }
tokio-test = "0.4"
toml_edit = "0.19.14"
tonic = "0.9.2"
tracing = { version = "0.1", features = ["release_max_level_debug"] }
tracing = { version = "0.1" }
tracing-error = "0.2"
tracing-futures = "0.2"
tracing-subscriber = { version = "0.3", default-features = false }
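For context on the `console-subscriber`, `tokio-metrics`, and tokio `tracing` additions above: a minimal sketch, assuming console-subscriber 0.2's builder API, of how the console layer and its gRPC server are typically wired (the helper name is illustrative and not part of this diff). The agent keeps the returned `Server` and serves it later, which is what the relayer's `run()` does further down with `tokio_console_server.serve()`.

use tracing_subscriber::prelude::*;

// Sketch only: requires RUSTFLAGS="--cfg tokio_unstable" and the tokio
// "tracing" feature, as enabled elsewhere in this diff.
fn init_console_tracing() -> console_subscriber::Server {
    // `build()` returns both the tracing layer and the gRPC server that
    // the `tokio-console` CLI connects to.
    let (console_layer, server) = console_subscriber::ConsoleLayer::builder()
        .with_default_env() // honor TOKIO_CONSOLE_* environment variables
        .build();

    tracing_subscriber::registry().with(console_layer).init();

    // The caller holds on to the server and runs `server.serve().await`
    // from a spawned task.
    server
}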

@ -27,7 +27,7 @@ RUN \
--mount=id=cargo,type=cache,sharing=locked,target=/usr/src/target \
--mount=id=cargo-home-registry,type=cache,sharing=locked,target=/usr/local/cargo/registry \
--mount=id=cargo-home-git,type=cache,sharing=locked,target=/usr/local/cargo/git \
cargo build --release --bin validator --bin relayer --bin scraper && \
RUSTFLAGS="--cfg tokio_unstable" cargo build --release --bin validator --bin relayer --bin scraper && \
mkdir -p /release && \
cp /usr/src/target/release/validator /release && \
cp /usr/src/target/release/relayer /release && \

@ -0,0 +1,2 @@
[build]
rustflags = ["--cfg", "tokio_unstable"]

@ -13,6 +13,7 @@ version.workspace = true
async-trait.workspace = true
axum.workspace = true
config.workspace = true
console-subscriber.workspace = true
convert_case.workspace = true
derive-new.workspace = true
derive_more.workspace = true
@ -32,6 +33,7 @@ serde_json.workspace = true
strum.workspace = true
thiserror.workspace = true
tokio = { workspace = true, features = ["rt", "macros", "parking_lot", "rt-multi-thread"] }
tokio-metrics.workspace = true
tracing-futures.workspace = true
tracing.workspace = true
@ -41,6 +43,7 @@ hyperlane-ethereum = { path = "../../chains/hyperlane-ethereum" }
[dev-dependencies]
once_cell.workspace = true
mockall.workspace = true
tokio-test.workspace = true
hyperlane-test = { path = "../../hyperlane-test" }
hyperlane-base = { path = "../../hyperlane-base", features = ["test-utils"] }

@ -4,10 +4,10 @@ use derive_new::new;
use futures::future::join_all;
use futures_util::future::try_join_all;
use prometheus::{IntCounter, IntGaugeVec};
use tokio::spawn;
use tokio::sync::mpsc;
use tokio::task::JoinHandle;
use tokio::time::sleep;
use tokio_metrics::TaskMonitor;
use tracing::{debug, info_span, instrument, instrument::Instrumented, trace, Instrument};
use tracing::{info, warn};
@ -82,12 +82,18 @@ pub struct SerialSubmitter {
metrics: SerialSubmitterMetrics,
/// Max batch size for submitting messages
max_batch_size: u32,
/// tokio task monitor
task_monitor: TaskMonitor,
}
impl SerialSubmitter {
pub fn spawn(self) -> Instrumented<JoinHandle<()>> {
let span = info_span!("SerialSubmitter", destination=%self.domain);
spawn(async move { self.run().await }).instrument(span)
let task_monitor = self.task_monitor.clone();
tokio::spawn(TaskMonitor::instrument(&task_monitor, async move {
self.run().await
}))
.instrument(span)
}
async fn run(self) {
@ -97,6 +103,7 @@ impl SerialSubmitter {
rx: rx_prepare,
retry_rx,
max_batch_size,
task_monitor,
} = self;
let prepare_queue = OpQueue::new(
metrics.submitter_queue_length.clone(),
@ -115,32 +122,40 @@ impl SerialSubmitter {
);
let tasks = [
spawn(receive_task(
domain.clone(),
rx_prepare,
prepare_queue.clone(),
tokio::spawn(TaskMonitor::instrument(
&task_monitor,
receive_task(domain.clone(), rx_prepare, prepare_queue.clone()),
)),
spawn(prepare_task(
domain.clone(),
prepare_queue.clone(),
submit_queue.clone(),
confirm_queue.clone(),
max_batch_size,
metrics.clone(),
tokio::spawn(TaskMonitor::instrument(
&task_monitor,
prepare_task(
domain.clone(),
prepare_queue.clone(),
submit_queue.clone(),
confirm_queue.clone(),
max_batch_size,
metrics.clone(),
),
)),
spawn(submit_task(
domain.clone(),
submit_queue,
confirm_queue.clone(),
max_batch_size,
metrics.clone(),
tokio::spawn(TaskMonitor::instrument(
&task_monitor,
submit_task(
domain.clone(),
submit_queue,
confirm_queue.clone(),
max_batch_size,
metrics.clone(),
),
)),
spawn(confirm_task(
domain.clone(),
prepare_queue,
confirm_queue,
max_batch_size,
metrics,
tokio::spawn(TaskMonitor::instrument(
&task_monitor,
confirm_task(
domain.clone(),
prepare_queue,
confirm_queue,
max_batch_size,
metrics,
),
)),
];

@ -1,4 +1,5 @@
use std::{
cmp::max,
collections::HashMap,
fmt::{Debug, Formatter},
sync::Arc,
@ -8,11 +9,14 @@ use std::{
use async_trait::async_trait;
use derive_new::new;
use eyre::Result;
use hyperlane_base::{db::HyperlaneRocksDB, CoreMetrics};
use hyperlane_base::{
db::{HyperlaneRocksDB, ProcessMessage},
CoreMetrics,
};
use hyperlane_core::{HyperlaneDomain, HyperlaneMessage};
use prometheus::IntGauge;
use tokio::sync::mpsc::UnboundedSender;
use tracing::{debug, trace};
use tracing::{debug, instrument, trace};
use super::{metadata::AppContextClassifier, op_queue::QueueOperation, pending_message::*};
use crate::{processor::ProcessorExt, settings::matching_list::MatchingList};
@ -20,9 +24,7 @@ use crate::{processor::ProcessorExt, settings::matching_list::MatchingList};
/// Finds unprocessed messages from an origin and submits them through a channel
/// to the appropriate destination.
#[allow(clippy::too_many_arguments)]
#[derive(new)]
pub struct MessageProcessor {
db: HyperlaneRocksDB,
whitelist: Arc<MatchingList>,
blacklist: Arc<MatchingList>,
metrics: MessageProcessorMetrics,
@ -32,16 +34,187 @@ pub struct MessageProcessor {
/// Needed context to send a message for each destination chain
destination_ctxs: HashMap<u32, Arc<MessageContext>>,
metric_app_contexts: Vec<(MatchingList, String)>,
#[new(default)]
message_nonce: u32,
nonce_iterator: ForwardBackwardIterator,
}
#[derive(Debug)]
struct ForwardBackwardIterator {
low_nonce_iter: DirectionalNonceIterator,
high_nonce_iter: DirectionalNonceIterator,
// here for debugging purposes
_domain: String,
}
impl ForwardBackwardIterator {
#[instrument(skip(db), ret)]
fn new(db: Arc<dyn ProcessMessage>) -> Self {
let high_nonce = db.retrieve_highest_seen_message_nonce().ok().flatten();
let domain = db.domain().name().to_owned();
let high_nonce_iter = DirectionalNonceIterator::new(
// If the high nonce is None, we start from the beginning
high_nonce.unwrap_or_default().into(),
NonceDirection::High,
db.clone(),
domain.clone(),
);
let mut low_nonce_iter =
DirectionalNonceIterator::new(high_nonce, NonceDirection::Low, db, domain.clone());
// Decrement the low nonce to avoid processing the same message twice, which causes double counts in metrics
low_nonce_iter.iterate();
debug!(
?low_nonce_iter,
?high_nonce_iter,
?domain,
"Initialized ForwardBackwardIterator"
);
Self {
low_nonce_iter,
high_nonce_iter,
_domain: domain,
}
}
async fn try_get_next_message(
&mut self,
metrics: &MessageProcessorMetrics,
) -> Result<Option<HyperlaneMessage>> {
loop {
let high_nonce_message_status = self.high_nonce_iter.try_get_next_nonce(metrics)?;
let low_nonce_message_status = self.low_nonce_iter.try_get_next_nonce(metrics)?;
// Always prioritize the high nonce message
match (high_nonce_message_status, low_nonce_message_status) {
// Keep iterating if only processed messages are found
(MessageStatus::Processed, _) => {
self.high_nonce_iter.iterate();
}
(_, MessageStatus::Processed) => {
self.low_nonce_iter.iterate();
}
// Otherwise return - either a processable message or nothing to process
(MessageStatus::Processable(high_nonce_message), _) => {
self.high_nonce_iter.iterate();
return Ok(Some(high_nonce_message));
}
(_, MessageStatus::Processable(low_nonce_message)) => {
self.low_nonce_iter.iterate();
return Ok(Some(low_nonce_message));
}
(MessageStatus::Unindexed, MessageStatus::Unindexed) => return Ok(None),
}
// This loop may iterate through millions of processed messages, blocking the runtime.
// So, to avoid starving other futures in this task, yield to the runtime
// on each iteration
tokio::task::yield_now().await;
}
}
}
#[derive(Debug, Clone, Copy, Default, PartialEq)]
enum NonceDirection {
#[default]
High,
Low,
}
#[derive(new)]
struct DirectionalNonceIterator {
nonce: Option<u32>,
direction: NonceDirection,
db: Arc<dyn ProcessMessage>,
domain_name: String,
}
impl Debug for DirectionalNonceIterator {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(
f,
"DirectionalNonceIterator {{ nonce: {:?}, direction: {:?}, domain: {:?} }}",
self.nonce, self.direction, self.domain_name
)
}
}
impl DirectionalNonceIterator {
#[instrument]
fn iterate(&mut self) {
match self.direction {
NonceDirection::High => self.nonce = self.nonce.map(|n| n.saturating_add(1)),
NonceDirection::Low => {
if let Some(nonce) = self.nonce {
// once the message with nonce zero is processed, we should stop going backwards
self.nonce = nonce.checked_sub(1);
}
}
}
}
fn try_get_next_nonce(
&mut self,
metrics: &MessageProcessorMetrics,
) -> Result<MessageStatus<HyperlaneMessage>> {
if let Some(message) = self.indexed_message_with_nonce()? {
Self::update_max_nonce_gauge(&message, metrics);
if !self.is_message_processed()? {
return Ok(MessageStatus::Processable(message));
} else {
return Ok(MessageStatus::Processed);
}
}
Ok(MessageStatus::Unindexed)
}
fn update_max_nonce_gauge(message: &HyperlaneMessage, metrics: &MessageProcessorMetrics) {
let current_max = metrics.max_last_known_message_nonce_gauge.get();
metrics
.max_last_known_message_nonce_gauge
.set(max(current_max, message.nonce as i64));
if let Some(metrics) = metrics.get(message.destination) {
metrics.set(message.nonce as i64);
}
}
fn indexed_message_with_nonce(&self) -> Result<Option<HyperlaneMessage>> {
match self.nonce {
Some(nonce) => {
let msg = self.db.retrieve_message_by_nonce(nonce)?;
Ok(msg)
}
None => Ok(None),
}
}
fn is_message_processed(&self) -> Result<bool> {
let Some(nonce) = self.nonce else {
return Ok(false);
};
let processed = self.db.retrieve_processed_by_nonce(nonce)?.unwrap_or(false);
if processed {
trace!(
nonce,
domain = self.db.domain().name(),
"Message already marked as processed in DB"
);
}
Ok(processed)
}
}
#[derive(Debug)]
enum MessageStatus<T> {
/// The message wasn't indexed yet so can't be processed.
Unindexed,
// The message was indexed and is ready to be processed.
Processable(T),
// The message was indexed and already processed.
Processed,
}
impl Debug for MessageProcessor {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(
f,
"MessageProcessor {{ whitelist: {:?}, blacklist: {:?}, message_nonce: {:?} }}",
self.whitelist, self.blacklist, self.message_nonce
"MessageProcessor {{ whitelist: {:?}, blacklist: {:?}, nonce_iterator: {:?}}}",
self.whitelist, self.blacklist, self.nonce_iterator
)
}
}
@ -50,7 +223,7 @@ impl Debug for MessageProcessor {
impl ProcessorExt for MessageProcessor {
/// The domain this processor is getting messages from.
fn domain(&self) -> &HyperlaneDomain {
self.db.domain()
self.nonce_iterator.high_nonce_iter.db.domain()
}
/// One round of processing, extracted from infinite work loop for
@ -61,35 +234,31 @@ impl ProcessorExt for MessageProcessor {
// self.tx_msg and then continue the scan at the next highest
// nonce.
// Scan until we find next nonce without delivery confirmation.
if let Some(msg) = self.try_get_unprocessed_message()? {
if let Some(msg) = self.try_get_unprocessed_message().await? {
debug!(?msg, "Processor working on message");
let destination = msg.destination;
// Skip if not whitelisted.
if !self.whitelist.msg_matches(&msg, true) {
debug!(?msg, whitelist=?self.whitelist, "Message not whitelisted, skipping");
self.message_nonce += 1;
return Ok(());
}
// Skip if the message is blacklisted
if self.blacklist.msg_matches(&msg, false) {
debug!(?msg, blacklist=?self.blacklist, "Message blacklisted, skipping");
self.message_nonce += 1;
return Ok(());
}
// Skip if the message is intended for this origin
if destination == self.domain().id() {
debug!(?msg, "Message destined for self, skipping");
self.message_nonce += 1;
return Ok(());
}
// Skip if the message is intended for a destination we do not service
if !self.send_channels.contains_key(&destination) {
debug!(?msg, "Message destined for unknown domain, skipping");
self.message_nonce += 1;
return Ok(());
}
@ -106,7 +275,6 @@ impl ProcessorExt for MessageProcessor {
app_context,
);
self.send_channels[&destination].send(Box::new(pending_msg) as QueueOperation)?;
self.message_nonce += 1;
} else {
tokio::time::sleep(Duration::from_secs(1)).await;
}
@ -115,34 +283,36 @@ impl ProcessorExt for MessageProcessor {
}
impl MessageProcessor {
fn try_get_unprocessed_message(&mut self) -> Result<Option<HyperlaneMessage>> {
loop {
// First, see if we can find the message so we can update the gauge.
if let Some(message) = self.db.retrieve_message_by_nonce(self.message_nonce)? {
// Update the latest nonce gauges
self.metrics
.max_last_known_message_nonce_gauge
.set(message.nonce as i64);
if let Some(metrics) = self.metrics.get(message.destination) {
metrics.set(message.nonce as i64);
}
pub fn new(
db: HyperlaneRocksDB,
whitelist: Arc<MatchingList>,
blacklist: Arc<MatchingList>,
metrics: MessageProcessorMetrics,
send_channels: HashMap<u32, UnboundedSender<QueueOperation>>,
destination_ctxs: HashMap<u32, Arc<MessageContext>>,
metric_app_contexts: Vec<(MatchingList, String)>,
) -> Self {
Self {
whitelist,
blacklist,
metrics,
send_channels,
destination_ctxs,
metric_app_contexts,
nonce_iterator: ForwardBackwardIterator::new(Arc::new(db) as Arc<dyn ProcessMessage>),
}
}
// If this message has already been processed, on to the next one.
if !self
.db
.retrieve_processed_by_nonce(&self.message_nonce)?
.unwrap_or(false)
{
return Ok(Some(message));
} else {
debug!(nonce=?self.message_nonce, "Message already marked as processed in DB");
self.message_nonce += 1;
}
} else {
trace!(nonce=?self.message_nonce, "No message found in DB for nonce");
return Ok(None);
}
async fn try_get_unprocessed_message(&mut self) -> Result<Option<HyperlaneMessage>> {
trace!(nonce_iterator=?self.nonce_iterator, "Trying to get the next processor message");
let next_message = self
.nonce_iterator
.try_get_next_message(&self.metrics)
.await?;
if next_message.is_none() {
trace!(nonce_iterator=?self.nonce_iterator, "No message found in DB for nonce");
}
Ok(next_message)
}
}
@ -197,7 +367,7 @@ mod test {
use super::*;
use hyperlane_base::{
db::{test_utils, HyperlaneRocksDB},
db::{test_utils, DbResult, HyperlaneRocksDB},
settings::{ChainConf, ChainConnectionConf, Settings},
};
use hyperlane_test::mocks::{MockMailboxContract, MockValidatorAnnounceContract};
@ -209,6 +379,7 @@ mod test {
},
time::sleep,
};
use tokio_metrics::TaskMonitor;
fn dummy_processor_metrics(domain_id: u32) -> MessageProcessorMetrics {
MessageProcessorMetrics {
@ -367,7 +538,7 @@ mod test {
let (message_processor, mut receive_channel) =
dummy_message_processor(origin_domain, destination_domain, db);
let processor = Processor::new(Box::new(message_processor));
let processor = Processor::new(Box::new(message_processor), TaskMonitor::new());
let process_fut = processor.spawn();
let mut pending_messages = vec![];
let pending_message_accumulator = async {
@ -386,6 +557,21 @@ mod test {
pending_messages
}
mockall::mock! {
pub Db {}
impl Debug for Db {
fn fmt<'a>(&self, f: &mut std::fmt::Formatter<'a>) -> std::fmt::Result;
}
impl ProcessMessage for Db {
fn retrieve_highest_seen_message_nonce(&self) -> DbResult<Option<u32>>;
fn retrieve_message_by_nonce(&self, nonce: u32) -> DbResult<Option<HyperlaneMessage>>;
fn retrieve_processed_by_nonce(&self, nonce: u32) -> DbResult<Option<bool>>;
fn domain(&self) -> &HyperlaneDomain;
}
}
#[tokio::test]
async fn test_full_pending_message_persistence_flow() {
test_utils::run_test_db(|db| async move {
@ -440,4 +626,77 @@ mod test {
})
.await;
}
#[tokio::test]
async fn test_forward_backward_iterator() {
let mut mock_db = MockDb::new();
const MAX_ONCHAIN_NONCE: u32 = 4;
const MOCK_HIGHEST_SEEN_NONCE: u32 = 2;
// How many times the db was queried for the max onchain nonce message
let mut retrieve_calls_for_max_onchain_nonce = 0;
mock_db
.expect_domain()
.return_const(dummy_domain(0, "dummy_domain"));
mock_db
.expect_retrieve_highest_seen_message_nonce()
.returning(|| Ok(Some(MOCK_HIGHEST_SEEN_NONCE)));
mock_db
.expect_retrieve_message_by_nonce()
.returning(move |nonce| {
// return `None` the first time we get a query for the last message
// (the `MAX_ONCHAIN_NONCE`th one), to simulate an ongoing indexing that hasn't finished
if nonce == MAX_ONCHAIN_NONCE && retrieve_calls_for_max_onchain_nonce == 0 {
retrieve_calls_for_max_onchain_nonce += 1;
return Ok(None);
}
// otherwise return a message for every nonce in the closed
// interval [0, MAX_ONCHAIN_NONCE]
if nonce > MAX_ONCHAIN_NONCE {
Ok(None)
} else {
Ok(Some(dummy_hyperlane_message(
&dummy_domain(1, "dummy_domain"),
nonce,
)))
}
});
// The messages must be marked as "not processed" in the db for them to be returned
// when the iterator queries them
mock_db
.expect_retrieve_processed_by_nonce()
.returning(|_| Ok(Some(false)));
let dummy_metrics = dummy_processor_metrics(0);
let db = Arc::new(mock_db);
let mut forward_backward_iterator = ForwardBackwardIterator::new(db.clone());
let mut messages = vec![];
while let Some(msg) = forward_backward_iterator
.try_get_next_message(&dummy_metrics)
.await
.unwrap()
{
messages.push(msg.nonce);
}
// we start with 2 (MOCK_HIGHEST_SEEN_NONCE) as the highest seen nonce,
// so we go forward and get 3.
// then we try going forward again but get a `None` (not indexed yet), for nonce 4 (MAX_ONCHAIN_NONCE).
// then we go backwards once and get 1.
// then retry the forward iteration, which should return a message the second time, for nonce 4.
// finally, going forward again returns None so we go backward and get 0.
assert_eq!(messages, vec![2, 3, 1, 4, 0]);
// the final bounds of the iterator are (None, MAX_ONCHAIN_NONCE + 1), where None means
// the backward iterator has reached the beginning (iterated past nonce 0)
assert_eq!(forward_backward_iterator.low_nonce_iter.nonce, None);
assert_eq!(
forward_backward_iterator.high_nonce_iter.nonce,
Some(MAX_ONCHAIN_NONCE + 1)
);
}
}

@ -5,6 +5,7 @@ use derive_new::new;
use eyre::Result;
use hyperlane_core::HyperlaneDomain;
use tokio::task::JoinHandle;
use tokio_metrics::TaskMonitor;
use tracing::{instrument, warn};
#[async_trait]
@ -20,11 +21,15 @@ pub trait ProcessorExt: Send + Debug {
#[derive(new)]
pub struct Processor {
ticker: Box<dyn ProcessorExt>,
task_monitor: TaskMonitor,
}
impl Processor {
pub fn spawn(self) -> JoinHandle<()> {
tokio::spawn(async move { self.main_loop().await })
let task_monitor = self.task_monitor.clone();
tokio::spawn(TaskMonitor::instrument(&task_monitor, async move {
self.main_loop().await
}))
}
#[instrument(ret, skip(self), level = "info", fields(domain=%self.ticker.domain()))]
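The `SerialSubmitter` and `Processor` changes above follow the same tokio-metrics pattern: wrap every future with `TaskMonitor::instrument` before spawning it, so poll counts and scheduling delays can later be sampled from the same monitor handle. A minimal self-contained sketch of that pattern, assuming tokio-metrics 0.3 and tokio's full feature set (the reporting loop is illustrative, not part of this diff):

use std::time::Duration;
use tokio_metrics::TaskMonitor;

#[tokio::main]
async fn main() {
    let monitor = TaskMonitor::new();

    // Wrap the future before spawning, as Processor::spawn does above.
    tokio::spawn(TaskMonitor::instrument(&monitor, async {
        loop {
            tokio::time::sleep(Duration::from_millis(100)).await;
        }
    }));

    // Sample aggregated task metrics from the same monitor handle.
    let mut intervals = monitor.intervals();
    for _ in 0..3 {
        tokio::time::sleep(Duration::from_secs(1)).await;
        if let Some(metrics) = intervals.next() {
            println!("task metrics: {:?}", metrics);
        }
    }
}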

@ -25,7 +25,8 @@ use tokio::{
},
task::JoinHandle,
};
use tracing::{info, info_span, instrument::Instrumented, warn, Instrument};
use tokio_metrics::TaskMonitor;
use tracing::{error, info, info_span, instrument::Instrumented, warn, Instrument};
use crate::{
merkle_tree::builder::MerkleTreeBuilder,
@ -79,6 +80,8 @@ pub struct Relayer {
// or move them in `core_metrics`, like the validator metrics
agent_metrics: AgentMetrics,
chain_metrics: ChainMetrics,
/// Tokio console server
pub tokio_console_server: Option<console_subscriber::Server>,
}
impl Debug for Relayer {
@ -109,6 +112,7 @@ impl BaseAgent for Relayer {
core_metrics: Arc<CoreMetrics>,
agent_metrics: AgentMetrics,
chain_metrics: ChainMetrics,
tokio_console_server: console_subscriber::Server,
) -> Result<Self>
where
Self: Sized,
@ -280,13 +284,26 @@ impl BaseAgent for Relayer {
core_metrics,
agent_metrics,
chain_metrics,
tokio_console_server: Some(tokio_console_server),
})
}
#[allow(clippy::async_yields_async)]
async fn run(self) {
async fn run(mut self) {
let mut tasks = vec![];
let task_monitor = tokio_metrics::TaskMonitor::new();
if let Some(tokio_console_server) = self.tokio_console_server.take() {
let console_server =
tokio::spawn(TaskMonitor::instrument(&task_monitor.clone(), async move {
info!("Starting tokio console server");
if let Err(e) = tokio_console_server.serve().await {
error!(error=?e, "Tokio console server failed to start");
}
}));
tasks.push(console_server.instrument(info_span!("Tokio console server")));
}
// run server
let mpmc_channel = MpmcChannel::<MessageRetryRequest>::new(ENDPOINT_MESSAGES_QUEUE_SIZE);
let custom_routes = relayer_server::routes(mpmc_channel.sender());
@ -318,6 +335,7 @@ impl BaseAgent for Relayer {
.operation_batch_config()
.map(|c| c.max_batch_size)
.unwrap_or(1),
task_monitor.clone(),
),
);
@ -334,15 +352,25 @@ impl BaseAgent for Relayer {
}
for origin in &self.origin_chains {
tasks.push(self.run_message_sync(origin).await);
tasks.push(self.run_interchain_gas_payment_sync(origin).await);
tasks.push(self.run_merkle_tree_hook_syncs(origin).await);
tasks.push(self.run_message_sync(origin, task_monitor.clone()).await);
tasks.push(
self.run_interchain_gas_payment_sync(origin, task_monitor.clone())
.await,
);
tasks.push(
self.run_merkle_tree_hook_syncs(origin, task_monitor.clone())
.await,
);
}
// each message process attempts to send messages from a chain
for origin in &self.origin_chains {
tasks.push(self.run_message_processor(origin, send_channels.clone()));
tasks.push(self.run_merkle_tree_processor(origin));
tasks.push(self.run_message_processor(
origin,
send_channels.clone(),
task_monitor.clone(),
));
tasks.push(self.run_merkle_tree_processor(origin, task_monitor.clone()));
}
if let Err(err) = try_join_all(tasks).await {
@ -355,22 +383,27 @@ impl BaseAgent for Relayer {
}
impl Relayer {
async fn run_message_sync(&self, origin: &HyperlaneDomain) -> Instrumented<JoinHandle<()>> {
async fn run_message_sync(
&self,
origin: &HyperlaneDomain,
task_monitor: TaskMonitor,
) -> Instrumented<JoinHandle<()>> {
let index_settings = self.as_ref().settings.chains[origin.name()].index_settings();
let contract_sync = self.message_syncs.get(origin).unwrap().clone();
let cursor = contract_sync.cursor(index_settings).await;
tokio::spawn(async move {
tokio::spawn(TaskMonitor::instrument(&task_monitor, async move {
contract_sync
.clone()
.sync("dispatched_messages", cursor)
.await
})
}))
.instrument(info_span!("MessageSync"))
}
async fn run_interchain_gas_payment_sync(
&self,
origin: &HyperlaneDomain,
task_monitor: TaskMonitor,
) -> Instrumented<JoinHandle<()>> {
let index_settings = self.as_ref().settings.chains[origin.name()].index_settings();
let contract_sync = self
@ -379,25 +412,31 @@ impl Relayer {
.unwrap()
.clone();
let cursor = contract_sync.cursor(index_settings).await;
tokio::spawn(async move { contract_sync.clone().sync("gas_payments", cursor).await })
.instrument(info_span!("IgpSync"))
tokio::spawn(TaskMonitor::instrument(&task_monitor, async move {
contract_sync.clone().sync("gas_payments", cursor).await
}))
.instrument(info_span!("IgpSync"))
}
async fn run_merkle_tree_hook_syncs(
&self,
origin: &HyperlaneDomain,
task_monitor: TaskMonitor,
) -> Instrumented<JoinHandle<()>> {
let index_settings = self.as_ref().settings.chains[origin.name()].index.clone();
let contract_sync = self.merkle_tree_hook_syncs.get(origin).unwrap().clone();
let cursor = contract_sync.cursor(index_settings).await;
tokio::spawn(async move { contract_sync.clone().sync("merkle_tree_hook", cursor).await })
.instrument(info_span!("MerkleTreeHookSync"))
tokio::spawn(TaskMonitor::instrument(&task_monitor, async move {
contract_sync.clone().sync("merkle_tree_hook", cursor).await
}))
.instrument(info_span!("MerkleTreeHookSync"))
}
fn run_message_processor(
&self,
origin: &HyperlaneDomain,
send_channels: HashMap<u32, UnboundedSender<QueueOperation>>,
task_monitor: TaskMonitor,
) -> Instrumented<JoinHandle<()>> {
let metrics = MessageProcessorMetrics::new(
&self.core.metrics,
@ -431,12 +470,16 @@ impl Relayer {
);
let span = info_span!("MessageProcessor", origin=%message_processor.domain());
let processor = Processor::new(Box::new(message_processor));
let processor = Processor::new(Box::new(message_processor), task_monitor.clone());
processor.spawn().instrument(span)
}
fn run_merkle_tree_processor(&self, origin: &HyperlaneDomain) -> Instrumented<JoinHandle<()>> {
fn run_merkle_tree_processor(
&self,
origin: &HyperlaneDomain,
task_monitor: TaskMonitor,
) -> Instrumented<JoinHandle<()>> {
let metrics = MerkleTreeProcessorMetrics::new();
let merkle_tree_processor = MerkleTreeProcessor::new(
self.dbs.get(origin).unwrap().clone(),
@ -445,7 +488,7 @@ impl Relayer {
);
let span = info_span!("MerkleTreeProcessor", origin=%merkle_tree_processor.domain());
let processor = Processor::new(Box::new(merkle_tree_processor));
let processor = Processor::new(Box::new(merkle_tree_processor), task_monitor.clone());
processor.spawn().instrument(span)
}
@ -457,6 +500,7 @@ impl Relayer {
receiver: UnboundedReceiver<QueueOperation>,
retry_receiver_channel: MpmcReceiver<MessageRetryRequest>,
batch_size: u32,
task_monitor: TaskMonitor,
) -> Instrumented<JoinHandle<()>> {
let serial_submitter = SerialSubmitter::new(
destination.clone(),
@ -464,10 +508,11 @@ impl Relayer {
retry_receiver_channel,
SerialSubmitterMetrics::new(&self.core.metrics, destination),
batch_size,
task_monitor.clone(),
);
let span = info_span!("SerialSubmitter", destination=%destination);
let destination = destination.clone();
tokio::spawn(async move {
tokio::spawn(TaskMonitor::instrument(&task_monitor, async move {
// Propagate task panics
serial_submitter.spawn().await.unwrap_or_else(|err| {
panic!(
@ -475,7 +520,7 @@ impl Relayer {
destination, err
)
});
})
}))
.instrument(span)
}
}

@ -1,4 +1,4 @@
//! The correct settings shape is defined in the TypeScript SDK metadata. While the the exact shape
//! The correct settings shape is defined in the TypeScript SDK metadata. While the exact shape
//! and validations it defines are not applied here, we should mirror them.
//! ANY CHANGES HERE NEED TO BE REFLECTED IN THE TYPESCRIPT SDK.
@ -267,13 +267,13 @@ impl<'a> From<&'a HyperlaneMessage> for MatchInfo<'a> {
impl MatchingList {
/// Check if a message matches any of the rules.
/// - `default`: What to return if the the matching list is empty.
/// - `default`: What to return if the matching list is empty.
pub fn msg_matches(&self, msg: &HyperlaneMessage, default: bool) -> bool {
self.matches(msg.into(), default)
}
/// Check if a message matches any of the rules.
/// - `default`: What to return if the the matching list is empty.
/// - `default`: What to return if the matching list is empty.
fn matches(&self, info: MatchInfo, default: bool) -> bool {
if let Some(rules) = &self.0 {
matches_any_rule(rules.iter(), info)

@ -1,6 +1,6 @@
//! Relayer configuration
//!
//! The correct settings shape is defined in the TypeScript SDK metadata. While the the exact shape
//! The correct settings shape is defined in the TypeScript SDK metadata. While the exact shape
//! and validations it defines are not applied here, we should mirror them.
//! ANY CHANGES HERE NEED TO BE REFLECTED IN THE TYPESCRIPT SDK.

@ -12,6 +12,7 @@ version.workspace = true
[dependencies]
async-trait.workspace = true
config.workspace = true
console-subscriber.workspace = true
derive_more.workspace = true
ethers.workspace = true
eyre.workspace = true

@ -44,6 +44,7 @@ impl BaseAgent for Scraper {
metrics: Arc<CoreMetrics>,
agent_metrics: AgentMetrics,
chain_metrics: ChainMetrics,
_tokio_console_server: console_subscriber::Server,
) -> eyre::Result<Self>
where
Self: Sized,

@ -1,6 +1,6 @@
//! Scraper configuration.
//!
//! The correct settings shape is defined in the TypeScript SDK metadata. While the the exact shape
//! The correct settings shape is defined in the TypeScript SDK metadata. While the exact shape
//! and validations it defines are not applied here, we should mirror them.
//! ANY CHANGES HERE NEED TO BE REFLECTED IN THE TYPESCRIPT SDK.

@ -13,6 +13,7 @@ version.workspace = true
async-trait.workspace = true
axum.workspace = true
config.workspace = true
console-subscriber.workspace = true
derive_more.workspace = true
derive-new.workspace = true
ethers.workspace = true

@ -1,6 +1,6 @@
//! Validator configuration.
//!
//! The correct settings shape is defined in the TypeScript SDK metadata. While the the exact shape
//! The correct settings shape is defined in the TypeScript SDK metadata. While the exact shape
//! and validations it defines are not applied here, we should mirror them.
//! ANY CHANGES HERE NEED TO BE REFLECTED IN THE TYPESCRIPT SDK.

@ -63,6 +63,7 @@ impl BaseAgent for Validator {
metrics: Arc<CoreMetrics>,
agent_metrics: AgentMetrics,
chain_metrics: ChainMetrics,
_tokio_console_server: console_subscriber::Server,
) -> Result<Self>
where
Self: Sized,

@ -164,7 +164,7 @@ impl<M> SequenceAwareIndexer<HyperlaneMessage> for EthereumMailboxIndexer<M>
where
M: Middleware + 'static,
{
#[instrument(err, skip(self))]
#[instrument(err, skip(self), ret)]
async fn latest_sequence_count_and_tip(&self) -> ChainResult<(Option<u32>, u32)> {
let tip = Indexer::<HyperlaneMessage>::get_finalized_block_number(self).await?;
let sequence = self.contract.nonce().block(u64::from(tip)).call().await?;

@ -1,6 +1,7 @@
{
"chains": {
"ancient8": {
"aggregationHook": "0x1EF4ED658d542524d1D547ba2F94d3B038a55b8f",
"batchContractAddress": "0x4C97D35c668EE5194a13c8DE8Afc18cce40C9F28",
"blockExplorers": [
{
@ -18,6 +19,7 @@
"chainId": 888888888,
"displayName": "Ancient8",
"domainId": 888888888,
"domainRoutingIsm": "0xB6F0f1267B01C27326F61a4B4fe2c73751802685",
"domainRoutingIsmFactory": "0x1052eF3419f26Bec74Ed7CEf4a4FA6812Bc09908",
"fallbackRoutingHook": "0x5E01d8F34b629E3f92d69546bbc4142A7Adee7e9",
"gasCurrencyCoinGeckoId": "ethereum",
@ -25,7 +27,7 @@
"from": 2507127
},
"interchainGasPaymaster": "0x8F1E22d309baa69D398a03cc88E9b46037e988AA",
"interchainSecurityModule": "0x6E3387e12C6e181BF8e712eCa9c60ccEEaBD1c67",
"interchainSecurityModule": "0xBd3C7253F08c040eDB9c54e7CD4f8a5fd1eb935D",
"isTestnet": false,
"mailbox": "0x2f2aFaE1139Ce54feFC03593FeE8AB2aDF4a85A7",
"merkleTreeHook": "0x811808Dd29ba8B0FC6C0ec0b5537035E59745162",
@ -36,6 +38,7 @@
"symbol": "ETH"
},
"pausableHook": "0x66DC49405Ae2956f7E87FEAa9fE8f506C8987462",
"pausableIsm": "0xcf678903c003651DB0bb933820259A16ea9d95e4",
"protocol": "ethereum",
"protocolFee": "0xE0C452DDA7506f0F4dE5C8C1d383F7aD866eA4F0",
"proxyAdmin": "0x0761b0827849abbf7b0cC09CE14e1C93D87f5004",
@ -45,6 +48,7 @@
}
],
"staticAggregationHookFactory": "0xEb9FcFDC9EfDC17c1EC5E1dc085B98485da213D6",
"staticAggregationIsm": "0xBd3C7253F08c040eDB9c54e7CD4f8a5fd1eb935D",
"staticAggregationIsmFactory": "0x8F7454AC98228f3504Bb91eA3D8Adafe6406110A",
"staticMerkleRootMultisigIsmFactory": "0x2C1FAbEcd7bFBdEBF27CcdB67baADB38b6Df90fC",
"staticMessageIdMultisigIsmFactory": "0x8b83fefd896fAa52057798f6426E9f0B080FCCcE",
@ -71,6 +75,7 @@
"chainId": 42161,
"displayName": "Arbitrum",
"domainId": 42161,
"domainRoutingIsm": "0x5d759B5CeEb1C3b0181bEc0F80fb04f820cc35D1",
"domainRoutingIsmFactory": "0xa2931C37957f3079d3B21b877d56E1db930e02a5",
"fallbackRoutingHook": "0x9e8fFb1c26099e75Dd5D794030e2E9AA51471c25",
"gasCurrencyCoinGeckoId": "ethereum",
@ -81,7 +86,7 @@
"interchainAccountIsm": "0xfa8bfcE55B3A0631dF38257615cEF7FCD3523A48",
"interchainAccountRouter": "0xCD0CFFf6eFD943b4b81f2c15847730dbcD30e3aE",
"interchainGasPaymaster": "0x3b6044acd6767f017e99318AA6Ef93b7B06A5a22",
"interchainSecurityModule": "0xD0DBBF922076352cC50B285A0023536561F00EEa",
"interchainSecurityModule": "0x96845a0469363f90779f6D5cd49D79bDDAc69429",
"mailbox": "0x979Ca5202784112f4738403dBec5D0F3B9daabB9",
"merkleTreeHook": "0x748040afB89B8FdBb992799808215419d36A0930",
"name": "arbitrum",
@ -91,15 +96,23 @@
"symbol": "ETH"
},
"pausableHook": "0xEf30f29Dcd3FCB1DCcDA9C7Cbf2A5957E8Ee9Cc3",
"pausableIsm": "0x1E38556b4fE553e6249448960875883990efcf34",
"protocol": "ethereum",
"protocolFee": "0xD0199067DACb8526e7dc524a9a7DCBb57Cd25421",
"proxyAdmin": "0x80Cebd56A65e46c474a1A101e89E76C4c51D179c",
"rpcUrls": [
{
"http": "https://arbitrum.llamarpc.com"
},
{
"http": "https://rpc.ankr.com/arbitrum"
},
{
"http": "https://arb1.arbitrum.io/rpc"
}
],
"staticAggregationHookFactory": "0x9B5f440bBb64Fee337F37e03362b628711Ea09C7",
"staticAggregationIsm": "0x96845a0469363f90779f6D5cd49D79bDDAc69429",
"staticAggregationIsmFactory": "0xD4883084389fC1Eeb4dAfB2ADcFc36B711c310EB",
"staticMerkleRootMultisigIsmFactory": "0x3C330D4A2e2b8443AFaB8E326E64ab4251B7Eae0",
"staticMessageIdMultisigIsmFactory": "0x12Df53079d399a47e9E730df095b712B0FDFA791",
@ -128,6 +141,7 @@
"chainId": 43114,
"displayName": "Avalanche",
"domainId": 43114,
"domainRoutingIsm": "0x9f68F961ba2dF53b1cB3EbCC0b08e89790C6E2f6",
"domainRoutingIsmFactory": "0x28F7907911C7E321c596686AE6D1F20516450037",
"fallbackRoutingHook": "0x61D15D571D5f7A9eF0D1938f072f430bBF024747",
"gasCurrencyCoinGeckoId": "avalanche-2",
@ -138,7 +152,7 @@
"interchainAccountIsm": "0x786c26C1857032617c215f265509d6E44e44Bfe3",
"interchainAccountRouter": "0xA967A6CE0e73fAf672843DECaA372511996E8852",
"interchainGasPaymaster": "0x95519ba800BBd0d34eeAE026fEc620AD978176C0",
"interchainSecurityModule": "0xA36B02a83564f52d9244310Ea439ee6F6AfeFb60",
"interchainSecurityModule": "0xe7a61510EA7197281b49e5bdf1798608d5132595",
"mailbox": "0xFf06aFcaABaDDd1fb08371f9ccA15D73D51FeBD6",
"merkleTreeHook": "0x84eea61D679F42D92145fA052C89900CBAccE95A",
"name": "avalanche",
@ -148,10 +162,14 @@
"symbol": "AVAX"
},
"pausableHook": "0x239eB860770F1C48ABAC9bE9825d20e3E7c018df",
"pausableIsm": "0xd76080269C641e1adb786b72ae60Ddac3b6b8ed0",
"protocol": "ethereum",
"protocolFee": "0xEc4AdA26E51f2685279F37C8aE62BeAd8212D597",
"proxyAdmin": "0xd7CF8c05fd81b8cA7CfF8E6C49B08a9D63265c9B",
"rpcUrls": [
{
"http": "https://rpc.ankr.com/avalanche"
},
{
"http": "https://api.avax.network/ext/bc/C/rpc",
"pagination": {
@ -161,6 +179,7 @@
}
],
"staticAggregationHookFactory": "0x3bF6Ac986C7Af9A9Ac356C0e99C0041EFd8D96e7",
"staticAggregationIsm": "0xe7a61510EA7197281b49e5bdf1798608d5132595",
"staticAggregationIsmFactory": "0xa5E13796eB7d2EDCc88012c8cfF90D69B51FcF9f",
"staticMerkleRootMultisigIsmFactory": "0x896cF1D1B66cD211633eDd589fF158E8Cfaf9B54",
"staticMessageIdMultisigIsmFactory": "0x8819D653DF5b1FC0DdB32189a2704E471AF8483c",
@ -188,6 +207,7 @@
"chainId": 8453,
"displayName": "Base",
"domainId": 8453,
"domainRoutingIsm": "0x80C8F6394c0FcF7bAB16ac08b85484361eCe5888",
"domainRoutingIsmFactory": "0x7E27456a839BFF31CA642c060a2b68414Cb6e503",
"fallbackRoutingHook": "0x4Eb82Ee35b0a1c1d776E3a3B547f9A9bA6FCC9f2",
"gasCurrencyCoinGeckoId": "ethereum",
@ -198,7 +218,7 @@
"interchainAccountIsm": "0x861908E6c8F992537F557da5Fb5876836036b347",
"interchainAccountRouter": "0xa85F9e4fdA2FFF1c07f2726a630443af3faDF830",
"interchainGasPaymaster": "0xc3F23848Ed2e04C0c6d41bd7804fa8f89F940B94",
"interchainSecurityModule": "0x5D1e7D7c5B9e6dDC8439F67F10c578f2A1084f6F",
"interchainSecurityModule": "0x77bE0b5aE400675063Ce2B2B0d692D9341f4b193",
"mailbox": "0xeA87ae93Fa0019a82A727bfd3eBd1cFCa8f64f1D",
"merkleTreeHook": "0x19dc38aeae620380430C200a6E990D5Af5480117",
"name": "base",
@ -208,6 +228,7 @@
"symbol": "ETH"
},
"pausableHook": "0x46fa3A5780e5B90Eaf34BDED554d5353B5ABE9E7",
"pausableIsm": "0x2AF32cF8e3Cf42d221eDa0c843818fA5ee129E27",
"protocol": "ethereum",
"protocolFee": "0x99ca8c74cE7Cfa9d72A51fbb05F9821f5f826b3a",
"proxyAdmin": "0x4Ed7d626f1E96cD1C0401607Bf70D95243E3dEd1",
@ -223,14 +244,17 @@
}
],
"staticAggregationHookFactory": "0x1052eF3419f26Bec74Ed7CEf4a4FA6812Bc09908",
"staticAggregationIsm": "0x77bE0b5aE400675063Ce2B2B0d692D9341f4b193",
"staticAggregationIsmFactory": "0xEb9FcFDC9EfDC17c1EC5E1dc085B98485da213D6",
"staticMerkleRootMultisigIsmFactory": "0x8b83fefd896fAa52057798f6426E9f0B080FCCcE",
"staticMessageIdMultisigIsmFactory": "0x8F7454AC98228f3504Bb91eA3D8Adafe6406110A",
"storageGasOracle": "0xBF12ef4B9f307463D3FB59c3604F294dDCe287E2",
"testRecipient": "0xb7C9307fE90B9AB093c6D3EdeE3259f5378D5f03",
"timelockController": "0x0000000000000000000000000000000000000000",
"validatorAnnounce": "0x182E8d7c5F1B06201b102123FC7dF0EaeB445a7B"
},
"blast": {
"aggregationHook": "0x012278333Ce0A845AE9bD7302867a59Bd5D3635d",
"blockExplorers": [
{
"apiUrl": "https://api.routescan.io/v2/network/mainnet/evm/81457/etherscan/api",
@ -247,6 +271,7 @@
"chainId": 81457,
"displayName": "Blast",
"domainId": 81457,
"domainRoutingIsm": "0x0296D16d371a49F631143612020138896b3eA421",
"domainRoutingIsmFactory": "0x2f2aFaE1139Ce54feFC03593FeE8AB2aDF4a85A7",
"fallbackRoutingHook": "0x6Fae4D9935E2fcb11fC79a64e917fb2BF14DaFaa",
"gasCurrencyCoinGeckoId": "ethereum",
@ -254,7 +279,7 @@
"from": 2496427
},
"interchainGasPaymaster": "0xB3fCcD379ad66CED0c91028520C64226611A48c9",
"interchainSecurityModule": "0x0986f6D82A47045788b0ce8EF68f6C0D77726854",
"interchainSecurityModule": "0x208263bB303B2a737642fB13C765F106a2591be8",
"mailbox": "0x3a867fCfFeC2B790970eeBDC9023E75B0a172aa7",
"merkleTreeHook": "0xC9B8ea6230d6687a4b13fD3C0b8f0Ec607B26465",
"name": "blast",
@ -264,6 +289,7 @@
"symbol": "ETH"
},
"pausableHook": "0xE0C452DDA7506f0F4dE5C8C1d383F7aD866eA4F0",
"pausableIsm": "0x4C97D35c668EE5194a13c8DE8Afc18cce40C9F28",
"protocol": "ethereum",
"protocolFee": "0x12582c7B0f43c6A667CBaA7fA8b112F7fb1E69F0",
"proxyAdmin": "0xeA87ae93Fa0019a82A727bfd3eBd1cFCa8f64f1D",
@ -273,6 +299,7 @@
}
],
"staticAggregationHookFactory": "0x4Ed7d626f1E96cD1C0401607Bf70D95243E3dEd1",
"staticAggregationIsm": "0x208263bB303B2a737642fB13C765F106a2591be8",
"staticAggregationIsmFactory": "0x0761b0827849abbf7b0cC09CE14e1C93D87f5004",
"staticMerkleRootMultisigIsmFactory": "0xEb9FcFDC9EfDC17c1EC5E1dc085B98485da213D6",
"staticMessageIdMultisigIsmFactory": "0x1052eF3419f26Bec74Ed7CEf4a4FA6812Bc09908",
@ -300,6 +327,7 @@
"displayName": "Binance Smart Chain",
"displayNameShort": "Binance",
"domainId": 56,
"domainRoutingIsm": "0xBc3Af0D4930502Ff0f6a8416a7a184c7BFFe19E7",
"domainRoutingIsmFactory": "0xe6Af5720d34213C805C08e2470aea979e3F72F75",
"fallbackRoutingHook": "0x237E81f87F57Badad9e09f13CC676D986cA852e7",
"gasCurrencyCoinGeckoId": "binancecoin",
@ -310,7 +338,7 @@
"interchainAccountIsm": "0xB274Bbbc1df5f1d1763216A93d473fde6f5de043",
"interchainAccountRouter": "0x4BBd67dC995572b40Dc6B3eB6CdE5185a5373868",
"interchainGasPaymaster": "0x78E25e7f84416e69b9339B0A6336EB6EFfF6b451",
"interchainSecurityModule": "0xab3df354baBee6c2B88E2CeD3b2e030e31aA5e61",
"interchainSecurityModule": "0xfA360ff588623A026BF19A1801F2A8F1f045fa33",
"mailbox": "0x2971b9Aec44bE4eb673DF1B88cDB57b96eefe8a4",
"merkleTreeHook": "0xFDb9Cd5f9daAA2E4474019405A328a88E7484f26",
"name": "bsc",
@ -320,6 +348,7 @@
"symbol": "BNB"
},
"pausableHook": "0x7DBdAd1b4A922B65d37d7258a4227b6658344b7f",
"pausableIsm": "0x25dB01caDf91CfD2f7e6dD829Ce81698217F9151",
"protocol": "ethereum",
"protocolFee": "0xA8Aa5f14a5463a78E45CC068F11c867949F3E367",
"proxyAdmin": "0x65993Af9D0D3a64ec77590db7ba362D6eB78eF70",
@ -335,6 +364,7 @@
}
],
"staticAggregationHookFactory": "0xe70E86a7D1e001D419D71F960Cb6CaD59b6A3dB6",
"staticAggregationIsm": "0xfA360ff588623A026BF19A1801F2A8F1f045fa33",
"staticAggregationIsmFactory": "0x38B3878c4fb44d201DA924c4a04bae3EE728c065",
"staticMerkleRootMultisigIsmFactory": "0xfADBc81Ca8A957F1Bf7c78bCc575b28DBDE042b6",
"staticMessageIdMultisigIsmFactory": "0x4B1d8352E35e3BDE36dF5ED2e73C24E35c4a96b7",
@ -374,14 +404,14 @@
"domainRoutingIsm": "0xf18E32428dad0802C5D6F723cB80A6Da889777c4",
"domainRoutingIsmFactory": "0x2A2c22B0a8615ad24839fA6Af302E896Af32d1a3",
"fallbackRoutingHook": "0xDC98a856fb9112894c2fE32267DA8bF35645FAF3",
"gnosisSafeTransactionServiceUrl": "https://mainnet-tx-svc.celo-safe-prod.celo-networks-dev.org/",
"gnosisSafeTransactionServiceUrl": "https://safe-transaction-celo.safe.global/",
"index": {
"from": 22102340
},
"interchainAccountIsm": "0x30a8DEc5318e2aAa9ad5b069fC606c4CfF6f5676",
"interchainAccountRouter": "0x4ED23E3885e1651E62564F78817D91865beba575",
"interchainGasPaymaster": "0x571f1435613381208477ac5d6974310d88AC7cB7",
"interchainSecurityModule": "0x99e8E56Dce3402D6E09A82718937fc1cA2A9491E",
"interchainSecurityModule": "0x0dcb01D4ABfa73fadB17C4B0e8cd52A38BD52c66",
"mailbox": "0x50da3B3907A08a24fe4999F4Dcf337E8dC7954bb",
"merkleTreeHook": "0x04dB778f05854f26E67e0a66b740BBbE9070D366",
"name": "celo",
@ -435,8 +465,10 @@
"chainId": 1,
"displayName": "Ethereum",
"domainId": 1,
"domainRoutingIsm": "0xBA328338044e0C0AFd0591FB6E5e2F83C4e8F742",
"domainRoutingIsmFactory": "0x28fA9552F19039b450498B0d8e5DEAe0d0aAc559",
"fallbackRoutingHook": "0x571f1435613381208477ac5d6974310d88AC7cB7",
"gasCurrencyCoinGeckoId": "ethereum",
"gnosisSafeTransactionServiceUrl": "https://safe-transaction-mainnet.safe.global/",
"index": {
"from": 18422581
@ -444,7 +476,7 @@
"interchainAccountIsm": "0x609707355a53d2aAb6366f48E2b607C599D26B29",
"interchainAccountRouter": "0x8dBae9B1616c46A20591fE0006Bf015E28ca5cC9",
"interchainGasPaymaster": "0x9e6B1022bE9BBF5aFd152483DAD9b88911bC8611",
"interchainSecurityModule": "0xB42b88243F749F47697F01Ae1cbBCA9d4763902a",
"interchainSecurityModule": "0x8CE0c6cAf18DbF5882b35F26E28412f3E9AbDeca",
"mailbox": "0xc005dc82818d67AF737725bD4bf75435d065D239",
"merkleTreeHook": "0x48e6c30B97748d1e2e03bf3e9FbE3890ca5f8CCA",
"name": "ethereum",
@ -454,6 +486,7 @@
"symbol": "ETH"
},
"pausableHook": "0x3A66Dc852e56d3748838b3C27CF381105b83705b",
"pausableIsm": "0xDC98a856fb9112894c2fE32267DA8bF35645FAF3",
"protocol": "ethereum",
"protocolFee": "0x8B05BF30F6247a90006c5837eA63C7905D79e6d8",
"proxyAdmin": "0x75EE15Ee1B4A75Fa3e2fDF5DF3253c25599cc659",
@ -466,6 +499,7 @@
}
],
"staticAggregationHookFactory": "0x6D2555A8ba483CcF4409C39013F5e9a3285D3C9E",
"staticAggregationIsm": "0x5447cdC0f4B1Afd827BF9d2F6b6cE7668d5dc284",
"staticAggregationIsmFactory": "0x46FA191Ad972D9674Ed752B69f9659A0d7b22846",
"staticMerkleRootMultisigIsmFactory": "0x47e8aF9e30C32Ab91060ED587894288786761B45",
"staticMessageIdMultisigIsmFactory": "0xfA21D9628ADce86531854C2B7ef00F07394B0B69",
@ -497,6 +531,7 @@
"chainId": 100,
"displayName": "Gnosis",
"domainId": 100,
"domainRoutingIsm": "0x83873DB8B4982091D0781B4eDF108DCb98075C39",
"domainRoutingIsmFactory": "0xbB5Df000113e767dE11343A16f83De733e5bCC0F",
"fallbackRoutingHook": "0x24f5E353dD03E103Ba2372F7D6FC0cf3A66f849c",
"gasCurrencyCoinGeckoId": "xdai",
@ -507,7 +542,7 @@
"interchainAccountIsm": "0x5a56dff3D92D635372718f86e6dF09C1129CFf53",
"interchainAccountRouter": "0x5E59EBAedeB691408EBAcF6C37218fa2cFcaC9f2",
"interchainGasPaymaster": "0xDd260B99d302f0A3fF885728c086f729c06f227f",
"interchainSecurityModule": "0x8e1aa0687B6d939D5a44304D13B7c922ebB012f1",
"interchainSecurityModule": "0x5DB7edF8C1CF91e34895dB2e4b28d8b9C68ddC7B",
"mailbox": "0xaD09d78f4c6b9dA2Ae82b1D34107802d380Bb74f",
"merkleTreeHook": "0x2684C6F89E901987E1FdB7649dC5Be0c57C61645",
"name": "gnosis",
@ -517,6 +552,7 @@
"symbol": "xDai"
},
"pausableHook": "0xf728C884De5275a608dEC222dACd0f2BF2E23AB6",
"pausableIsm": "0x223F7D3f27E6272266AE4B5B91Fd5C7A2d798cD8",
"protocol": "ethereum",
"protocolFee": "0x9c2214467Daf9e2e1F45b36d08ce0b9C65BFeA88",
"proxyAdmin": "0x81a92A1a272cb09d7b4970b07548463dC7aE0cB7",
@ -530,6 +566,7 @@
}
],
"staticAggregationHookFactory": "0xbC8AA096dabDf4A0200BB9f8D4Cbb644C3D86d7B",
"staticAggregationIsm": "0xe640167B9a283C8b4039fA33f3ac7be6e7E788c5",
"staticAggregationIsmFactory": "0x11EF91d17c5ad3330DbCa709a8841743d3Af6819",
"staticMerkleRootMultisigIsmFactory": "0x8E273260EAd8B72A085B19346A676d355740e875",
"staticMessageIdMultisigIsmFactory": "0x603f46cc520d2fc22957b81e206408590808F02F",
@ -567,7 +604,7 @@
"interchainAccountIsm": "0x31894E7a734540B343d67E491148EB4FC9f7A45B",
"interchainAccountRouter": "0x4E55aDA3ef1942049EA43E904EB01F4A0a9c39bd",
"interchainGasPaymaster": "0x19dc38aeae620380430C200a6E990D5Af5480117",
"interchainSecurityModule": "0x3052aD50De54aAAc5D364d80bBE681d29e924597",
"interchainSecurityModule": "0x440f7AD246F3e75df88a6338E8A33e91DA4B2B05",
"mailbox": "0x2f2aFaE1139Ce54feFC03593FeE8AB2aDF4a85A7",
"merkleTreeHook": "0x0972954923a1e2b2aAb04Fa0c4a0797e5989Cd65",
"name": "inevm",
@ -652,6 +689,7 @@
"domainRoutingIsmFactory": "0x8358D8291e3bEDb04804975eEa0fe9fe0fAfB147",
"fallbackRoutingHook": "0xD1E267d2d7876e97E217BfE61c34AB50FEF52807",
"gasCurrencyCoinGeckoId": "ethereum",
"gnosisSafeTransactionServiceUrl": "https://transaction.safe.manta.network",
"index": {
"from": 437300
},
@ -892,6 +930,39 @@
"timelockController": "0x0000000000000000000000000000000000000000",
"validatorAnnounce": "0x30f5b08e01808643221528BB2f7953bf2830Ef38"
},
"osmosis": {
"bech32Prefix": "osmo",
"blocks": {
"reorgPeriod": 1
},
"canonicalAsset": "uosmo",
"chainId": "osmosis-1",
"contractAddressBytes": 32,
"domainId": "875",
"gasPrice": {
"amount": "0.025",
"denom": "uosmo"
},
"grpcUrls": [
{
"http": "https://osmosis-grpc.publicnode.com:443"
}
],
"index": {
"from": 14389169
},
"interchainGasPaymaster": "0xd20a9dcf61939fc2fe6ad501b9457b1029b3cc7ab12ed72675ea2e10d831ee5d",
"mailbox": "0x9493e39d85dd038022f97d88aba6bff98d98f9a016b4f2e498bf1d9898420172",
"merkleTreeHook": "0x8920e062ee5ed8afccbc155d13ea9049296399ee41403655864fcd243edc7388",
"name": "osmosis1",
"protocol": "cosmos",
"rpcUrls": [
{
"http": "https://osmosis-rpc.publicnode.com:443"
}
],
"validatorAnnounce": "0xaf867da5b09a20ee49161d57f99477c0c42d100f34eb53da0d2eb7fc6c257235"
},
"polygon": {
"aggregationHook": "0x34dAb05650Cf590088bA18aF9d597f3e081bCc47",
"blockExplorers": [
@ -910,6 +981,7 @@
"chainId": 137,
"displayName": "Polygon",
"domainId": 137,
"domainRoutingIsm": "0xBcb9d74E1D2549fc1939023433aaAB11587bc338",
"domainRoutingIsmFactory": "0x0d0E816eE4557689d34fAd5885C53b9393C1D9fA",
"fallbackRoutingHook": "0xca4cCe24E7e06241846F5EA0cda9947F0507C40C",
"gasCurrencyCoinGeckoId": "matic-network",
@ -920,7 +992,7 @@
"interchainAccountIsm": "0x90384bC552e3C48af51Ef7D9473A9bF87431f5c7",
"interchainAccountRouter": "0x5e80f3474825B61183c0F0f0726796F589082420",
"interchainGasPaymaster": "0x0071740Bf129b05C4684abfbBeD248D80971cce2",
"interchainSecurityModule": "0x9a795fB62f86146ec06e2377e3C95Af65c7C20eB",
"interchainSecurityModule": "0xe289bD204Dbb4F3aaFA27Dbe5751C71e101CFD80",
"mailbox": "0x5d934f4e2f797775e53561bB72aca21ba36B96BB",
"merkleTreeHook": "0x73FbD25c3e817DC4B4Cd9d00eff6D83dcde2DfF6",
"name": "polygon",
@ -930,6 +1002,7 @@
"symbol": "ETH"
},
"pausableHook": "0x748040afB89B8FdBb992799808215419d36A0930",
"pausableIsm": "0x6741e91fFDC31c7786E3684427c628dad06299B0",
"protocol": "ethereum",
"protocolFee": "0xF8F3629e308b4758F8396606405989F8D8C9c578",
"proxyAdmin": "0xC4F7590C5d30BE959225dC75640657954A86b980",
@ -945,6 +1018,7 @@
}
],
"staticAggregationHookFactory": "0xFeeB86e70e4a640cDd29636CCE19BD6fe8628135",
"staticAggregationIsm": "0xe289bD204Dbb4F3aaFA27Dbe5751C71e101CFD80",
"staticAggregationIsmFactory": "0x81AdDD9Ca89105063DaDEBd5B4408551Ce850E22",
"staticMerkleRootMultisigIsmFactory": "0xa9E0E18E78b098c2DE36c42E4DDEA13ce214c592",
"staticMessageIdMultisigIsmFactory": "0xEa5Be2AD66BB1BA321B7aCf0A079fBE304B09Ca0",
@ -953,7 +1027,7 @@
"testTokenRecipient": "0x85ac1164878e017b67660a74ff1f41f3D05C02Bb",
"timelockController": "0x0000000000000000000000000000000000000000",
"transactionOverrides": {
"maxFeePerGas": 800000000000,
"maxFeePerGas": 550000000000,
"maxPriorityFeePerGas": 50000000000
},
"validatorAnnounce": "0x454E1a1E1CA8B51506090f1b5399083658eA4Fc5"
@ -980,6 +1054,7 @@
"domainRoutingIsmFactory": "0xe4057c5B0c43Dc18E36b08C39B419F190D29Ac2d",
"fallbackRoutingHook": "0x01aE937A7B05d187bBCBE80F44F41879D3D335a4",
"gasCurrencyCoinGeckoId": "ethereum",
"gnosisSafeTransactionServiceUrl": "https://safe-transaction-zkevm.safe.global/",
"index": {
"from": 6577743
},
@ -1015,6 +1090,56 @@
"timelockController": "0x0000000000000000000000000000000000000000",
"validatorAnnounce": "0x2fa5F5C96419C222cDbCeC797D696e6cE428A7A9"
},
"redstone": {
"blockExplorers": [
{
"apiUrl": "https://explorer.redstone.xyz/api",
"family": "blockscout",
"name": "Redstone Explorer",
"url": "https://explorer.redstone.xyz"
}
],
"blocks": {
"confirmations": 1,
"estimateBlockTime": 2,
"reorgPeriod": 0
},
"chainId": 690,
"displayName": "Redstone",
"domainId": 690,
"domainRoutingIsmFactory": "0x0761b0827849abbf7b0cC09CE14e1C93D87f5004",
"fallbackRoutingHook": "0xA1ac41d8A663fd317cc3BD94C7de92dC4BA4a882",
"gasCurrencyCoinGeckoId": "ethereum",
"index": {
"from": 1797579
},
"interchainGasPaymaster": "0x2Fa570E83009eaEef3a1cbd496a9a30F05266634",
"interchainSecurityModule": "0xF4689C7fA4920C91a6EEEd59630C9C8da7a77D40",
"mailbox": "0xeA87ae93Fa0019a82A727bfd3eBd1cFCa8f64f1D",
"merkleTreeHook": "0x8F1E22d309baa69D398a03cc88E9b46037e988AA",
"name": "redstone",
"nativeToken": {
"decimals": 18,
"name": "Ether",
"symbol": "ETH"
},
"pausableHook": "0xC9B8ea6230d6687a4b13fD3C0b8f0Ec607B26465",
"protocol": "ethereum",
"protocolFee": "0x26f32245fCF5Ad53159E875d5Cae62aEcf19c2d4",
"proxyAdmin": "0x4Ed7d626f1E96cD1C0401607Bf70D95243E3dEd1",
"rpcUrls": [
{
"http": "https://rpc.redstonechain.com"
}
],
"staticAggregationHookFactory": "0x1052eF3419f26Bec74Ed7CEf4a4FA6812Bc09908",
"staticAggregationIsmFactory": "0xEb9FcFDC9EfDC17c1EC5E1dc085B98485da213D6",
"staticMerkleRootMultisigIsmFactory": "0x8b83fefd896fAa52057798f6426E9f0B080FCCcE",
"staticMessageIdMultisigIsmFactory": "0x8F7454AC98228f3504Bb91eA3D8Adafe6406110A",
"storageGasOracle": "0x6Fae4D9935E2fcb11fC79a64e917fb2BF14DaFaa",
"testRecipient": "0x1Ab68dC4f7b6cfcd00218D4b761b7F3b5a724555",
"validatorAnnounce": "0x12582c7B0f43c6A667CBaA7fA8b112F7fb1E69F0"
},
"scroll": {
"aggregationHook": "0x9Bc0FAf446E128a618A88a2F28960Fb2Ca169faE",
"blockExplorers": [
@ -1036,6 +1161,7 @@
"domainRoutingIsmFactory": "0xe03dad16074BC5EEA9A9311257BF02Eb0B6AAA2b",
"fallbackRoutingHook": "0xDa7cECb05C4aeB02c1aFDE277d4306a2da7Bd762",
"gasCurrencyCoinGeckoId": "ethereum",
"gnosisSafeTransactionServiceUrl": "https://transaction.safe.scroll.xyz",
"index": {
"chunk": 999,
"from": 271840
@ -1073,6 +1199,7 @@
"validatorAnnounce": "0xd83A4F747fE80Ed98839e05079B1B7Fe037b1638"
},
"viction": {
"aggregationHook": "0x5c7890FAf9c99dC55926F00d624D7Bc6D7ac6834",
"blockExplorers": [
{
"apiUrl": "https://www.vicscan.xyz/api",
@ -1089,7 +1216,9 @@
"chainId": 88,
"displayName": "Viction",
"domainId": 88,
"domainRoutingIsm": "0x477145b11E1a71fEb658d96A0E27F19495121504",
"domainRoutingIsmFactory": "0x1052eF3419f26Bec74Ed7CEf4a4FA6812Bc09908",
"fallbackRoutingHook": "0x5d69BC38eF3eDb491c0b7186BEc4eC45c4013f93",
"gasCurrencyCoinGeckoId": "tomochain",
"index": {
"chunk": 999,
@ -1098,7 +1227,7 @@
"interchainAccountIsm": "0xD1E267d2d7876e97E217BfE61c34AB50FEF52807",
"interchainAccountRouter": "0x1956848601549de5aa0c887892061fA5aB4f6fC4",
"interchainGasPaymaster": "0x0D63128D887159d63De29497dfa45AFc7C699AE4",
"interchainSecurityModule": "0xBD70Ea9D599a0FC8158B026797177773C3445730",
"interchainSecurityModule": "0xf8F3AF5F6B8f319364c339c0b8cA5975481901eD",
"mailbox": "0x2f2aFaE1139Ce54feFC03593FeE8AB2aDF4a85A7",
"merkleTreeHook": "0x149db7afD694722747035d5AEC7007ccb6F8f112",
"name": "viction",
@ -1107,6 +1236,8 @@
"name": "Viction",
"symbol": "VIC"
},
"pausableHook": "0xDab56C5A1EffFdd23f6BD1243E457B1575984Bc6",
"pausableIsm": "0x92cdbF0Ccdf8E93467FA858fb986fa650A02f2A8",
"protocol": "ethereum",
"protocolFee": "0xd83A4F747fE80Ed98839e05079B1B7Fe037b1638",
"proxyAdmin": "0x0761b0827849abbf7b0cC09CE14e1C93D87f5004",
@ -1119,6 +1250,7 @@
}
],
"staticAggregationHookFactory": "0xEb9FcFDC9EfDC17c1EC5E1dc085B98485da213D6",
"staticAggregationIsm": "0x60586f0b79426f8F406C807a59c7b6478e8bBa0C",
"staticAggregationIsmFactory": "0x8F7454AC98228f3504Bb91eA3D8Adafe6406110A",
"staticMerkleRootMultisigIsmFactory": "0x2C1FAbEcd7bFBdEBF27CcdB67baADB38b6Df90fC",
"staticMessageIdMultisigIsmFactory": "0x8b83fefd896fAa52057798f6426E9f0B080FCCcE",
@ -1127,6 +1259,62 @@
"testTokenRecipient": "0xe042D1fbDf59828dd16b9649Ede7abFc856F7a6c",
"timelockController": "0x0000000000000000000000000000000000000000",
"validatorAnnounce": "0x2fa5F5C96419C222cDbCeC797D696e6cE428A7A9"
},
"zetachain": {
"blockExplorers": [
{
"apiUrl": "https://explorer.zetachain.com",
"family": "other",
"name": "ZetaScan",
"url": "https://explorer.zetachain.com"
}
],
"blocks": {
"confirmations": 1,
"estimateBlockTime": 6,
"reorgPeriod": 0
},
"chainId": 7000,
"displayName": "ZetaChain",
"domainId": 7000,
"domainRoutingIsmFactory": "0x1052eF3419f26Bec74Ed7CEf4a4FA6812Bc09908",
"fallbackRoutingHook": "0x8F1E22d309baa69D398a03cc88E9b46037e988AA",
"gasCurrencyCoinGeckoId": "zetachain",
"index": {
"from": 3068132
},
"interchainGasPaymaster": "0x931dFCc8c1141D6F532FD023bd87DAe0080c835d",
"interchainSecurityModule": "0x8dfE6790DbB2Ecc1bEdb0eECfc1Ff467Ae5d8C89",
"mailbox": "0x2f2aFaE1139Ce54feFC03593FeE8AB2aDF4a85A7",
"merkleTreeHook": "0xE2ee936bEa8e42671c400aC96dE198E06F2bA2A6",
"name": "zetachain",
"nativeToken": {
"decimals": 18,
"name": "ZetaChain",
"symbol": "ZETA"
},
"pausableHook": "0xA1ac41d8A663fd317cc3BD94C7de92dC4BA4a882",
"protocol": "ethereum",
"protocolFee": "0xea820f9BCFD5E16a0dd42071EB61A29874Ad81A4",
"proxyAdmin": "0x0761b0827849abbf7b0cC09CE14e1C93D87f5004",
"rpcUrls": [
{
"http": "https://zetachain-evm.blockpi.network/v1/rpc/public"
},
{
"http": "https://zetachain-athens-evm.blockpi.network/v1/rpc/public"
},
{
"http": "https://zetachain-mainnet-archive.allthatnode.com:8545"
}
],
"staticAggregationHookFactory": "0xEb9FcFDC9EfDC17c1EC5E1dc085B98485da213D6",
"staticAggregationIsmFactory": "0x8F7454AC98228f3504Bb91eA3D8Adafe6406110A",
"staticMerkleRootMultisigIsmFactory": "0x2C1FAbEcd7bFBdEBF27CcdB67baADB38b6Df90fC",
"staticMessageIdMultisigIsmFactory": "0x8b83fefd896fAa52057798f6426E9f0B080FCCcE",
"storageGasOracle": "0xC9B8ea6230d6687a4b13fD3C0b8f0Ec607B26465",
"testRecipient": "0x12582c7B0f43c6A667CBaA7fA8b112F7fb1E69F0",
"validatorAnnounce": "0x48083C69f5a42c6B69ABbAd48AE195BD36770ee2"
}
},
"defaultRpcConsensusType": "fallback"

@ -15,6 +15,7 @@ axum.workspace = true
bs58.workspace = true
color-eyre = { workspace = true, optional = true }
config.workspace = true
console-subscriber.workspace = true
convert_case.workspace = true
derive_builder.workspace = true
derive-new.workspace = true

@ -44,6 +44,7 @@ pub trait BaseAgent: Send + Sync + Debug {
metrics: Arc<CoreMetrics>,
agent_metrics: AgentMetrics,
chain_metrics: ChainMetrics,
tokio_console_server: console_subscriber::Server,
) -> Result<Self>
where
Self: Sized;
@ -75,10 +76,17 @@ pub async fn agent_main<A: BaseAgent>() -> Result<()> {
let core_settings: &Settings = settings.as_ref();
let metrics = settings.as_ref().metrics(A::AGENT_NAME)?;
core_settings.tracing.start_tracing(&metrics)?;
let tokio_server = core_settings.tracing.start_tracing(&metrics)?;
let agent_metrics = create_agent_metrics(&metrics)?;
let chain_metrics = create_chain_metrics(&metrics)?;
let agent = A::from_settings(settings, metrics.clone(), agent_metrics, chain_metrics).await?;
let agent = A::from_settings(
settings,
metrics.clone(),
agent_metrics,
chain_metrics,
tokio_server,
)
.await?;
// This await will only end if a panic happens. We won't crash, but instead gracefully shut down
agent.run().await;

@ -9,12 +9,11 @@ use hyperlane_core::{
HyperlaneSequenceAwareIndexerStoreReader, IndexMode, Indexed, LogMeta, SequenceIndexed,
};
use itertools::Itertools;
use tracing::{debug, warn};
use tracing::{debug, instrument, warn};
use super::{LastIndexedSnapshot, TargetSnapshot};
/// A sequence-aware cursor that syncs backward until there are no earlier logs to index.
#[derive(Debug)]
pub(crate) struct BackwardSequenceAwareSyncCursor<T> {
/// The max chunk size to query for logs.
/// If in sequence mode, this is the max number of sequences to query.
@ -34,6 +33,11 @@ pub(crate) struct BackwardSequenceAwareSyncCursor<T> {
}
impl<T: Debug> BackwardSequenceAwareSyncCursor<T> {
#[instrument(
skip(db),
fields(chunk_size, next_sequence, start_block, index_mode),
ret
)]
pub fn new(
chunk_size: u32,
db: Arc<dyn HyperlaneSequenceAwareIndexerStoreReader<T>>,
@ -61,6 +65,7 @@ impl<T: Debug> BackwardSequenceAwareSyncCursor<T> {
/// Gets the next range of logs to query.
/// If the cursor is fully synced, this returns None.
/// Otherwise, it returns the next range to query, either by block or sequence depending on the mode.
#[instrument(ret)]
pub async fn get_next_range(&mut self) -> Result<Option<RangeInclusive<u32>>> {
// Skip any already indexed logs.
self.skip_indexed().await?;
@ -129,6 +134,11 @@ impl<T: Debug> BackwardSequenceAwareSyncCursor<T> {
// If the sequence hasn't been indexed, break out of the loop.
break;
}
// We've noticed that this loop can run for a long time because the `await`
// points never yield.
// So, to avoid starving other futures in this task, yield to the runtime
// on each iteration.
tokio::task::yield_now().await;
}
Ok(())
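A minimal sketch of the cooperative-yield pattern introduced here, assuming a tokio runtime; `already_indexed` is a hypothetical stand-in for the cursor's RocksDB reads, not part of the cursor API.
// Break out as soon as a sequence has not been indexed; otherwise keep skipping,
// yielding to the runtime on every iteration so a long run of cache hits cannot
// starve other futures on the same task.
async fn skip_indexed_sketch(mut next_sequence: u32) {
    loop {
        if !already_indexed(next_sequence).await {
            break;
        }
        next_sequence = next_sequence.saturating_sub(1);
        tokio::task::yield_now().await;
    }
}
async fn already_indexed(_sequence: u32) -> bool {
    // hypothetical placeholder for a RocksDB lookup
    false
}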
@ -299,6 +309,17 @@ impl<T: Debug> BackwardSequenceAwareSyncCursor<T> {
}
}
impl<T: Debug> Debug for BackwardSequenceAwareSyncCursor<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("BackwardSequenceAwareSyncCursor")
.field("chunk_size", &self.chunk_size)
.field("current_indexing_snapshot", &self.current_indexing_snapshot)
.field("last_indexed_snapshot", &self.last_indexed_snapshot)
.field("index_mode", &self.index_mode)
.finish()
}
}
#[async_trait]
impl<T: Send + Sync + Clone + Debug + 'static> ContractSyncCursor<T>
for BackwardSequenceAwareSyncCursor<T>
@ -329,6 +350,7 @@ impl<T: Send + Sync + Clone + Debug + 'static> ContractSyncCursor<T>
/// ## logs
/// The logs to ingest. If any logs are duplicated or their sequence is higher than the current indexing snapshot,
/// they are filtered out.
#[instrument(err, ret, skip(logs), fields(range=?range, logs=?logs.iter().map(|(log, _)| log.sequence).collect::<Vec<_>>()))]
async fn update(
&mut self,
logs: Vec<(Indexed<T>, LogMeta)>,

@ -13,12 +13,11 @@ use hyperlane_core::{
SequenceIndexed,
};
use itertools::Itertools;
use tracing::{debug, warn};
use tracing::{debug, instrument, warn};
use super::{LastIndexedSnapshot, TargetSnapshot};
/// A sequence-aware cursor that syncs forwards in perpetuity.
#[derive(Debug)]
pub(crate) struct ForwardSequenceAwareSyncCursor<T> {
/// The max chunk size to query for logs.
/// If in sequence mode, this is the max number of sequences to query.
@ -43,6 +42,11 @@ pub(crate) struct ForwardSequenceAwareSyncCursor<T> {
}
impl<T: Debug> ForwardSequenceAwareSyncCursor<T> {
#[instrument(
skip(db, latest_sequence_querier),
fields(chunk_size, next_sequence, start_block, index_mode),
ret
)]
pub fn new(
chunk_size: u32,
latest_sequence_querier: Arc<dyn SequenceAwareIndexer<T>>,
@ -76,6 +80,7 @@ impl<T: Debug> ForwardSequenceAwareSyncCursor<T> {
/// If there are no logs to index, returns `None`.
/// If there are logs to index, returns the range of logs, either by sequence or block number
/// depending on the mode.
#[instrument(ret)]
pub async fn get_next_range(&mut self) -> Result<Option<RangeInclusive<u32>>> {
// Skip any already indexed logs.
self.skip_indexed().await?;
@ -386,6 +391,18 @@ impl<T: Debug> ForwardSequenceAwareSyncCursor<T> {
}
}
impl<T: Debug> Debug for ForwardSequenceAwareSyncCursor<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("ForwardSequenceAwareSyncCursor")
.field("chunk_size", &self.chunk_size)
.field("current_indexing_snapshot", &self.current_indexing_snapshot)
.field("last_indexed_snapshot", &self.last_indexed_snapshot)
.field("target_snapshot", &self.target_snapshot)
.field("index_mode", &self.index_mode)
.finish()
}
}
#[async_trait]
impl<T: Send + Sync + Clone + Debug + 'static> ContractSyncCursor<T>
for ForwardSequenceAwareSyncCursor<T>
@ -420,6 +437,7 @@ impl<T: Send + Sync + Clone + Debug + 'static> ContractSyncCursor<T>
/// - Even if the logs include a gap, in practice these logs will have already been inserted into the DB.
/// This means that while gaps result in a rewind here, already known logs may be "fast forwarded" through,
/// and the cursor won't actually end up re-indexing already known logs.
#[instrument(err, ret, skip(logs), fields(range=?range, logs=?logs.iter().map(|(log, _)| log.sequence).collect::<Vec<_>>()))]
async fn update(
&mut self,
logs: Vec<(Indexed<T>, LogMeta)>,

@ -82,7 +82,7 @@ where
// from the loop (the sleep duration)
#[allow(clippy::never_loop)]
CursorAction::Query(range) => loop {
debug!(?range, "Looking for for events in index range");
debug!(?range, "Looking for events in index range");
let logs = match self.indexer.fetch_logs(range.clone()).await {
Ok(logs) => logs,

@ -23,6 +23,7 @@ const MESSAGE_DISPATCHED_BLOCK_NUMBER: &str = "message_dispatched_block_number_"
const MESSAGE: &str = "message_";
const NONCE_PROCESSED: &str = "nonce_processed_";
const GAS_PAYMENT_BY_SEQUENCE: &str = "gas_payment_by_sequence_";
const HIGHEST_SEEN_MESSAGE_NONCE: &str = "highest_seen_message_nonce_";
const GAS_PAYMENT_FOR_MESSAGE_ID: &str = "gas_payment_sequence_for_message_id_v2_";
const GAS_PAYMENT_META_PROCESSED: &str = "gas_payment_meta_processed_v3_";
const GAS_EXPENDITURE_FOR_MESSAGE_ID: &str = "gas_expenditure_for_message_id_v2_";
@ -34,7 +35,8 @@ const MERKLE_TREE_INSERTION_BLOCK_NUMBER_BY_LEAF_INDEX: &str =
"merkle_tree_insertion_block_number_by_leaf_index_";
const LATEST_INDEXED_GAS_PAYMENT_BLOCK: &str = "latest_indexed_gas_payment_block";
type DbResult<T> = std::result::Result<T, DbError>;
/// RocksDB result type
pub type DbResult<T> = std::result::Result<T, DbError>;
/// DB handle for storing data tied to a specific Mailbox.
#[derive(Debug, Clone)]
@ -94,6 +96,8 @@ impl HyperlaneRocksDB {
self.store_message_by_id(&id, message)?;
// - `nonce` --> `id`
self.store_message_id_by_nonce(&message.nonce, &id)?;
// Update the max seen nonce to allow forward-backward iteration in the processor
self.try_update_max_seen_message_nonce(message.nonce)?;
// - `nonce` --> `dispatched block number`
self.store_dispatched_block_number_by_nonce(&message.nonce, &dispatched_block_number)?;
Ok(true)
@ -108,6 +112,22 @@ impl HyperlaneRocksDB {
}
}
/// Update the nonce of the highest seen message we're aware of
pub fn try_update_max_seen_message_nonce(&self, nonce: u32) -> DbResult<()> {
let current_max = self
.retrieve_highest_seen_message_nonce()?
.unwrap_or_default();
if nonce >= current_max {
self.store_highest_seen_message_nonce_number(&Default::default(), &nonce)?;
}
Ok(())
}
/// Retrieve the nonce of the highest seen message we're aware of
pub fn retrieve_highest_seen_message_nonce(&self) -> DbResult<Option<u32>> {
self.retrieve_highest_seen_message_nonce_number(&Default::default())
}
/// If the provided gas payment, identified by its metadata, has not been
/// processed, processes the gas payment and records it as processed.
/// Returns whether the gas payment was processed for the first time.
@ -416,6 +436,39 @@ impl HyperlaneWatermarkedLogStore<MerkleTreeInsertion> for HyperlaneRocksDB {
}
}
/// Database interface required for processing messages
pub trait ProcessMessage: Send + Sync {
/// Retrieve the nonce of the highest seen message we're aware of
fn retrieve_highest_seen_message_nonce(&self) -> DbResult<Option<u32>>;
/// Retrieve a message by its nonce
fn retrieve_message_by_nonce(&self, nonce: u32) -> DbResult<Option<HyperlaneMessage>>;
/// Retrieve whether a message has been processed
fn retrieve_processed_by_nonce(&self, nonce: u32) -> DbResult<Option<bool>>;
/// Get the origin domain of the database
fn domain(&self) -> &HyperlaneDomain;
}
impl ProcessMessage for HyperlaneRocksDB {
fn retrieve_highest_seen_message_nonce(&self) -> DbResult<Option<u32>> {
self.retrieve_highest_seen_message_nonce()
}
fn retrieve_message_by_nonce(&self, nonce: u32) -> DbResult<Option<HyperlaneMessage>> {
self.retrieve_message_by_nonce(nonce)
}
fn retrieve_processed_by_nonce(&self, nonce: u32) -> DbResult<Option<bool>> {
self.retrieve_processed_by_nonce(&nonce)
}
fn domain(&self) -> &HyperlaneDomain {
self.domain()
}
}
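As a rough usage sketch (not the relayer's actual processor logic), the ProcessMessage trait lets a caller walk every nonce up to the highest seen one and collect anything not yet processed:
// Rough sketch only: the real processor interleaves forward and backward iteration
// rather than scanning the whole range on each pass.
fn pending_nonces(db: &dyn ProcessMessage) -> DbResult<Vec<u32>> {
    let Some(max_nonce) = db.retrieve_highest_seen_message_nonce()? else {
        return Ok(Vec::new());
    };
    let mut pending = Vec::new();
    for nonce in 0..=max_nonce {
        if db.retrieve_processed_by_nonce(nonce)? != Some(true) {
            pending.push(nonce);
        }
    }
    Ok(pending)
}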
/// Generates paired store and retrieve functions for the given key prefix, key type, and value type
macro_rules! make_store_and_retrieve {
($vis:vis, $name_suffix:ident, $key_prefix: ident, $key_ty:ty, $val_ty:ty$(,)?) => {
@ -479,3 +532,6 @@ make_store_and_retrieve!(
u32,
u64
);
// There's no unit struct Encode/Decode impl, so just use `bool`, have visibility be private (by omitting the first argument), and wrap
// with a function that always uses the `Default::default()` key
make_store_and_retrieve!(, highest_seen_message_nonce_number, HIGHEST_SEEN_MESSAGE_NONCE, bool, u32);

@ -1,6 +1,6 @@
//! Common settings and configuration for Hyperlane agents
//!
//! The correct settings shape is defined in the TypeScript SDK metadata. While the the exact shape
//! The correct settings shape is defined in the TypeScript SDK metadata. While the exact shape
//! and validations it defines are not applied here, we should mirror them.
//! ANY CHANGES HERE NEED TO BE REFLECTED IN THE TYPESCRIPT SDK.
//!

@ -1,6 +1,6 @@
//! This module is responsible for parsing the agent's settings.
//!
//! The correct settings shape is defined in the TypeScript SDK metadata. While the the exact shape
//! The correct settings shape is defined in the TypeScript SDK metadata. While the exact shape
//! and validations it defines are not applied here, we should mirror them.
//! ANY CHANGES HERE NEED TO BE REFLECTED IN THE TYPESCRIPT SDK.

@ -60,7 +60,7 @@ pub struct TracingConfig {
impl TracingConfig {
/// Attempt to instantiate and register a tracing subscriber setup from
/// settings.
pub fn start_tracing(&self, metrics: &CoreMetrics) -> Result<()> {
pub fn start_tracing(&self, metrics: &CoreMetrics) -> Result<console_subscriber::Server> {
let mut target_layer = Targets::new().with_default(self.level);
if self.level < Level::DependencyTrace {
@ -70,6 +70,7 @@ impl TracingConfig {
.with_target("rusoto_core", Level::Info)
.with_target("rustls", Level::Info)
.with_target("reqwest", Level::Info)
.with_target("runtime", Level::Debug)
.with_target("h2", Level::Info)
.with_target("tower", Level::Info)
.with_target("tendermint", Level::Info)
@ -85,13 +86,15 @@ impl TracingConfig {
let fmt_layer: LogOutputLayer<_> = self.fmt.into();
let err_layer = tracing_error::ErrorLayer::default();
let (tokio_layer, tokio_server) = console_subscriber::ConsoleLayer::new();
let subscriber = tracing_subscriber::Registry::default()
.with(tokio_layer)
.with(target_layer)
.with(TimeSpanLifetime::new(metrics))
.with(fmt_layer)
.with(err_layer);
subscriber.try_init()?;
Ok(())
Ok(tokio_server)
}
}
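start_tracing now hands the tokio-console server back to the caller instead of discarding it. A minimal sketch of driving that server, assuming the agent simply spawns `serve()` on its runtime (the real wiring is in agent_main / from_settings shown earlier in this diff):
// Sketch: spawn the tokio-console gRPC server returned by start_tracing.
// Shutdown and error handling are simplified for illustration.
fn spawn_console_server(server: console_subscriber::Server) {
    tokio::spawn(async move {
        if let Err(err) = server.serve().await {
            tracing::warn!(error = %err, "tokio-console server exited");
        }
    });
}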

@ -11,7 +11,6 @@
"func-name-mixedcase": "off",
"reason-string": ["warn",{"maxLength":64}],
"prettier/prettier": "error",
"custom-errors": "off",
"gas-custom-errors": "off"
},
"plugins": ["prettier"]

@ -1,2 +1,3 @@
contracts/mock
contracts/test
contracts/interfaces/avs/vendored

@ -1,5 +1,17 @@
# @hyperlane-xyz/core
## 3.13.0
### Minor Changes
- babe816f8: Support xERC20 and xERC20 Lockbox in SDK and CLI
- b440d98be: Added support for registering/deregistering from the Hyperlane AVS
### Patch Changes
- Updated dependencies [0cf692e73]
- @hyperlane-xyz/utils@3.13.0
## 3.12.0
### Patch Changes

@ -28,7 +28,7 @@ yarn test
### Fixtures
Some forge tests may generate fixtures in the [fixtures](./fixtures/) directory. This allows [SDK](../typescript/sdk) tests to leverage forge fuzzing. These are git ignored and should not be committed.
Some forge tests may generate fixtures. This allows the [SDK](https://github.com/hyperlane-xyz/hyperlane-monorepo/tree/main/typescript/sdk) tests to leverage forge fuzzing. These are git ignored and should not be committed.
## License

@ -44,13 +44,14 @@ contract ECDSAStakeRegistry is
__ECDSAStakeRegistry_init(_serviceManager, _thresholdWeight, _quorum);
}
/// @notice Registers a new operator using a provided signature
/// @notice Registers a new operator using a provided signature and signing key
/// @param _operatorSignature Contains the operator's signature, salt, and expiry
/// @param _signingKey The signing key to add to the operator's history
function registerOperatorWithSignature(
address _operator,
ISignatureUtils.SignatureWithSaltAndExpiry memory _operatorSignature
ISignatureUtils.SignatureWithSaltAndExpiry memory _operatorSignature,
address _signingKey
) external {
_registerOperatorWithSig(_operator, _operatorSignature);
_registerOperatorWithSig(msg.sender, _operatorSignature, _signingKey);
}
/// @notice Deregisters an existing operator
@ -58,6 +59,18 @@ contract ECDSAStakeRegistry is
_deregisterOperator(msg.sender);
}
/**
* @notice Updates the signing key for an operator
* @dev Only callable by the operator themselves
* @param _newSigningKey The new signing key to set for the operator
*/
function updateOperatorSigningKey(address _newSigningKey) external {
if (!_operatorRegistered[msg.sender]) {
revert OperatorNotRegistered();
}
_updateOperatorSigningKey(msg.sender, _newSigningKey);
}
/**
* @notice Updates the StakeRegistry's view of one or more operators' stakes adding a new entry in their history of stake checkpoints,
* @dev Queries stakes from the Eigenlayer core DelegationManager contract
@ -107,18 +120,18 @@ contract ECDSAStakeRegistry is
/// @notice Verifies if the provided signature data is valid for the given data hash.
/// @param _dataHash The hash of the data that was signed.
/// @param _signatureData Encoded signature data consisting of an array of signers, an array of signatures, and a reference block number.
/// @param _signatureData Encoded signature data consisting of an array of operators, an array of signatures, and a reference block number.
/// @return The function selector that indicates the signature is valid according to ERC1271 standard.
function isValidSignature(
bytes32 _dataHash,
bytes memory _signatureData
) external view returns (bytes4) {
(
address[] memory signers,
address[] memory operators,
bytes[] memory signatures,
uint32 referenceBlock
) = abi.decode(_signatureData, (address[], bytes[], uint32));
_checkSignatures(_dataHash, signers, signatures, referenceBlock);
_checkSignatures(_dataHash, operators, signatures, referenceBlock);
return IERC1271Upgradeable.isValidSignature.selector;
}
@ -128,6 +141,37 @@ contract ECDSAStakeRegistry is
return _quorum;
}
/**
* @notice Retrieves the latest signing key for a given operator.
* @param _operator The address of the operator.
* @return The latest signing key of the operator.
*/
function getLastestOperatorSigningKey(
address _operator
) external view returns (address) {
return address(uint160(_operatorSigningKeyHistory[_operator].latest()));
}
/**
* @notice Retrieves the latest signing key for a given operator at a specific block number.
* @param _operator The address of the operator.
* @param _blockNumber The block number to get the operator's signing key.
* @return The signing key of the operator at the given block.
*/
function getOperatorSigningKeyAtBlock(
address _operator,
uint256 _blockNumber
) external view returns (address) {
return
address(
uint160(
_operatorSigningKeyHistory[_operator].getAtBlock(
_blockNumber
)
)
);
}
/// @notice Retrieves the last recorded weight for a given operator.
/// @param _operator The address of the operator.
/// @return uint256 - The latest weight of the operator.
@ -313,9 +357,11 @@ contract ECDSAStakeRegistry is
/// @dev registers an operator through a provided signature
/// @param _operatorSignature Contains the operator's signature, salt, and expiry
/// @param _signingKey The signing key to add to the operator's history
function _registerOperatorWithSig(
address _operator,
ISignatureUtils.SignatureWithSaltAndExpiry memory _operatorSignature
ISignatureUtils.SignatureWithSaltAndExpiry memory _operatorSignature,
address _signingKey
) internal virtual {
if (_operatorRegistered[_operator]) {
revert OperatorAlreadyRegistered();
@ -324,6 +370,7 @@ contract ECDSAStakeRegistry is
_operatorRegistered[_operator] = true;
int256 delta = _updateOperatorWeight(_operator);
_updateTotalWeight(delta);
_updateOperatorSigningKey(_operator, _signingKey);
IServiceManager(_serviceManager).registerOperatorToAVS(
_operator,
_operatorSignature
@ -331,6 +378,28 @@ contract ECDSAStakeRegistry is
emit OperatorRegistered(_operator, _serviceManager);
}
/// @dev Internal function to update an operator's signing key
/// @param _operator The address of the operator to update the signing key for
/// @param _newSigningKey The new signing key to set for the operator
function _updateOperatorSigningKey(
address _operator,
address _newSigningKey
) internal {
address oldSigningKey = address(
uint160(_operatorSigningKeyHistory[_operator].latest())
);
if (_newSigningKey == oldSigningKey) {
return;
}
_operatorSigningKeyHistory[_operator].push(uint160(_newSigningKey));
emit SigningKeyUpdate(
_operator,
block.number,
_newSigningKey,
oldSigningKey
);
}
/// @notice Updates the weight of an operator and returns the previous and current weights.
/// @param _operator The address of the operator to update the weight of.
function _updateOperatorWeight(
@ -401,30 +470,33 @@ contract ECDSAStakeRegistry is
/**
* @notice Common logic to verify a batch of ECDSA signatures against a hash, using either last stake weight or at a specific block.
* @param _dataHash The hash of the data the signers endorsed.
* @param _signers A collection of addresses that endorsed the data hash.
* @param _operators A collection of addresses that endorsed the data hash.
* @param _signatures A collection of signatures matching the signers.
* @param _referenceBlock The block number for evaluating stake weight; use max uint32 for latest weight.
*/
function _checkSignatures(
bytes32 _dataHash,
address[] memory _signers,
address[] memory _operators,
bytes[] memory _signatures,
uint32 _referenceBlock
) internal view {
uint256 signersLength = _signers.length;
address lastSigner;
uint256 signersLength = _operators.length;
address currentOperator;
address lastOperator;
address signer;
uint256 signedWeight;
_validateSignaturesLength(signersLength, _signatures.length);
for (uint256 i; i < signersLength; i++) {
address currentSigner = _signers[i];
currentOperator = _operators[i];
signer = _getOperatorSigningKey(currentOperator, _referenceBlock);
_validateSortedSigners(lastSigner, currentSigner);
_validateSignature(currentSigner, _dataHash, _signatures[i]);
_validateSortedSigners(lastOperator, currentOperator);
_validateSignature(signer, _dataHash, _signatures[i]);
lastSigner = currentSigner;
lastOperator = currentOperator;
uint256 operatorWeight = _getOperatorWeight(
currentSigner,
currentOperator,
_referenceBlock
);
signedWeight += operatorWeight;
@ -474,6 +546,27 @@ contract ECDSAStakeRegistry is
}
}
/// @notice Retrieves an operator's signing key as of a given reference block.
/// @param _operator The operator whose signing key history is queried.
/// @param _referenceBlock The block number at which to read the signing key; must be an already confirmed block.
/// @return The signing key of the operator at the given reference block.
function _getOperatorSigningKey(
address _operator,
uint32 _referenceBlock
) internal view returns (address) {
if (_referenceBlock >= block.number) {
revert InvalidReferenceBlock();
}
return
address(
uint160(
_operatorSigningKeyHistory[_operator].getAtBlock(
_referenceBlock
)
)
);
}
/// @notice Retrieves the operator weight for a signer, either at the last checkpoint or a specified block.
/// @param _signer The address of the signer whose weight is returned.
/// @param _referenceBlock The block number to query the operator's weight at, or the maximum uint32 value for the last checkpoint.
@ -482,11 +575,10 @@ contract ECDSAStakeRegistry is
address _signer,
uint32 _referenceBlock
) internal view returns (uint256) {
if (_referenceBlock == type(uint32).max) {
return _operatorWeightHistory[_signer].latest();
} else {
return _operatorWeightHistory[_signer].getAtBlock(_referenceBlock);
if (_referenceBlock >= block.number) {
revert InvalidReferenceBlock();
}
return _operatorWeightHistory[_signer].getAtBlock(_referenceBlock);
}
/// @notice Retrieve the total stake weight at a specific block or the latest if not specified.
@ -496,11 +588,10 @@ contract ECDSAStakeRegistry is
function _getTotalWeight(
uint32 _referenceBlock
) internal view returns (uint256) {
if (_referenceBlock == type(uint32).max) {
return _totalWeightHistory.latest();
} else {
return _totalWeightHistory.getAtBlock(_referenceBlock);
if (_referenceBlock >= block.number) {
revert InvalidReferenceBlock();
}
return _totalWeightHistory.getAtBlock(_referenceBlock);
}
/// @notice Retrieves the threshold stake for a given reference block.
@ -510,11 +601,10 @@ contract ECDSAStakeRegistry is
function _getThresholdStake(
uint32 _referenceBlock
) internal view returns (uint256) {
if (_referenceBlock == type(uint32).max) {
return _thresholdWeightHistory.latest();
} else {
return _thresholdWeightHistory.getAtBlock(_referenceBlock);
if (_referenceBlock >= block.number) {
revert InvalidReferenceBlock();
}
return _thresholdWeightHistory.getAtBlock(_referenceBlock);
}
/// @notice Validates that the cumulative stake of signed messages meets or exceeds the required threshold.

@ -30,6 +30,10 @@ abstract contract ECDSAStakeRegistryStorage is
/// @notice Defines the duration after which the stake's weight expires.
uint256 internal _stakeExpiry;
/// @notice Maps an operator to their signing key history using checkpoints
mapping(address => CheckpointsUpgradeable.History)
internal _operatorSigningKeyHistory;
/// @notice Tracks the total stake history over time using checkpoints
CheckpointsUpgradeable.History internal _totalWeightHistory;
@ -51,5 +55,5 @@ abstract contract ECDSAStakeRegistryStorage is
// slither-disable-next-line shadowing-state
/// @dev Reserves storage slots for future upgrades
// solhint-disable-next-line
uint256[42] private __gap;
uint256[40] private __gap;
}

@ -12,8 +12,6 @@ struct Quorum {
StrategyParams[] strategies; // An array of strategy parameters to define the quorum
}
/// part of mock interfaces for vendoring necessary Eigenlayer contracts for the hyperlane AVS
/// @author Layr Labs, Inc.
interface IECDSAStakeRegistryEventsAndErrors {
/// @notice Emitted when the system registers an operator
/// @param _operator The address of the registered operator
@ -61,7 +59,19 @@ interface IECDSAStakeRegistryEventsAndErrors {
/// @notice Emits when setting a new threshold weight.
event ThresholdWeightUpdated(uint256 _thresholdWeight);
/// @notice Emitted when an operator's signing key is updated
/// @param operator The address of the operator whose signing key was updated
/// @param updateBlock The block number at which the signing key was updated
/// @param newSigningKey The operator's signing key after the update
/// @param oldSigningKey The operator's signing key before the update
event SigningKeyUpdate(
address indexed operator,
uint256 indexed updateBlock,
address indexed newSigningKey,
address oldSigningKey
);
/// @notice Indicates when the lengths of the signers array and signatures array do not match.
error LengthMismatch();
/// @notice Indicates encountering an invalid length for the signers or signatures array.
@ -76,6 +86,9 @@ interface IECDSAStakeRegistryEventsAndErrors {
/// @notice Thrown when missing operators in an update
error MustUpdateAllOperators();
/// @notice Reference blocks must be for blocks that have already been confirmed
error InvalidReferenceBlock();
/// @notice Indicates operator weights were out of sync and the signed weight exceeds the total
error InvalidSignedWeight();

@ -65,4 +65,16 @@ contract XERC20Test is ERC20Test, IXERC20 {
function burn(address account, uint256 amount) public override {
_burn(account, amount);
}
function setLimits(
address _bridge,
uint256 _mintingLimit,
uint256 _burningLimit
) external {
require(false);
}
function owner() external returns (address) {
return address(0x0);
}
}

@ -5,7 +5,7 @@ import {IFiatToken} from "../interfaces/IFiatToken.sol";
import {HypERC20Collateral} from "../HypERC20Collateral.sol";
// see https://github.com/circlefin/stablecoin-evm/blob/master/doc/tokendesign.md#issuing-and-destroying-tokens
contract HypFiatTokenCollateral is HypERC20Collateral {
contract HypFiatToken is HypERC20Collateral {
constructor(
address _fiatToken,
address _mailbox

@ -4,7 +4,7 @@ pragma solidity >=0.8.0;
import {IXERC20} from "../interfaces/IXERC20.sol";
import {HypERC20Collateral} from "../HypERC20Collateral.sol";
contract HypXERC20Collateral is HypERC20Collateral {
contract HypXERC20 is HypERC20Collateral {
constructor(
address _xerc20,
address _mailbox

@ -0,0 +1,54 @@
// SPDX-License-Identifier: MIT OR Apache-2.0
pragma solidity >=0.8.0;
import {IXERC20Lockbox} from "../interfaces/IXERC20Lockbox.sol";
import {IXERC20, IERC20} from "../interfaces/IXERC20.sol";
import {HypERC20Collateral} from "../HypERC20Collateral.sol";
contract HypXERC20Lockbox is HypERC20Collateral {
uint256 constant MAX_INT = 2 ** 256 - 1;
IXERC20Lockbox public immutable lockbox;
IXERC20 public immutable xERC20;
constructor(
address _lockbox,
address _mailbox
) HypERC20Collateral(address(IXERC20Lockbox(_lockbox).ERC20()), _mailbox) {
lockbox = IXERC20Lockbox(_lockbox);
xERC20 = lockbox.XERC20();
// grant infinite approvals to lockbox
require(
IERC20(wrappedToken).approve(_lockbox, MAX_INT),
"erc20 lockbox approve failed"
);
require(
xERC20.approve(_lockbox, MAX_INT),
"xerc20 lockbox approve failed"
);
}
function _transferFromSender(
uint256 _amount
) internal override returns (bytes memory) {
// transfer erc20 from sender
super._transferFromSender(_amount);
// convert erc20 to xERC20
lockbox.deposit(_amount);
// burn xERC20
xERC20.burn(address(this), _amount);
return bytes("");
}
function _transferTo(
address _recipient,
uint256 _amount,
bytes calldata /*metadata*/
) internal override {
// mint xERC20
xERC20.mint(address(this), _amount);
// convert xERC20 to erc20
lockbox.withdrawTo(_recipient, _amount);
}
}

@ -21,4 +21,19 @@ interface IXERC20 is IERC20 {
* @param _amount The amount of tokens being burned
*/
function burn(address _user, uint256 _amount) external;
/**
* @notice Updates the limits of any bridge
* @dev Can only be called by the owner
* @param _mintingLimit The updated minting limit we are setting to the bridge
* @param _burningLimit The updated burning limit we are setting to the bridge
* @param _bridge The address of the bridge we are setting the limits to
*/
function setLimits(
address _bridge,
uint256 _mintingLimit,
uint256 _burningLimit
) external;
function owner() external returns (address);
}

@ -0,0 +1,61 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity >=0.8.4 <0.9.0;
// adapted from https://github.com/defi-wonderland/xERC20
import {IXERC20} from "./IXERC20.sol";
import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol";
interface IXERC20Lockbox {
/**
* @notice The XERC20 token of this contract
*/
function XERC20() external returns (IXERC20);
/**
* @notice The ERC20 token of this contract
*/
function ERC20() external returns (IERC20);
/**
* @notice Deposit ERC20 tokens into the lockbox
*
* @param _amount The amount of tokens to deposit
*/
function deposit(uint256 _amount) external;
/**
* @notice Deposit ERC20 tokens into the lockbox, and send the XERC20 to a user
*
* @param _user The user to send the XERC20 to
* @param _amount The amount of tokens to deposit
*/
function depositTo(address _user, uint256 _amount) external;
/**
* @notice Deposit the native asset into the lockbox, and send the XERC20 to a user
*
* @param _user The user to send the XERC20 to
*/
function depositNativeTo(address _user) external payable;
/**
* @notice Withdraw ERC20 tokens from the lockbox
*
* @param _amount The amount of tokens to withdraw
*/
function withdraw(uint256 _amount) external;
/**
* @notice Withdraw ERC20 tokens from the lockbox
*
* @param _user The user to withdraw to
* @param _amount The amount of tokens to withdraw
*/
function withdrawTo(address _user, uint256 _amount) external;
}

@ -1,10 +1,10 @@
{
"name": "@hyperlane-xyz/core",
"description": "Core solidity contracts for Hyperlane",
"version": "3.12.2",
"version": "3.13.0",
"dependencies": {
"@eth-optimism/contracts": "^0.6.0",
"@hyperlane-xyz/utils": "3.12.2",
"@hyperlane-xyz/utils": "3.13.0",
"@layerzerolabs/lz-evm-oapp-v2": "2.0.2",
"@openzeppelin/contracts": "^4.9.3",
"@openzeppelin/contracts-upgradeable": "^v4.9.3",

@ -12,6 +12,7 @@ import {ProxyAdmin} from "../../contracts/upgrade/ProxyAdmin.sol";
import {TransparentUpgradeableProxy} from "../../contracts/upgrade/TransparentUpgradeableProxy.sol";
import {ECDSAStakeRegistry} from "../../contracts/avs/ECDSAStakeRegistry.sol";
import {Quorum, StrategyParams} from "../../contracts/interfaces/avs/vendored/IECDSAStakeRegistryEventsAndErrors.sol";
import {ECDSAServiceManagerBase} from "../../contracts/avs/ECDSAServiceManagerBase.sol";
import {HyperlaneServiceManager} from "../../contracts/avs/HyperlaneServiceManager.sol";
import {TestPaymentCoordinator} from "../../contracts/test/avs/TestPaymentCoordinator.sol";
@ -42,6 +43,11 @@ contract DeployAVS is Script {
);
string memory json = vm.readFile(path);
proxyAdmin = ProxyAdmin(
json.readAddress(
string(abi.encodePacked(".", targetEnv, ".proxyAdmin"))
)
);
avsDirectory = IAVSDirectory(
json.readAddress(
string(abi.encodePacked(".", targetEnv, ".avsDirectory"))
@ -88,15 +94,14 @@ contract DeployAVS is Script {
}
}
function run(string memory network) external {
function run(string memory network, string memory metadataUri) external {
deployerPrivateKey = vm.envUint("DEPLOYER_PRIVATE_KEY");
address deployerAddress = vm.addr(deployerPrivateKey);
_loadEigenlayerAddresses(network);
vm.startBroadcast(deployerPrivateKey);
proxyAdmin = new ProxyAdmin();
ECDSAStakeRegistry stakeRegistryImpl = new ECDSAStakeRegistry(
delegationManager
);
@ -118,7 +123,7 @@ contract DeployAVS is Script {
address(proxyAdmin),
abi.encodeWithSelector(
HyperlaneServiceManager.initialize.selector,
msg.sender
address(deployerAddress)
)
);
@ -131,7 +136,24 @@ contract DeployAVS is Script {
quorum
)
);
HyperlaneServiceManager hsm = HyperlaneServiceManager(
address(hsmProxy)
);
require(success, "Failed to initialize ECDSAStakeRegistry");
require(
ECDSAStakeRegistry(address(stakeRegistryProxy)).owner() ==
address(deployerAddress),
"Owner of ECDSAStakeRegistry is not the deployer"
);
require(
HyperlaneServiceManager(address(hsmProxy)).owner() ==
address(deployerAddress),
"Owner of HyperlaneServiceManager is not the deployer"
);
hsm.updateAVSMetadataURI(metadataUri);
console.log(
"ECDSAStakeRegistry Implementation: ",

@ -1,5 +1,6 @@
{
"ethereum": {
"proxyAdmin": "0x75EE15Ee1B4A75Fa3e2fDF5DF3253c25599cc659",
"delegationManager": "0x39053D51B77DC0d36036Fc1fCc8Cb819df8Ef37A",
"avsDirectory": "0x135DDa560e946695d6f155dACaFC6f1F25C1F5AF",
"paymentCoordinator": "",
@ -19,6 +20,7 @@
]
},
"holesky": {
"proxyAdmin": "0x33dB966328Ea213b0f76eF96CA368AB37779F065",
"delegationManager": "0xA44151489861Fe9e3055d95adC98FbD462B948e7",
"avsDirectory": "0x055733000064333CaDDbC92763c58BF0192fFeBf",
"paymentCoordinator": "",

@ -29,6 +29,7 @@ contract HyperlaneServiceManagerTest is EigenlayerBase {
// Operator info
uint256 operatorPrivateKey = 0xdeadbeef;
address operator;
address avsSigningKey = address(0xc0ffee);
bytes32 emptySalt;
uint256 maxExpiry = type(uint256).max;
@ -97,9 +98,11 @@ contract HyperlaneServiceManagerTest is EigenlayerBase {
emptySalt,
maxExpiry
);
vm.prank(operator);
_ecdsaStakeRegistry.registerOperatorWithSignature(
operator,
operatorSignature
operatorSignature,
avsSigningKey
);
// assert
@ -122,12 +125,13 @@ contract HyperlaneServiceManagerTest is EigenlayerBase {
maxExpiry
);
vm.prank(operator);
vm.expectRevert(
"EIP1271SignatureUtils.checkSignature_EIP1271: signature not from signer"
);
_ecdsaStakeRegistry.registerOperatorWithSignature(
operator,
operatorSignature
operatorSignature,
avsSigningKey
);
// assert
@ -409,9 +413,10 @@ contract HyperlaneServiceManagerTest is EigenlayerBase {
maxExpiry
);
vm.prank(operator);
_ecdsaStakeRegistry.registerOperatorWithSignature(
operator,
operatorSignature
operatorSignature,
avsSigningKey
);
}

@ -28,8 +28,8 @@ import {HypERC20} from "../../contracts/token/HypERC20.sol";
import {HypERC20Collateral} from "../../contracts/token/HypERC20Collateral.sol";
import {IXERC20} from "../../contracts/token/interfaces/IXERC20.sol";
import {IFiatToken} from "../../contracts/token/interfaces/IFiatToken.sol";
import {HypXERC20Collateral} from "../../contracts/token/extensions/HypXERC20Collateral.sol";
import {HypFiatTokenCollateral} from "../../contracts/token/extensions/HypFiatTokenCollateral.sol";
import {HypXERC20} from "../../contracts/token/extensions/HypXERC20.sol";
import {HypFiatToken} from "../../contracts/token/extensions/HypFiatToken.sol";
import {HypNative} from "../../contracts/token/HypNative.sol";
import {TokenRouter} from "../../contracts/token/libs/TokenRouter.sol";
import {TokenMessage} from "../../contracts/token/libs/TokenMessage.sol";
@ -394,20 +394,20 @@ contract HypERC20CollateralTest is HypTokenTest {
}
}
contract HypXERC20CollateralTest is HypTokenTest {
contract HypXERC20Test is HypTokenTest {
using TypeCasts for address;
HypXERC20Collateral internal xerc20Collateral;
HypXERC20 internal xerc20Collateral;
function setUp() public override {
super.setUp();
primaryToken = new XERC20Test(NAME, SYMBOL, TOTAL_SUPPLY, DECIMALS);
localToken = new HypXERC20Collateral(
localToken = new HypXERC20(
address(primaryToken),
address(localMailbox)
);
xerc20Collateral = HypXERC20Collateral(address(localToken));
xerc20Collateral = HypXERC20(address(localToken));
xerc20Collateral.enrollRemoteRouter(
DESTINATION,
@ -442,22 +442,22 @@ contract HypXERC20CollateralTest is HypTokenTest {
}
}
contract HypFiatTokenCollateralTest is HypTokenTest {
contract HypFiatTokenTest is HypTokenTest {
using TypeCasts for address;
HypFiatTokenCollateral internal fiatTokenCollateral;
HypFiatToken internal fiatToken;
function setUp() public override {
super.setUp();
primaryToken = new FiatTokenTest(NAME, SYMBOL, TOTAL_SUPPLY, DECIMALS);
localToken = new HypFiatTokenCollateral(
localToken = new HypFiatToken(
address(primaryToken),
address(localMailbox)
);
fiatTokenCollateral = HypFiatTokenCollateral(address(localToken));
fiatToken = HypFiatToken(address(localToken));
fiatTokenCollateral.enrollRemoteRouter(
fiatToken.enrollRemoteRouter(
DESTINATION,
address(remoteToken).addressToBytes32()
);

@ -1,5 +1,7 @@
# @hyperlane-xyz/ccip-server
## 3.13.0
## 3.12.0
## 3.11.1

@ -1,6 +1,6 @@
{
"name": "@hyperlane-xyz/ccip-server",
"version": "3.12.2",
"version": "3.13.0",
"description": "CCIP server",
"typings": "dist/index.d.ts",
"typedocMain": "src/index.ts",

@ -1,5 +1,23 @@
# @hyperlane-xyz/cli
## 3.13.0
### Minor Changes
- b22a0f453: Add hyperlane validator address command to retrieve validator address from AWS
- 39ea7cdef: Implement multi collateral warp routes
- babe816f8: Support xERC20 and xERC20 Lockbox in SDK and CLI
- b440d98be: Added support for registering/deregistering from the Hyperlane AVS
### Patch Changes
- b6b26e2bb: fix: minor change was breaking in registry export
- Updated dependencies [39ea7cdef]
- Updated dependencies [babe816f8]
- Updated dependencies [0cf692e73]
- @hyperlane-xyz/sdk@3.13.0
- @hyperlane-xyz/utils@3.13.0
## 3.12.0
### Minor Changes

@ -12,7 +12,7 @@ To read more about interchain applications, how the protocol works, and how to i
## Setup
Node 16 or newer is required.
Node 18 or newer is required.
**Option 1: Global install:**

@ -5,6 +5,7 @@ import yargs from 'yargs';
import type { LogFormat, LogLevel } from '@hyperlane-xyz/utils';
import './env.js';
import { avsCommand } from './src/commands/avs.js';
import { chainsCommand } from './src/commands/chains.js';
import { configCommand } from './src/commands/config.js';
import { deployCommand } from './src/commands/deploy.js';
@ -20,6 +21,7 @@ import {
} from './src/commands/options.js';
import { sendCommand } from './src/commands/send.js';
import { statusCommand } from './src/commands/status.js';
import { validatorCommand } from './src/commands/validator.js';
import { contextMiddleware } from './src/context/context.js';
import { configureLogger, errorRed } from './src/logger.js';
import { checkVersion } from './src/utils/version-check.js';
@ -48,6 +50,7 @@ try {
},
contextMiddleware,
])
.command(avsCommand)
.command(chainsCommand)
.command(configCommand)
.command(deployCommand)
@ -55,6 +58,7 @@ try {
.command(ismCommand)
.command(sendCommand)
.command(statusCommand)
.command(validatorCommand)
.version(VERSION)
.demandCommand()
.strict()

@ -1,12 +1,15 @@
{
"name": "@hyperlane-xyz/cli",
"version": "3.12.2",
"version": "3.13.0",
"description": "A command-line utility for common Hyperlane operations",
"dependencies": {
"@hyperlane-xyz/registry": "^1.0.7",
"@hyperlane-xyz/sdk": "3.12.2",
"@hyperlane-xyz/utils": "3.12.2",
"@aws-sdk/client-kms": "^3.577.0",
"@aws-sdk/client-s3": "^3.577.0",
"@hyperlane-xyz/registry": "1.3.0",
"@hyperlane-xyz/sdk": "3.13.0",
"@hyperlane-xyz/utils": "3.13.0",
"@inquirer/prompts": "^3.0.0",
"asn1.js": "^5.4.1",
"bignumber.js": "^9.1.1",
"chalk": "^5.3.0",
"ethers": "^5.7.2",

@ -0,0 +1,19 @@
import { ChainMap } from '@hyperlane-xyz/sdk';
import { Address } from '@hyperlane-xyz/utils';
interface AVSContracts {
avsDirectory: Address;
proxyAdmin: Address;
ecdsaStakeRegistry: Address;
hyperlaneServiceManager: Address;
}
// TODO: move to registry
export const avsAddresses: ChainMap<AVSContracts> = {
holesky: {
avsDirectory: '0x055733000064333CaDDbC92763c58BF0192fFeBf',
proxyAdmin: '0x33dB966328Ea213b0f76eF96CA368AB37779F065',
ecdsaStakeRegistry: '0xFfa913705484C9BAea32Ffe9945BeA099A1DFF72',
hyperlaneServiceManager: '0xc76E477437065093D353b7d56c81ff54D167B0Ab',
},
};

@ -0,0 +1,164 @@
import { password } from '@inquirer/prompts';
import { BigNumberish, Wallet, utils } from 'ethers';
import {
ECDSAStakeRegistry__factory,
TestAVSDirectory__factory,
} from '@hyperlane-xyz/core';
import { ChainName } from '@hyperlane-xyz/sdk';
import { Address } from '@hyperlane-xyz/utils';
import { WriteCommandContext } from '../context/types.js';
import { log, logBlue } from '../logger.js';
import { readFileAtPath, resolvePath } from '../utils/files.js';
import { avsAddresses } from './config.js';
export type SignatureWithSaltAndExpiryStruct = {
signature: utils.BytesLike;
salt: utils.BytesLike;
expiry: BigNumberish;
};
export async function registerOperatorWithSignature({
context,
chain,
operatorKeyPath,
avsSigningKey,
}: {
context: WriteCommandContext;
chain: ChainName;
operatorKeyPath: string;
avsSigningKey: Address;
}) {
const { multiProvider } = context;
const operatorAsSigner = await readOperatorFromEncryptedJson(operatorKeyPath);
const provider = multiProvider.getProvider(chain);
const connectedSigner = operatorAsSigner.connect(provider);
const stakeRegistryAddress = avsAddresses[chain].ecdsaStakeRegistry;
const ecdsaStakeRegistry = ECDSAStakeRegistry__factory.connect(
stakeRegistryAddress,
connectedSigner,
);
const domainId = multiProvider.getDomainId(chain);
const avsDirectoryAddress = avsAddresses[chain].avsDirectory;
const operatorSignature = await getOperatorSignature(
domainId,
avsAddresses[chain].hyperlaneServiceManager,
avsDirectoryAddress,
operatorAsSigner,
connectedSigner,
);
// check if the operator is already registered
const operatorStatus = await ecdsaStakeRegistry.operatorRegistered(
operatorAsSigner.address,
);
if (operatorStatus) {
logBlue(
`Operator ${operatorAsSigner.address} already registered to Hyperlane AVS`,
);
return;
}
log(
`Registering operator ${operatorAsSigner.address} attesting ${avsSigningKey} with signature on ${chain}...`,
);
await multiProvider.handleTx(
chain,
ecdsaStakeRegistry.registerOperatorWithSignature(
operatorSignature,
avsSigningKey,
),
);
logBlue(`Operator ${operatorAsSigner.address} registered to Hyperlane AVS`);
}
export async function deregisterOperator({
context,
chain,
operatorKeyPath,
}: {
context: WriteCommandContext;
chain: ChainName;
operatorKeyPath: string;
}) {
const { multiProvider } = context;
const operatorAsSigner = await readOperatorFromEncryptedJson(operatorKeyPath);
const provider = multiProvider.getProvider(chain);
const connectedSigner = operatorAsSigner.connect(provider);
const stakeRegistryAddress = avsAddresses[chain].ecdsaStakeRegistry;
const ecdsaStakeRegistry = ECDSAStakeRegistry__factory.connect(
stakeRegistryAddress,
connectedSigner,
);
log(`Deregistering operator ${operatorAsSigner.address} on ${chain}...`);
await multiProvider.handleTx(chain, ecdsaStakeRegistry.deregisterOperator());
logBlue(
`Operator ${operatorAsSigner.address} deregistered from Hyperlane AVS`,
);
}
async function readOperatorFromEncryptedJson(
operatorKeyPath: string,
): Promise<Wallet> {
const encryptedJson = readFileAtPath(resolvePath(operatorKeyPath));
const keyFilePassword = await password({
mask: '*',
message: 'Enter the password for the operator key file: ',
});
return await Wallet.fromEncryptedJson(encryptedJson, keyFilePassword);
}
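// Illustrative sketch, not part of this change: producing an encrypted JSON
// keystore that readOperatorFromEncryptedJson can read back. The output path and
// password are assumptions for the example; Wallet comes from the ethers import above.
async function writeExampleOperatorKeystore(
  outPath = './operator-key.json',
  pass = 'example-password',
): Promise<string> {
  const { promises: fs } = await import('fs');
  const wallet = Wallet.createRandom();
  // ethers v5: Wallet#encrypt resolves to the keystore JSON as a string
  await fs.writeFile(outPath, await wallet.encrypt(pass));
  return wallet.address;
}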
async function getOperatorSignature(
domain: number,
serviceManager: Address,
avsDirectory: Address,
operator: Wallet,
signer: Wallet,
): Promise<SignatureWithSaltAndExpiryStruct> {
const avsDirectoryContract = TestAVSDirectory__factory.connect(
avsDirectory,
signer,
);
// random salt is ok, because we register the operator right after
const salt = utils.hexZeroPad(utils.randomBytes(32), 32);
// give an expiry timestamp 1 hour from now
const expiry = utils.hexZeroPad(
utils.hexlify(Math.floor(Date.now() / 1000) + 60 * 60),
32,
);
const signingHash =
await avsDirectoryContract.calculateOperatorAVSRegistrationDigestHash(
operator.address,
serviceManager,
salt,
expiry,
);
// Eigenlayer's AVSDirectory expects the signature over the raw hash instead of an EIP-191 compatible toEthSignedMessageHash
// see https://github.com/Layr-Labs/eigenlayer-contracts/blob/ef2ea4a7459884f381057aa9bbcd29c7148cfb63/src/contracts/libraries/EIP1271SignatureUtils.sol#L22
const signature = operator
._signingKey()
.signDigest(utils.arrayify(signingHash));
return {
signature: utils.joinSignature(signature),
salt,
expiry,
};
}
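// Illustrative sketch, not part of this change: why signDigest is used above.
// Wallet#signMessage applies the EIP-191 prefix before signing, which would not
// match the digest AVSDirectory verifies; signDigest signs the raw 32-byte hash.
// Wallet and utils come from the ethers import at the top of this file.
async function compareSignatureSchemes(wallet: Wallet, digest: string) {
  const raw = utils.joinSignature(
    wallet._signingKey().signDigest(utils.arrayify(digest)),
  );
  const eip191 = await wallet.signMessage(utils.arrayify(digest));
  return { raw, eip191, differ: raw !== eip191 };
}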

@ -0,0 +1,84 @@
import { CommandModule, Options } from 'yargs';
import { ChainName } from '@hyperlane-xyz/sdk';
import { Address } from '@hyperlane-xyz/utils';
import {
deregisterOperator,
registerOperatorWithSignature,
} from '../avs/stakeRegistry.js';
import { CommandModuleWithWriteContext } from '../context/types.js';
import { log } from '../logger.js';
/**
* Parent command
*/
export const avsCommand: CommandModule = {
command: 'avs',
describe: 'Interact with the Hyperlane AVS',
builder: (yargs) =>
yargs
.command(registerCommand)
.command(deregisterCommand)
.version(false)
.demandCommand(),
handler: () => log('Command required'),
};
/**
* Registration command
*/
export const registrationOptions: { [k: string]: Options } = {
chain: {
type: 'string',
description: 'Chain to interact with the AVS on',
demandOption: true,
choices: ['holesky', 'ethereum'],
},
operatorKeyPath: {
type: 'string',
description: 'Path to the operator key file',
demandOption: true,
},
avsSigningKey: {
type: 'string',
description: 'Address of the AVS signing key',
demandOption: true,
},
};
const registerCommand: CommandModuleWithWriteContext<{
chain: ChainName;
operatorKeyPath: string;
avsSigningKey: Address;
}> = {
command: 'register',
describe: 'Register operator with the AVS',
builder: registrationOptions,
handler: async ({ context, chain, operatorKeyPath, avsSigningKey }) => {
await registerOperatorWithSignature({
context,
chain,
operatorKeyPath,
avsSigningKey,
});
process.exit(0);
},
};
const deregisterCommand: CommandModuleWithWriteContext<{
chain: ChainName;
operatorKeyPath: string;
}> = {
command: 'deregister',
describe: 'Deregister yourself from the AVS',
builder: registrationOptions,
handler: async ({ context, chain, operatorKeyPath }) => {
await deregisterOperator({
context,
chain,
operatorKeyPath,
});
process.exit(0);
},
};

@ -170,7 +170,7 @@ const validateWarpCommand: CommandModuleWithContext<{ path: string }> = {
path: inputFileCommandOption,
},
handler: async ({ path }) => {
readWarpRouteDeployConfig(path);
await readWarpRouteDeployConfig(path);
logGreen('Config is valid');
process.exit(0);
},

@ -146,3 +146,35 @@ export const addressCommandOption = (
description,
demandOption,
});
/* Validator options */
export const awsAccessKeyCommandOption: Options = {
type: 'string',
description: 'AWS access key of IAM user associated with validator',
default: ENV.AWS_ACCESS_KEY_ID,
defaultDescription: 'process.env.AWS_ACCESS_KEY_ID',
};
export const awsSecretKeyCommandOption: Options = {
type: 'string',
description: 'AWS secret access key of IAM user associated with validator',
default: ENV.AWS_SECRET_ACCESS_KEY,
defaultDescription: 'process.env.AWS_SECRET_ACCESS_KEY',
};
export const awsRegionCommandOption: Options = {
type: 'string',
describe: 'AWS region associated with validator',
default: ENV.AWS_REGION,
defaultDescription: 'process.env.AWS_REGION',
};
export const awsBucketCommandOption: Options = {
type: 'string',
describe: 'AWS S3 bucket containing validator signatures and announcement',
};
export const awsKeyIdCommandOption: Options = {
type: 'string',
describe: 'Key ID from AWS KMS',
};

@ -0,0 +1,51 @@
import { CommandModule } from 'yargs';
import { CommandModuleWithContext } from '../context/types.js';
import { log } from '../logger.js';
import { getValidatorAddress } from '../validator/address.js';
import {
awsAccessKeyCommandOption,
awsBucketCommandOption,
awsKeyIdCommandOption,
awsRegionCommandOption,
awsSecretKeyCommandOption,
} from './options.js';
// Parent command to help configure and set up Hyperlane validators
export const validatorCommand: CommandModule = {
command: 'validator',
describe: 'Configure and manage Hyperlane validators',
builder: (yargs) => yargs.command(addressCommand).demandCommand(),
handler: () => log('Command required'),
};
// If the AWS access key is needed for future validator commands, move it to the context
const addressCommand: CommandModuleWithContext<{
accessKey: string;
secretKey: string;
region: string;
bucket: string;
keyId: string;
}> = {
command: 'address',
describe: 'Get the validator address from S3 bucket or KMS key ID',
builder: {
'access-key': awsAccessKeyCommandOption,
'secret-key': awsSecretKeyCommandOption,
region: awsRegionCommandOption,
bucket: awsBucketCommandOption,
'key-id': awsKeyIdCommandOption,
},
handler: async ({ context, accessKey, secretKey, region, bucket, keyId }) => {
await getValidatorAddress({
context,
accessKey,
secretKey,
region,
bucket,
keyId,
});
process.exit(0);
},
};

@ -46,8 +46,8 @@ export async function createChainConfig({
await new ethers.providers.JsonRpcProvider().getNetwork();
return ethers.providers.JsonRpcProvider.defaultUrl();
},
'rpc url',
'Enter http or https',
'rpc url',
);
const provider = new ethers.providers.JsonRpcProvider(rpcUrl);
@ -58,8 +58,8 @@ export async function createChainConfig({
const client = clientName.split('/')[0];
return `${client}${port}`;
},
'chain name',
'Enter (one word, lower case)',
'chain name',
);
const chainId = parseInt(
@ -68,8 +68,8 @@ export async function createChainConfig({
const network = await provider.getNetwork();
return network.chainId.toString();
},
'chain id',
'Enter a (number)',
'chain id',
),
10,
);

@ -1,30 +1,89 @@
import { confirm, input } from '@inquirer/prompts';
import { ethers } from 'ethers';
import { input, select } from '@inquirer/prompts';
import {
ChainMetadata,
ChainMap,
MailboxClientConfig,
TokenType,
WarpCoreConfig,
WarpCoreConfigSchema,
WarpRouteDeployConfig,
WarpRouteDeployConfigSchema,
} from '@hyperlane-xyz/sdk';
import { objFilter } from '@hyperlane-xyz/utils';
import { assert, objMap, promiseObjAll } from '@hyperlane-xyz/utils';
import { CommandContext } from '../context/types.js';
import { errorRed, logBlue, logGreen } from '../logger.js';
import {
detectAndConfirmOrPrompt,
runMultiChainSelectionStep,
runSingleChainSelectionStep,
} from '../utils/chains.js';
import { readYamlOrJson, writeYamlOrJson } from '../utils/files.js';
export function readWarpRouteDeployConfig(
const TYPE_DESCRIPTIONS: Record<TokenType, string> = {
[TokenType.synthetic]: 'A new ERC20 with remote transfer functionality',
[TokenType.collateral]:
'Extends an existing ERC20 with remote transfer functionality',
[TokenType.native]:
'Extends the native token with remote transfer functionality',
[TokenType.collateralVault]:
'Extends an existing ERC4626 with remote transfer functionality',
[TokenType.collateralFiat]:
'Extends an existing FiatToken with remote transfer functionality',
[TokenType.XERC20]:
'Extends an existing xERC20 with Warp Route functionality',
[TokenType.XERC20Lockbox]:
'Extends an existing xERC20 Lockbox with Warp Route functionality',
// TODO: describe
[TokenType.fastSynthetic]: '',
[TokenType.syntheticUri]: '',
[TokenType.fastCollateral]: '',
[TokenType.collateralUri]: '',
[TokenType.nativeScaled]: '',
};
const TYPE_CHOICES = Object.values(TokenType).map((type) => ({
name: type,
value: type,
description: TYPE_DESCRIPTIONS[type],
}));
async function fillDefaults(
context: CommandContext,
config: ChainMap<Partial<MailboxClientConfig>>,
): Promise<ChainMap<MailboxClientConfig>> {
return promiseObjAll(
objMap(config, async (chain, config): Promise<MailboxClientConfig> => {
let mailbox = config.mailbox;
if (!mailbox) {
const addresses = await context.registry.getChainAddresses(chain);
assert(addresses, `No addresses found for chain ${chain}`);
mailbox = addresses.mailbox;
}
let owner = config.owner;
if (!owner) {
owner =
(await context.signer?.getAddress()) ??
(await context.multiProvider.getSignerAddress(chain));
}
return {
owner,
mailbox,
...config,
};
}),
);
}
export async function readWarpRouteDeployConfig(
filePath: string,
): WarpRouteDeployConfig {
const config = readYamlOrJson(filePath);
context?: CommandContext,
): Promise<WarpRouteDeployConfig> {
let config = readYamlOrJson(filePath);
if (!config)
throw new Error(`No warp route deploy config found at ${filePath}`);
if (context) {
config = await fillDefaults(context, config as any);
}
return WarpRouteDeployConfigSchema.parse(config);
}
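// Illustrative sketch, not part of this change: with a context, fillDefaults pulls a
// missing mailbox from the registry and a missing owner from the signer before the
// schema is parsed; without one, the config must already be complete. The path and
// `context` value are assumptions for the example.
async function readExampleConfig(context: CommandContext) {
  return readWarpRouteDeployConfig('./configs/warp-route-deployment.yaml', context);
}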
@ -40,75 +99,71 @@ export async function createWarpRouteDeployConfig({
outPath: string;
}) {
logBlue('Creating a new warp route deployment config');
const baseChain = await runSingleChainSelectionStep(
context.chainMetadata,
'Select base chain with the original token to warp',
const owner = await detectAndConfirmOrPrompt(
async () => context.signer?.getAddress(),
'Enter the desired',
'owner address',
);
const isNative = await confirm({
message:
'Are you creating a route for the native token of the base chain (e.g. Ether on Ethereum)?',
});
const isNft = isNative
? false
: await confirm({ message: 'Is this an NFT (i.e. ERC-721)?' });
const isYieldBearing =
isNative || isNft
? false
: await confirm({
message:
'Do you want this warp route to be yield-bearing (i.e. deposits into ERC-4626 vault)?',
});
const addressMessage = `Enter the ${
isYieldBearing ? 'ERC-4626 vault' : 'collateral token'
} address`;
const baseAddress = isNative
? ethers.constants.AddressZero
: await input({ message: addressMessage });
const metadataWithoutBase = objFilter(
const warpChains = await runMultiChainSelectionStep(
context.chainMetadata,
(chain, _): _ is ChainMetadata => chain !== baseChain,
);
const syntheticChains = await runMultiChainSelectionStep(
metadataWithoutBase,
'Select chains to which the base token will be connected',
'Select chains to connect',
);
// TODO add more prompts here to support customizing the token metadata
let result: WarpRouteDeployConfig;
if (isNative) {
result = {
[baseChain]: {
type: TokenType.native,
},
};
} else {
result = {
[baseChain]: {
type: isYieldBearing ? TokenType.collateralVault : TokenType.collateral,
token: baseAddress,
isNft,
const result: WarpRouteDeployConfig = {};
for (const chain of warpChains) {
logBlue(`Configuring warp route for chain ${chain}`);
const type = await select({
message: `Select ${chain}'s token type`,
choices: TYPE_CHOICES,
});
// TODO: restore NFT prompting
const isNft =
type === TokenType.syntheticUri || type === TokenType.collateralUri;
const mailbox = await detectAndConfirmOrPrompt(
async () => {
const addresses = await context.registry.getChainAddresses(chain);
return addresses?.mailbox;
},
};
}
`For ${chain}, enter the`,
'mailbox address',
);
syntheticChains.map((chain) => {
result[chain] = {
type: TokenType.synthetic,
};
});
switch (type) {
case TokenType.collateral:
case TokenType.XERC20:
case TokenType.XERC20Lockbox:
case TokenType.collateralFiat:
case TokenType.collateralUri:
case TokenType.fastCollateral:
case TokenType.collateralVault:
result[chain] = {
mailbox,
type,
owner,
isNft,
token: await input({
message: `Enter the existing token address on chain ${chain}`,
}),
};
break;
default:
result[chain] = { mailbox, type, owner, isNft };
}
}
if (isValidWarpRouteDeployConfig(result)) {
try {
const parsed = WarpRouteDeployConfigSchema.parse(result);
logGreen(`Warp Route config is valid, writing to file ${outPath}`);
writeYamlOrJson(outPath, result);
} else {
writeYamlOrJson(outPath, parsed);
} catch (e) {
errorRed(
`Warp route deployment config is invalid, please see https://github.com/hyperlane-xyz/hyperlane-monorepo/blob/main/typescript/cli/examples/warp-route-deployment.yaml for an example`,
);
throw new Error('Invalid multisig config');
throw e;
}
}
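// Illustrative sketch, not part of this change: the shape of a config this prompt flow
// can produce for one collateral and one synthetic chain. Addresses are placeholders
// and would not pass validation as written.
const exampleWarpDeployConfig: WarpRouteDeployConfig = {
  holesky: {
    type: TokenType.collateral,
    token: '0x<existing ERC20 address>',
    mailbox: '0x<holesky mailbox>',
    owner: '0x<owner>',
    isNft: false,
  },
  sepolia: {
    type: TokenType.synthetic,
    mailbox: '0x<sepolia mailbox>',
    owner: '0x<owner>',
    isNft: false,
  },
};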

@ -1,3 +1,4 @@
export const MINIMUM_CORE_DEPLOY_GAS = (1e8).toString();
export const MINIMUM_WARP_DEPLOY_GAS = (1e7).toString();
export const MINIMUM_TEST_SEND_GAS = (3e5).toString();
export const MINIMUM_AVS_GAS = (3e6).toString();

@ -1,13 +1,17 @@
import { ethers } from 'ethers';
import { IRegistry } from '@hyperlane-xyz/registry';
import {
GithubRegistry,
IRegistry,
MergedRegistry,
} from '@hyperlane-xyz/registry';
import { FileSystemRegistry } from '@hyperlane-xyz/registry/fs';
import { ChainName, MultiProvider } from '@hyperlane-xyz/sdk';
import { isNullish } from '@hyperlane-xyz/utils';
import { isHttpsUrl, isNullish, rootLogger } from '@hyperlane-xyz/utils';
import { isSignCommand } from '../commands/signCommands.js';
import { forkNetworkToMultiProvider, verifyAnvil } from '../deploy/dry-run.js';
import { logBlue } from '../logger.js';
import { MergedRegistry } from '../registry/MergedRegistry.js';
import { runSingleChainSelectionStep } from '../utils/chains.js';
import { getImpersonatedSigner, getSigner } from '../utils/keys.js';
@ -81,7 +85,7 @@ export async function getDryRunContext(
}: ContextSettings,
chain?: ChainName,
): Promise<CommandContext> {
const registry = getRegistry(registryUri, registryOverrideUri, true);
const registry = getRegistry(registryUri, registryOverrideUri);
const chainMetadata = await registry.getMetadata();
if (!chain) {
@ -127,14 +131,25 @@ export async function getDryRunContext(
function getRegistry(
primaryRegistryUri: string,
overrideRegistryUri: string,
isDryRun?: boolean,
): IRegistry {
const registryUris = [primaryRegistryUri, overrideRegistryUri]
.map((r) => r.trim())
.filter((r) => !!r);
const logger = rootLogger.child({ module: 'MergedRegistry' });
const registries = [primaryRegistryUri, overrideRegistryUri]
.map((uri) => uri.trim())
.filter((uri) => !!uri)
.map((uri, index) => {
const childLogger = logger.child({ uri, index });
if (isHttpsUrl(uri)) {
return new GithubRegistry({ uri, logger: childLogger });
} else {
return new FileSystemRegistry({
uri,
logger: childLogger,
});
}
});
return new MergedRegistry({
registryUris,
isDryRun,
registries,
logger,
});
}
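// Illustrative sketch, not part of this change: an https URI becomes a GithubRegistry,
// a filesystem path a FileSystemRegistry, and the two are merged. The URIs below are
// assumptions for the example.
async function exampleRegistryLookup() {
  const registry = getRegistry(
    'https://github.com/hyperlane-xyz/hyperlane-registry',
    './local-registry-overrides',
  );
  return registry.getMetadata();
}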

@ -258,6 +258,7 @@ async function executeDeploy({
registry,
ismFactoryContracts,
artifacts,
context.isDryRun,
);
logGreen('ISM factory contracts deployed');
@ -297,7 +298,12 @@ async function executeDeploy({
};
}
artifacts = objMerge(artifacts, isms);
artifacts = await updateChainAddresses(registry, coreContracts, artifacts);
artifacts = await updateChainAddresses(
registry,
coreContracts,
artifacts,
context.isDryRun,
);
logGreen('✅ Core contracts deployed');
log(JSON.stringify(artifacts, null, 2));
@ -395,6 +401,7 @@ async function updateChainAddresses(
registry: IRegistry,
newContracts: HyperlaneContractsMap<any>,
otherAddresses: HyperlaneAddressesMap<any>,
isDryRun?: boolean,
) {
let newAddresses = serializeContractsMap(newContracts);
// The HyperlaneCoreDeployer is returning a nested object with ISM addresses
@ -407,6 +414,9 @@ async function updateChainAddresses(
);
});
const mergedAddresses = objMerge(otherAddresses, newAddresses);
if (isDryRun) return mergedAddresses;
for (const chainName of Object.keys(newContracts)) {
await registry.updateChain({
chainName,

@ -18,36 +18,6 @@ import { assertSigner } from '../utils/keys.js';
import { completeDryRun } from './dry-run.js';
export async function runPreflightChecks({
context,
origin,
remotes,
minGas,
chainsToGasCheck,
}: {
context: WriteCommandContext;
origin: ChainName;
remotes: ChainName[];
minGas: string;
chainsToGasCheck?: ChainName[];
}) {
log('Running pre-flight checks...');
if (!origin || !remotes?.length) throw new Error('Invalid chain selection');
logGreen('✅ Chain selections are valid');
if (remotes.includes(origin))
throw new Error('Origin and remotes must be distinct');
logGreen('✅ Origin and remote are distinct');
return runPreflightChecksForChains({
context,
chains: [origin, ...remotes],
minGas,
chainsToGasCheck,
});
}
export async function runPreflightChecksForChains({
context,
chains,

@ -1,38 +1,35 @@
import { confirm, input } from '@inquirer/prompts';
import { confirm } from '@inquirer/prompts';
import {
ChainMap,
ChainName,
ConnectionClientConfig,
EvmTokenAdapter,
HypERC20Deployer,
HypERC721Deployer,
HyperlaneContractsMap,
MinimalTokenMetadata,
MultiProtocolProvider,
MultiProvider,
RouterConfig,
TOKEN_TYPE_TO_STANDARD,
TokenConfig,
TokenFactories,
TokenRouterConfig,
TokenType,
WarpCoreConfig,
WarpRouteDeployConfig,
getTokenConnectionId,
isCollateralConfig,
isNativeConfig,
isSyntheticConfig,
isTokenMetadata,
} from '@hyperlane-xyz/sdk';
import { ProtocolType } from '@hyperlane-xyz/utils';
import { readWarpRouteDeployConfig } from '../config/warp.js';
import { MINIMUM_WARP_DEPLOY_GAS } from '../consts.js';
import { WriteCommandContext } from '../context/types.js';
import { log, logBlue, logGray, logGreen } from '../logger.js';
import { log, logBlue, logGray, logGreen, logTable } from '../logger.js';
import { isFile, runFileSelectionStep } from '../utils/files.js';
import { completeDeploy, prepareDeploy, runPreflightChecks } from './utils.js';
import {
completeDeploy,
prepareDeploy,
runPreflightChecksForChains,
} from './utils.js';
interface DeployParams {
context: WriteCommandContext;
configMap: WarpRouteDeployConfig;
}
export async function runWarpRouteDeploy({
context,
@ -59,30 +56,28 @@ export async function runWarpRouteDeploy({
`Using warp route deployment config at ${warpRouteDeploymentConfigPath}`,
);
}
const warpRouteConfig = readWarpRouteDeployConfig(
const warpRouteConfig = await readWarpRouteDeployConfig(
warpRouteDeploymentConfigPath,
);
const configs = await runBuildConfigStep({
context,
warpRouteConfig,
});
);
const deploymentParams = {
context,
...configs,
configMap: warpRouteConfig,
};
logBlue('Warp route deployment plan');
await runDeployPlanStep(deploymentParams);
await runPreflightChecks({
...deploymentParams,
const chains = Object.keys(warpRouteConfig);
await runPreflightChecksForChains({
context,
chains,
minGas: MINIMUM_WARP_DEPLOY_GAS,
});
const userAddress = await signer.getAddress();
const chains = [deploymentParams.origin, ...configs.remotes];
const initialBalances = await prepareDeploy(context, userAddress, chains);
@ -91,111 +86,13 @@ export async function runWarpRouteDeploy({
await completeDeploy(context, 'warp', initialBalances, userAddress, chains);
}
async function runBuildConfigStep({
context,
warpRouteConfig,
}: {
context: WriteCommandContext;
warpRouteConfig: WarpRouteDeployConfig;
}) {
const { registry, signer, multiProvider, skipConfirmation } = context;
log('Assembling token configs');
const chainAddresses = await registry.getAddresses();
const owner = await signer.getAddress();
const requiredRouterFields: Array<keyof ConnectionClientConfig> = ['mailbox'];
const remotes: string[] = [];
/// @dev This will keep track of the base collateral metadata which can get overwritten if there are multiple collaterals.
/// These 'base' variables are used to derive synthetic fields
/// @todo Remove this artifact when multi-collateral is enabled
let baseChainName = '';
let baseMetadata = {} as MinimalTokenMetadata;
// Define configs that coalesce together values from the config file
for (const [chain, config] of Object.entries(warpRouteConfig)) {
// the artifacts, and the SDK as a fallback
config.owner = owner;
config.mailbox = config.mailbox || chainAddresses[chain]?.mailbox;
config.interchainSecurityModule =
config.interchainSecurityModule ||
chainAddresses[chain]?.interchainSecurityModule ||
chainAddresses[chain]?.multisigIsm;
// config.ismFactory: chainAddresses[baseChainName].domainRoutingIsmFactory, // TODO fix when updating from routingIsm
if (isCollateralConfig(config) || isNativeConfig(config)) {
// Store the base metadata
baseChainName = chain;
baseMetadata = await fetchBaseTokenMetadata(chain, config, multiProvider);
log(
`Using token metadata: Name: ${baseMetadata.name}, Symbol: ${baseMetadata.symbol}, Decimals: ${baseMetadata.decimals}`,
);
if (isCollateralConfig(config)) {
config.name = baseMetadata.name;
config.symbol = baseMetadata.symbol;
config.decimals = baseMetadata.decimals;
}
} else if (isSyntheticConfig(config)) {
// Use the config, or baseMetadata
config.name = config.name || baseMetadata.name;
config.symbol = config.symbol || baseMetadata.symbol;
config.totalSupply = config.totalSupply || 0;
remotes.push(chain);
}
let hasShownInfo = false;
// Request input for any address fields that are missing
for (const field of requiredRouterFields) {
if (config[field]) continue;
if (skipConfirmation)
throw new Error(`Field ${field} for token on ${chain} required`);
if (!hasShownInfo) {
logBlue(
'Some router fields are missing. Please enter them now, add them to your warp config, or use the --core flag to use deployment artifacts.',
);
hasShownInfo = true;
}
const value = await input({
message: `Enter ${field} for ${getTokenName(config)} token on ${chain}`,
});
if (!value) throw new Error(`Field ${field} required`);
config[field] = value.trim();
}
}
log('Token configs ready');
return {
configMap: warpRouteConfig,
origin: baseChainName,
metadata: baseMetadata,
remotes,
};
}
interface DeployParams {
context: WriteCommandContext;
configMap: WarpRouteDeployConfig;
metadata: MinimalTokenMetadata;
origin: ChainName;
remotes: ChainName[];
}
async function runDeployPlanStep({ context, configMap }: DeployParams) {
const { skipConfirmation } = context;
async function runDeployPlanStep({
context,
configMap,
origin,
remotes,
}: DeployParams) {
const { signer, skipConfirmation } = context;
const address = await signer.getAddress();
const baseToken = configMap[origin];
const baseName = getTokenName(baseToken);
logBlue('\nDeployment plan');
logGray('===============');
log(`Collateral type will be ${baseToken.type}`);
log(`Transaction signer and owner of new contracts will be ${address}`);
log(`Deploying a warp route with a base of ${baseName} token on ${origin}`);
log(`Connecting it to new synthetic tokens on ${remotes.join(', ')}`);
log(`Using token standard ${configMap.isNft ? 'ERC721' : 'ERC20'}`);
logTable(configMap);
if (skipConfirmation) return;
@ -210,80 +107,67 @@ async function executeDeploy(params: DeployParams) {
const {
configMap,
context: { registry, multiProvider, isDryRun },
context: { registry, multiProvider, isDryRun, dryRunChain },
} = params;
const deployer = configMap.isNft
? new HypERC721Deployer(multiProvider)
: new HypERC20Deployer(multiProvider);
const config = isDryRun
? { [params.origin]: configMap[params.origin] }
: configMap;
const config: WarpRouteDeployConfig =
isDryRun && dryRunChain
? { [dryRunChain]: configMap[dryRunChain] }
: configMap;
const deployedContracts = await deployer.deploy(
config as ChainMap<TokenConfig & RouterConfig>,
); /// @todo remove ChainMap once Hyperlane deployers are refactored
const deployedContracts = await deployer.deploy(config);
logGreen('✅ Hyp token deployments complete');
if (!isDryRun) log('Writing deployment artifacts');
const warpCoreConfig = getWarpCoreConfig(params, deployedContracts);
await registry.addWarpRoute(warpCoreConfig);
const warpCoreConfig = await getWarpCoreConfig(params, deployedContracts);
if (!isDryRun) {
log('Writing deployment artifacts');
await registry.addWarpRoute(warpCoreConfig);
}
log(JSON.stringify(warpCoreConfig, null, 2));
logBlue('Deployment is complete!');
}
async function fetchBaseTokenMetadata(
chain: string,
config: TokenRouterConfig,
multiProvider: MultiProvider,
): Promise<MinimalTokenMetadata> {
if (config.type === TokenType.native) {
// If it's a native token, use the chain's native token metadata
const chainNativeToken = multiProvider.getChainMetadata(chain).nativeToken;
if (chainNativeToken) return chainNativeToken;
else throw new Error(`No native token metadata for ${chain}`);
} else if (
config.type === TokenType.collateralVault ||
config.type === TokenType.collateral
) {
// If it's a collateral type, use a TokenAdapter to query for its metadata
log(`Fetching token metadata for ${config.token} on ${chain}`);
const adapter = new EvmTokenAdapter(
chain,
MultiProtocolProvider.fromMultiProvider(multiProvider),
{ token: config.token },
);
return adapter.getMetadata();
} else {
throw new Error(
`Unsupported token: ${config.type}. Consider setting token metadata in your deployment config.`,
);
}
}
function getTokenName(token: TokenConfig) {
return token.type === TokenType.native ? 'native' : token.name;
}
function getWarpCoreConfig(
{ configMap, metadata }: DeployParams,
async function getWarpCoreConfig(
{ configMap, context }: DeployParams,
contracts: HyperlaneContractsMap<TokenFactories>,
): WarpCoreConfig {
): Promise<WarpCoreConfig> {
const warpCoreConfig: WarpCoreConfig = { tokens: [] };
// TODO: replace with warp read
const tokenMetadata = await HypERC20Deployer.deriveTokenMetadata(
context.multiProvider,
configMap,
);
// First pass, create token configs
for (const [chainName, contract] of Object.entries(contracts)) {
const config = configMap[chainName];
const metadata = {
...tokenMetadata,
...config,
};
if (!isTokenMetadata(metadata)) {
throw new Error('Missing required token metadata');
}
const { decimals } = metadata;
if (!decimals) {
throw new Error('Missing decimals on token metadata');
}
const collateralAddressOrDenom =
config.type === TokenType.collateral ? config.token : undefined;
warpCoreConfig.tokens.push({
chainName,
standard: TOKEN_TYPE_TO_STANDARD[config.type],
name: metadata.name,
symbol: metadata.symbol,
decimals: metadata.decimals,
...metadata,
decimals,
addressOrDenom:
contract[configMap[chainName].type as keyof TokenFactories].address,
collateralAddressOrDenom,

@ -1,156 +0,0 @@
import { Logger } from 'pino';
import {
BaseRegistry,
ChainAddresses,
GithubRegistry,
IRegistry,
RegistryContent,
RegistryType,
} from '@hyperlane-xyz/registry';
import { LocalRegistry } from '@hyperlane-xyz/registry/local';
import {
ChainMap,
ChainMetadata,
ChainName,
WarpCoreConfig,
} from '@hyperlane-xyz/sdk';
import {
isHttpsUrl,
objKeys,
objMerge,
rootLogger,
} from '@hyperlane-xyz/utils';
export interface MergedRegistryOptions {
registryUris: Array<string>;
isDryRun?: boolean;
logger?: Logger;
}
export class MergedRegistry extends BaseRegistry implements IRegistry {
public readonly type = RegistryType.Local;
public readonly registries: Array<IRegistry>;
public readonly isDryRun: boolean;
constructor({ registryUris, logger, isDryRun }: MergedRegistryOptions) {
logger ||= rootLogger.child({ module: 'MergedRegistry' });
super({ uri: '__merged_registry__', logger });
if (!registryUris.length)
throw new Error('At least one registry URI is required');
this.registries = registryUris.map((uri, index) => {
if (isHttpsUrl(uri)) {
return new GithubRegistry({ uri, logger: logger!.child({ index }) });
} else {
return new LocalRegistry({ uri, logger: logger!.child({ index }) });
}
});
this.isDryRun = !!isDryRun;
}
async listRegistryContent(): Promise<RegistryContent> {
const results = await this.multiRegistryRead((r) =>
r.listRegistryContent(),
);
return results.reduce((acc, content) => objMerge(acc, content), {
chains: {},
deployments: {},
});
}
async getChains(): Promise<Array<ChainName>> {
return objKeys(await this.getMetadata);
}
async getMetadata(): Promise<ChainMap<ChainMetadata>> {
const results = await this.multiRegistryRead((r) => r.getMetadata());
return results.reduce((acc, content) => objMerge(acc, content), {});
}
async getChainMetadata(chainName: ChainName): Promise<ChainMetadata | null> {
return (await this.getMetadata())[chainName] || null;
}
async getAddresses(): Promise<ChainMap<ChainAddresses>> {
const results = await this.multiRegistryRead((r) => r.getAddresses());
return results.reduce((acc, content) => objMerge(acc, content), {});
}
async getChainAddresses(
chainName: ChainName,
): Promise<ChainAddresses | null> {
return (await this.getAddresses())[chainName] || null;
}
async addChain(chain: {
chainName: ChainName;
metadata?: ChainMetadata;
addresses?: ChainAddresses;
}): Promise<void> {
return this.multiRegistryWrite(
async (registry) => await registry.addChain(chain),
`adding chain ${chain.chainName}`,
);
}
async updateChain(chain: {
chainName: ChainName;
metadata?: ChainMetadata;
addresses?: ChainAddresses;
}): Promise<void> {
return this.multiRegistryWrite(
async (registry) => await registry.updateChain(chain),
`updating chain ${chain.chainName}`,
);
}
async removeChain(chain: ChainName): Promise<void> {
return this.multiRegistryWrite(
async (registry) => await registry.removeChain(chain),
`removing chain ${chain}`,
);
}
async addWarpRoute(config: WarpCoreConfig): Promise<void> {
return this.multiRegistryWrite(
async (registry) => await registry.addWarpRoute(config),
'adding warp route',
);
}
protected multiRegistryRead<R>(
readFn: (registry: IRegistry) => Promise<R> | R,
) {
return Promise.all(this.registries.map(readFn));
}
protected async multiRegistryWrite(
writeFn: (registry: IRegistry) => Promise<void>,
logMsg: string,
): Promise<void> {
if (this.isDryRun) return;
for (const registry of this.registries) {
// TODO remove this when GithubRegistry supports write methods
if (registry.type === RegistryType.Github) {
this.logger.warn(`skipping ${logMsg} at ${registry.type} registry`);
continue;
}
try {
this.logger.info(
`${logMsg} at ${registry.type} registry at ${registry.uri}`,
);
await writeFn(registry);
this.logger.info(`done ${logMsg} at ${registry.type} registry`);
} catch (error) {
// To prevent loss of artifacts, MergedRegistry write methods are failure tolerant
this.logger.error(
`failure ${logMsg} at ${registry.type} registry`,
error,
);
}
}
}
}

@ -5,7 +5,7 @@ import { addressToBytes32, timeout } from '@hyperlane-xyz/utils';
import { MINIMUM_TEST_SEND_GAS } from '../consts.js';
import { CommandContext, WriteCommandContext } from '../context/types.js';
import { runPreflightChecks } from '../deploy/utils.js';
import { runPreflightChecksForChains } from '../deploy/utils.js';
import { errorRed, log, logBlue, logGreen } from '../logger.js';
import { runSingleChainSelectionStep } from '../utils/chains.js';
@ -42,12 +42,11 @@ export async function sendTestMessage({
);
}
await runPreflightChecks({
await runPreflightChecksForChains({
context,
origin,
remotes: [destination],
minGas: MINIMUM_TEST_SEND_GAS,
chains: [origin, destination],
chainsToGasCheck: [origin],
minGas: MINIMUM_TEST_SEND_GAS,
});
await timeout(

@ -13,7 +13,7 @@ import { timeout } from '@hyperlane-xyz/utils';
import { readWarpRouteConfig } from '../config/warp.js';
import { MINIMUM_TEST_SEND_GAS } from '../consts.js';
import { WriteCommandContext } from '../context/types.js';
import { runPreflightChecks } from '../deploy/utils.js';
import { runPreflightChecksForChains } from '../deploy/utils.js';
import { logBlue, logGreen, logRed } from '../logger.js';
import { runSingleChainSelectionStep } from '../utils/chains.js';
import { runTokenSelectionStep } from '../utils/tokens.js';
@ -57,12 +57,11 @@ export async function sendTestTransfer({
);
}
await runPreflightChecks({
await runPreflightChecksForChains({
context,
origin,
remotes: [destination],
minGas: MINIMUM_TEST_SEND_GAS,
chains: [origin, destination],
chainsToGasCheck: [origin],
minGas: MINIMUM_TEST_SEND_GAS,
});
await timeout(

@ -2,7 +2,7 @@ import { Wallet, providers } from 'ethers';
import fs from 'fs';
import { ERC20Test__factory } from '@hyperlane-xyz/core';
import { TokenType, WarpRouteDeployConfig } from '@hyperlane-xyz/sdk';
import { TokenType } from '@hyperlane-xyz/sdk';
async function deployERC20() {
const [rpcUrl, chain1, chain2, privateKey, outPath] = process.argv.slice(2);
@ -19,13 +19,14 @@ async function deployERC20() {
await contract.deployed();
console.log('Test ERC20 contract deployed', contract.address);
const warpDeploymentConfig: WarpRouteDeployConfig = {
const warpDeploymentConfig = {
[chain1]: {
type: TokenType.collateral,
token: contract.address,
isNft: false,
},
[chain2]: { type: TokenType.synthetic },
[chain2]: {
type: TokenType.synthetic,
},
};
console.log('Writing deployment config to', outPath);

@ -75,18 +75,20 @@ function handleNewChain(chainNames: string[]) {
}
export async function detectAndConfirmOrPrompt(
detect: () => Promise<string>,
label: string,
detect: () => Promise<string | undefined>,
prompt: string,
label: string,
): Promise<string> {
let detectedValue: string | undefined;
try {
detectedValue = await detect();
const confirmed = await confirm({
message: `Detected ${label} as ${detectedValue}, is this correct?`,
});
if (confirmed) {
return detectedValue;
if (detectedValue) {
const confirmed = await confirm({
message: `Detected ${label} as ${detectedValue}, is this correct?`,
});
if (confirmed) {
return detectedValue;
}
}
// eslint-disable-next-line no-empty
} catch (e) {}
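// Illustrative sketch, not part of this change: the argument order is now
// (detect, prompt, label) and detect may return undefined. Values are assumptions.
// const owner = await detectAndConfirmOrPrompt(
//   async () => context.signer?.getAddress(), // detected value (may be undefined)
//   'Enter the desired',                      // prompt prefix used when asking for input
//   'owner address',                          // label used in the confirmation message
// );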

@ -4,6 +4,9 @@ const envScheme = z.object({
HYP_KEY: z.string().optional(),
ANVIL_IP_ADDR: z.string().optional(),
ANVIL_PORT: z.number().optional(),
AWS_ACCESS_KEY_ID: z.string().optional(),
AWS_SECRET_ACCESS_KEY: z.string().optional(),
AWS_REGION: z.string().optional(),
});
const parsedEnv = envScheme.safeParse(process.env);

@ -1,6 +1,7 @@
import { input } from '@inquirer/prompts';
import select from '@inquirer/select';
import fs from 'fs';
import os from 'os';
import path from 'path';
import { parse as yamlParse, stringify as yamlStringify } from 'yaml';
@ -15,6 +16,14 @@ export type ArtifactsFile = {
description: string;
};
export function resolvePath(filePath: string): string {
if (filePath.startsWith('~')) {
const homedir = os.homedir();
return path.join(homedir, filePath.slice(1));
}
return filePath;
}
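// Illustrative sketch, not part of this change: '~' is expanded against the home
// directory, other paths pass through unchanged. The expanded path is an assumption.
// resolvePath('~/keys/operator.json'); // -> '/home/<user>/keys/operator.json'
// resolvePath('./operator.json');      // -> './operator.json'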
export function isFile(filepath: string) {
if (!filepath) return false;
try {

@ -0,0 +1,166 @@
import { GetPublicKeyCommand, KMSClient } from '@aws-sdk/client-kms';
import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3';
import { input } from '@inquirer/prompts';
// @ts-ignore
import asn1 from 'asn1.js';
import { ethers } from 'ethers';
import { assert } from '@hyperlane-xyz/utils';
import { CommandContext } from '../context/types.js';
import { log, logBlue } from '../logger.js';
export async function getValidatorAddress({
context,
accessKey,
secretKey,
region,
bucket,
keyId,
}: {
context: CommandContext;
accessKey?: string;
secretKey?: string;
region?: string;
bucket?: string;
keyId?: string;
}) {
if (!bucket && !keyId) {
throw new Error('Must provide either an S3 bucket or a KMS Key ID.');
}
// Query user for AWS parameters if not passed in or stored as .env variables
accessKey ||= await getAccessKeyId(context.skipConfirmation);
secretKey ||= await getSecretAccessKey(context.skipConfirmation);
region ||= await getRegion(context.skipConfirmation);
assert(accessKey, 'No access key ID set.');
assert(secretKey, 'No secret access key set.');
assert(region, 'No AWS region set.');
let validatorAddress;
if (bucket) {
validatorAddress = await getAddressFromBucket(
bucket,
accessKey,
secretKey,
region,
);
} else {
validatorAddress = await getAddressFromKey(
keyId!,
accessKey,
secretKey,
region,
);
}
logBlue('Validator address is: ');
log(validatorAddress);
}
/**
* Displays validator key address from
* validator announcement S3 bucket.
*/
async function getAddressFromBucket(
bucket: string,
accessKeyId: string,
secretAccessKey: string,
region: string,
) {
const s3Client = new S3Client({
region: region,
credentials: {
accessKeyId,
secretAccessKey,
},
});
const { Body } = await s3Client.send(
new GetObjectCommand({
Bucket: bucket,
Key: 'announcement.json',
}),
);
if (Body) {
const announcement = JSON.parse(await Body?.transformToString());
return announcement['value']['validator'];
} else {
throw new Error('Announcement file announcement.json not found in bucket');
}
}
/**
* Logs validator key address using AWS KMS key ID.
* Taken from github.com/tkporter/get-aws-kms-address/
*/
async function getAddressFromKey(
keyId: string,
accessKeyId: string,
secretAccessKey: string,
region: string,
) {
const client = new KMSClient({
region: region,
credentials: {
accessKeyId,
secretAccessKey,
},
});
const publicKeyResponse = await client.send(
new GetPublicKeyCommand({ KeyId: keyId }),
);
return getEthereumAddress(Buffer.from(publicKeyResponse.PublicKey!));
}
const EcdsaPubKey = asn1.define('EcdsaPubKey', function (this: any) {
this.seq().obj(
this.key('algo').seq().obj(this.key('a').objid(), this.key('b').objid()),
this.key('pubKey').bitstr(),
);
});
function getEthereumAddress(publicKey: Buffer): string {
// The public key is ASN1 encoded in a format according to
// https://tools.ietf.org/html/rfc5480#section-2
const res = EcdsaPubKey.decode(publicKey, 'der');
let pubKeyBuffer: Buffer = res.pubKey.data;
// The public key starts with a 0x04 prefix that needs to be removed
// more info: https://www.oreilly.com/library/view/mastering-ethereum/9781491971932/ch04.html
pubKeyBuffer = pubKeyBuffer.slice(1, pubKeyBuffer.length);
const address = ethers.utils.keccak256(pubKeyBuffer); // keccak256 hash of publicKey
return `0x${address.slice(-40)}`; // take last 20 bytes as ethereum address
}
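// Illustrative sketch, not part of this change: the same derivation can be
// cross-checked with ethers, which accepts the uncompressed 0x04-prefixed public
// key and returns the checksummed address. `ethers` is imported at the top of this file.
function getEthereumAddressViaEthers(uncompressedPubKey: Buffer): string {
  return ethers.utils.computeAddress('0x' + uncompressedPubKey.toString('hex'));
}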
async function getAccessKeyId(skipConfirmation: boolean) {
if (skipConfirmation) throw new Error('No AWS access key ID set.');
else
return await input({
message:
'Please enter AWS access key ID or use the AWS_ACCESS_KEY_ID environment variable.',
});
}
async function getSecretAccessKey(skipConfirmation: boolean) {
if (skipConfirmation) throw new Error('No AWS secret access key set.');
else
return await input({
message:
'Please enter AWS secret access key or use the AWS_SECRET_ACCESS_KEY environment variable.',
});
}
async function getRegion(skipConfirmation: boolean) {
if (skipConfirmation) throw new Error('No AWS region set.');
else
return await input({
message:
'Please enter AWS region or use the AWS_REGION environment variable.',
});
}

@ -1 +1 @@
export const VERSION = '3.12.2';
export const VERSION = '3.13.0';

@ -1,5 +1,17 @@
# @hyperlane-xyz/helloworld
## 3.13.0
### Patch Changes
- b6b26e2bb: fix: minor change was breaking in registry export
- Updated dependencies [39ea7cdef]
- Updated dependencies [babe816f8]
- Updated dependencies [b440d98be]
- Updated dependencies [0cf692e73]
- @hyperlane-xyz/sdk@3.13.0
- @hyperlane-xyz/core@3.13.0
## 3.12.0
### Patch Changes

@ -1,11 +1,11 @@
{
"name": "@hyperlane-xyz/helloworld",
"description": "A basic skeleton of an Hyperlane app",
"version": "3.12.2",
"version": "3.13.0",
"dependencies": {
"@hyperlane-xyz/core": "3.12.2",
"@hyperlane-xyz/registry": "^1.0.7",
"@hyperlane-xyz/sdk": "3.12.2",
"@hyperlane-xyz/core": "3.13.0",
"@hyperlane-xyz/registry": "1.3.0",
"@hyperlane-xyz/sdk": "3.13.0",
"@openzeppelin/contracts-upgradeable": "^4.9.3",
"ethers": "^5.7.2"
},

@ -1,5 +1,23 @@
# @hyperlane-xyz/infra
## 3.13.0
### Minor Changes
- 39ea7cdef: Implement multi collateral warp routes
- 0cf692e73: Implement metadata builder fetching from message
### Patch Changes
- b6b26e2bb: fix: minor change was breaking in registry export
- Updated dependencies [b6b26e2bb]
- Updated dependencies [39ea7cdef]
- Updated dependencies [babe816f8]
- Updated dependencies [0cf692e73]
- @hyperlane-xyz/helloworld@3.13.0
- @hyperlane-xyz/sdk@3.13.0
- @hyperlane-xyz/utils@3.13.0
## 3.12.0
### Patch Changes

@ -53,18 +53,20 @@ export const hyperlaneContextAgentChainConfig: AgentChainConfig = {
bsc: true,
celo: true,
ethereum: true,
neutron: true,
gnosis: true,
injective: true,
inevm: true,
mantapacific: true,
mode: true,
moonbeam: true,
neutron: true,
optimism: true,
polygon: true,
gnosis: true,
scroll: true,
polygonzkevm: true,
injective: true,
inevm: true,
redstone: true,
scroll: true,
viction: true,
zetachain: true,
},
[Role.Relayer]: {
arbitrum: true,
@ -75,19 +77,21 @@ export const hyperlaneContextAgentChainConfig: AgentChainConfig = {
bsc: true,
celo: true,
ethereum: true,
// At the moment, we only relay between Neutron and Manta Pacific on the neutron context.
neutron: false,
gnosis: true,
injective: true,
inevm: true,
mantapacific: true,
mode: true,
moonbeam: true,
// At the moment, we only relay between Neutron and Manta Pacific on the neutron context.
neutron: false,
optimism: true,
polygon: true,
gnosis: true,
scroll: true,
polygonzkevm: true,
injective: true,
inevm: true,
redstone: true,
scroll: true,
viction: true,
zetachain: true,
},
[Role.Scraper]: {
arbitrum: true,
@ -98,21 +102,23 @@ export const hyperlaneContextAgentChainConfig: AgentChainConfig = {
bsc: true,
celo: true,
ethereum: true,
gnosis: true,
// Cannot scrape non-EVM chains
neutron: false,
injective: false,
inevm: true,
mantapacific: true,
mode: true,
moonbeam: true,
// Cannot scrape non-EVM chains
neutron: false,
optimism: true,
polygon: true,
gnosis: true,
scroll: true,
polygonzkevm: true,
// Cannot scrape non-EVM chains
injective: false,
inevm: true,
redstone: true,
scroll: true,
// Has RPC non-compliance that breaks scraping.
viction: false,
zetachain: true,
},
};
@ -203,7 +209,7 @@ const hyperlane: RootAgentConfig = {
rpcConsensusType: RpcConsensusType.Fallback,
docker: {
repo,
tag: 'c9c5d37-20240510-014327',
tag: 'd6bb976-20240520-164138',
},
gasPaymentEnforcement: gasPaymentEnforcement,
metricAppContexts,
@ -211,7 +217,7 @@ const hyperlane: RootAgentConfig = {
validators: {
docker: {
repo,
tag: 'c9c5d37-20240510-014327',
tag: 'de8c2a7-20240515-135254',
},
rpcConsensusType: RpcConsensusType.Quorum,
chains: validatorChainConfig(Contexts.Hyperlane),
@ -220,7 +226,7 @@ const hyperlane: RootAgentConfig = {
rpcConsensusType: RpcConsensusType.Fallback,
docker: {
repo,
tag: 'c9c5d37-20240510-014327',
tag: 'd6bb976-20240520-164138',
},
},
};

@ -109,6 +109,9 @@
"0x6a1da2e0b7ae26aaece1377c0a4dbe25b85fa3ca"
]
},
"redstone": {
"validators": ["0x1400b9737007f7978d8b4bbafb4a69c83f0641a7"]
},
"scroll": {
"validators": [
"0xad557170a9f2f21c35e03de07cb30dcbcc3dff63",
@ -118,5 +121,8 @@
},
"viction": {
"validators": ["0x1f87c368f8e05a85ef9126d984a980a20930cb9c"]
},
"zetachain": {
"validators": ["0xa3bca0b80317dbf9c7dce16a16ac89f4ff2b23ef"]
}
}

@ -106,6 +106,9 @@
"0x1cd73544c000fd519784f56e59bc380a5fef53d6"
]
},
"redstone": {
"validators": ["0x51ed7127c0afc0513a0f141e910c5e02b2a9a4b5"]
},
"scroll": {
"validators": [
"0x11387d89856219cf685f22781bf4e85e00468d54",
@ -119,5 +122,8 @@
"0xad94659e2383214e4a1c4e8d3c17caffb75bc31b",
"0x0f9e5775ac4d3b73dd28e5a3f8394443186cb70c"
]
},
"zetachain": {
"validators": ["0xa13d146b47242671466e4041f5fe68d22a2ffe09"]
}
}

@ -30,7 +30,7 @@ export const ethereumMainnetConfigs: ChainMap<ChainMetadata> = {
transactionOverrides: {
// A very high max fee per gas is used as Polygon is susceptible
// to large swings in gas prices.
maxFeePerGas: 800 * 10 ** 9, // 800 gwei
maxFeePerGas: 550 * 10 ** 9, // 550 gwei
maxPriorityFeePerGas: 50 * 10 ** 9, // 50 gwei
},
},

@ -9,7 +9,7 @@ import { environment } from './chains.js';
export const keyFunderConfig: KeyFunderConfig = {
docker: {
repo: 'gcr.io/abacus-labs-dev/hyperlane-monorepo',
tag: '5d1391c-20240418-100607',
tag: 'b22a0f4-20240523-140812',
},
// We're currently using the same deployer/key funder key as mainnet2.
// To minimize nonce clobbering we offset the key funder cron
@ -17,7 +17,7 @@ export const keyFunderConfig: KeyFunderConfig = {
cronSchedule: '45 * * * *', // Every hour at the 45-minute mark
namespace: environment,
prometheusPushGateway:
'http://prometheus-pushgateway.monitoring.svc.cluster.local:9091',
'http://prometheus-prometheus-pushgateway.monitoring.svc.cluster.local:9091',
contextFundingFrom: Contexts.Hyperlane,
contextsAndRolesToFund: {
[Contexts.Hyperlane]: [Role.Relayer, Role.Kathy],
@ -26,39 +26,47 @@ export const keyFunderConfig: KeyFunderConfig = {
connectionType: RpcConsensusType.Fallback,
// desired balance config
desiredBalancePerChain: {
arbitrum: '0.5',
ancient8: '0.5',
avalanche: '5',
bsc: '5',
base: '0.5',
blast: '0.2',
bsc: '5',
celo: '3',
ethereum: '0.5',
gnosis: '5',
inevm: '3',
mantapacific: '0.2',
mode: '0.2',
moonbeam: '5',
polygon: '20',
viction: '3',
// Funder boosts itself up to 5x balance on L2 before dispersing funds
arbitrum: '0.5',
base: '0.5',
optimism: '0.5',
polygon: '20',
polygonzkevm: '0.5',
redstone: '0.2',
scroll: '0.5',
ancient8: '0.5',
viction: '3',
zetachain: '20',
},
desiredKathyBalancePerChain: {
arbitrum: '0.1',
ancient8: '0',
avalanche: '6',
base: '0.05',
blast: '0',
bsc: '0.35',
celo: '150',
ethereum: '0.4',
gnosis: '100',
inevm: '0.05',
mantapacific: '0',
mode: '0',
moonbeam: '250',
optimism: '0.1',
polygon: '85',
polygonzkevm: '0.05',
redstone: '0',
scroll: '0.05',
viction: '0.05',
zetachain: '0',
},
};

@ -28,7 +28,7 @@
"decimals": 9
},
"ethereum": {
"amount": "26.346912847",
"amount": "20",
"decimals": 9
},
"mantapacific": {
@ -63,6 +63,10 @@
"amount": "3.95",
"decimals": 9
},
"redstone": {
"amount": "0.0003",
"decimals": 9
},
"inevm": {
"amount": "0.1",
"decimals": 9
@ -78,5 +82,9 @@
"injective": {
"amount": "700000000",
"decimals": 1
},
"zetachain": {
"amount": "0.0001",
"decimals": 9
}
}

@ -15,7 +15,7 @@ export const hyperlane: HelloWorldConfig = {
kathy: {
docker: {
repo: 'gcr.io/abacus-labs-dev/hyperlane-monorepo',
tag: '86b7f98-20231207-153806',
tag: 'b22a0f4-20240523-140812',
},
chainsToSkip: [],
runEnv: environment,
@ -36,7 +36,7 @@ export const releaseCandidate: HelloWorldConfig = {
kathy: {
docker: {
repo: 'gcr.io/abacus-labs-dev/hyperlane-monorepo',
tag: '0e3f73f-20240206-160718',
tag: 'b22a0f4-20240523-140812',
},
chainsToSkip: [],
runEnv: environment,

@ -9,7 +9,7 @@ export const infrastructure: InfrastructureConfig = {
prometheus: {
deployName: 'prometheus',
// Node exporter does not work with GKE Autopilot
nodeExporterEnabled: false,
nodeExporterEnabled: true,
helmChart: {
// See https://github.com/prometheus-community/helm-charts#usage
repository: {
@ -17,7 +17,7 @@ export const infrastructure: InfrastructureConfig = {
url: 'https://prometheus-community.github.io/helm-charts',
},
name: 'prometheus',
version: '15.0.1',
version: '25.21.0',
},
},
},

@ -3043,6 +3043,68 @@
"name": "DomaingRoutingIsm"
}
],
"redstone": [
{
"address": "0x8b83fefd896fAa52057798f6426E9f0B080FCCcE",
"constructorArguments": "",
"isProxy": false,
"name": "StaticMerkleRootMultisigIsmFactory"
},
{
"address": "0xAF03386044373E2fe26C5b1dCedF5a7e854a7a3F",
"constructorArguments": "",
"isProxy": true,
"name": "StaticMerkleRootMultisigIsm"
},
{
"address": "0x8F7454AC98228f3504Bb91eA3D8Adafe6406110A",
"constructorArguments": "",
"isProxy": false,
"name": "StaticMessageIdMultisigIsmFactory"
},
{
"address": "0x882CD0C5D50b6dD74b36Da4BDb059507fddEDdf2",
"constructorArguments": "",
"isProxy": true,
"name": "StaticMessageIdMultisigIsm"
},
{
"address": "0xEb9FcFDC9EfDC17c1EC5E1dc085B98485da213D6",
"constructorArguments": "",
"isProxy": false,
"name": "StaticAggregationIsmFactory"
},
{
"address": "0x19930232E9aFC4f4F09d09fe2375680fAc2100D0",
"constructorArguments": "",
"isProxy": true,
"name": "StaticAggregationIsm"
},
{
"address": "0x1052eF3419f26Bec74Ed7CEf4a4FA6812Bc09908",
"constructorArguments": "",
"isProxy": false,
"name": "StaticAggregationHookFactory"
},
{
"address": "0x12Ed1BbA182CbC63692F813651BD493B7445C874",
"constructorArguments": "",
"isProxy": true,
"name": "StaticAggregationHook"
},
{
"address": "0x0761b0827849abbf7b0cC09CE14e1C93D87f5004",
"constructorArguments": "",
"isProxy": false,
"name": "DomainRoutingIsmFactory"
},
{
"address": "0x3b9f24fD2ecfed0d3A88fa7f0E4e5747671981D7",
"constructorArguments": "",
"isProxy": true,
"name": "DomaingRoutingIsm"
}
],
"scroll": [
{
"address": "0x2C1FAbEcd7bFBdEBF27CcdB67baADB38b6Df90fC",
@ -3352,5 +3414,67 @@
"isProxy": true,
"name": "DomaingRoutingIsm"
}
],
"zetachain": [
{
"address": "0x2C1FAbEcd7bFBdEBF27CcdB67baADB38b6Df90fC",
"constructorArguments": "",
"isProxy": false,
"name": "StaticMerkleRootMultisigIsmFactory"
},
{
"address": "0x4725F7b8037513915aAf6D6CBDE2920E28540dDc",
"constructorArguments": "",
"isProxy": true,
"name": "StaticMerkleRootMultisigIsm"
},
{
"address": "0x8b83fefd896fAa52057798f6426E9f0B080FCCcE",
"constructorArguments": "",
"isProxy": false,
"name": "StaticMessageIdMultisigIsmFactory"
},
{
"address": "0xAF03386044373E2fe26C5b1dCedF5a7e854a7a3F",
"constructorArguments": "",
"isProxy": true,
"name": "StaticMessageIdMultisigIsm"
},
{
"address": "0x8F7454AC98228f3504Bb91eA3D8Adafe6406110A",
"constructorArguments": "",
"isProxy": false,
"name": "StaticAggregationIsmFactory"
},
{
"address": "0x882CD0C5D50b6dD74b36Da4BDb059507fddEDdf2",
"constructorArguments": "",
"isProxy": true,
"name": "StaticAggregationIsm"
},
{
"address": "0xEb9FcFDC9EfDC17c1EC5E1dc085B98485da213D6",
"constructorArguments": "",
"isProxy": false,
"name": "StaticAggregationHookFactory"
},
{
"address": "0x19930232E9aFC4f4F09d09fe2375680fAc2100D0",
"constructorArguments": "",
"isProxy": true,
"name": "StaticAggregationHook"
},
{
"address": "0x1052eF3419f26Bec74Ed7CEf4a4FA6812Bc09908",
"constructorArguments": "",
"isProxy": false,
"name": "DomainRoutingIsmFactory"
},
{
"address": "0x12Ed1BbA182CbC63692F813651BD493B7445C874",
"constructorArguments": "",
"isProxy": true,
"name": "DomaingRoutingIsm"
}
]
}

@ -49,6 +49,6 @@ export const relayerConfig: LiquidityLayerRelayerConfig = {
},
namespace: environment,
prometheusPushGateway:
'http://prometheus-pushgateway.monitoring.svc.cluster.local:9091',
'http://prometheus-prometheus-pushgateway.monitoring.svc.cluster.local:9091',
connectionType: RpcConsensusType.Single,
};

@ -55,6 +55,7 @@ export const owners: ChainMap<OwnableConfig> = Object.fromEntries(
proxyAdmin: timelocks[local] ?? safes[local] ?? DEPLOYER,
validatorAnnounce: DEPLOYER, // unused
testRecipient: DEPLOYER,
fallbackRoutingHook: DEPLOYER,
},
},
]),

@ -4,21 +4,23 @@ export const supportedChainNames = [
'arbitrum',
'ancient8',
'avalanche',
'base',
'blast',
'bsc',
'celo',
'ethereum',
'neutron',
'gnosis',
'inevm',
'injective',
'mantapacific',
'mode',
'moonbeam',
'neutron',
'optimism',
'polygon',
'gnosis',
'base',
'scroll',
'polygonzkevm',
'injective',
'inevm',
'redstone',
'scroll',
'viction',
'zetachain',
];

@ -1,22 +1,24 @@
{
"arbitrum": "3174.87",
"ancient8": "3174.87",
"avalanche": "38.39",
"base": "3174.87",
"blast": "3174.87",
"bsc": "609.32",
"celo": "0.860923",
"ethereum": "3174.87",
"mantapacific": "3174.87",
"mode": "3174.87",
"moonbeam": "0.338118",
"optimism": "3174.87",
"polygon": "0.730041",
"gnosis": "0.993981",
"scroll": "3174.87",
"polygonzkevm": "3174.87",
"inevm": "28.12",
"viction": "0.775722",
"neutron": "0.842639",
"injective": "28.12"
"arbitrum": "2919.87",
"ancient8": "2919.87",
"avalanche": "33.19",
"base": "2919.87",
"blast": "2919.87",
"bsc": "570.1",
"celo": "0.738559",
"ethereum": "2919.87",
"gnosis": "1.005",
"inevm": "21.59",
"mantapacific": "2919.87",
"mode": "2919.87",
"moonbeam": "0.253144",
"optimism": "2919.87",
"polygon": "0.663051",
"polygonzkevm": "2919.87",
"redstone": "2919.87",
"scroll": "2919.87",
"viction": "0.424231",
"zetachain": "1.53",
"injective": "21.59",
"neutron": "0.606906"
}

@ -380,5 +380,33 @@ export const validatorChainConfig = (
'mode',
),
},
redstone: {
interval: 5,
reorgPeriod: getReorgPeriod('redstone'),
validators: validatorsConfig(
{
[Contexts.Hyperlane]: ['0x1400b9737007f7978d8b4bbafb4a69c83f0641a7'],
[Contexts.ReleaseCandidate]: [
'0x51ed7127c0afc0513a0f141e910c5e02b2a9a4b5',
],
[Contexts.Neutron]: [],
},
'redstone',
),
},
zetachain: {
interval: 5,
reorgPeriod: getReorgPeriod('zetachain'),
validators: validatorsConfig(
{
[Contexts.Hyperlane]: ['0xa3bca0b80317dbf9c7dce16a16ac89f4ff2b23ef'],
[Contexts.ReleaseCandidate]: [
'0xa13d146b47242671466e4041f5fe68d22a2ffe09',
],
[Contexts.Neutron]: [],
},
'zetachain',
),
},
};
};

@ -9,7 +9,7 @@ import { environment } from './chains.js';
export const keyFunderConfig: KeyFunderConfig = {
docker: {
repo: 'gcr.io/abacus-labs-dev/hyperlane-monorepo',
tag: 'b0811ba-20240411-151216',
tag: 'b22a0f4-20240523-140812',
},
// We're currently using the same deployer key as testnet2.
// To minimize nonce clobbering we offset the key funder cron
@ -17,7 +17,7 @@ export const keyFunderConfig: KeyFunderConfig = {
cronSchedule: '15 * * * *', // Every hour at the 15-minute mark
namespace: environment,
prometheusPushGateway:
'http://prometheus-pushgateway.monitoring.svc.cluster.local:9091',
'http://prometheus-prometheus-pushgateway.monitoring.svc.cluster.local:9091',
contextFundingFrom: Contexts.Hyperlane,
contextsAndRolesToFund: {
[Contexts.Hyperlane]: [Role.Relayer, Role.Kathy],
@ -30,11 +30,15 @@ export const keyFunderConfig: KeyFunderConfig = {
bsctestnet: '5',
fuji: '5',
plumetestnet: '0.2',
sepolia: '5',
// Funder boosts itself up to 5x balance on L2 before dispersing funds
scrollsepolia: '1',
sepolia: '5',
},
desiredKathyBalancePerChain: {
alfajores: '1',
bsctestnet: '1',
fuji: '1',
plumetestnet: '0.05',
scrollsepolia: '1',
sepolia: '1',
},
};

@ -15,7 +15,7 @@ export const hyperlaneHelloworld: HelloWorldConfig = {
kathy: {
docker: {
repo: 'gcr.io/abacus-labs-dev/hyperlane-monorepo',
tag: '17ac515-20240402-171932',
tag: 'b22a0f4-20240523-140812',
},
chainsToSkip: [],
runEnv: environment,
@ -35,7 +35,7 @@ export const releaseCandidateHelloworld: HelloWorldConfig = {
kathy: {
docker: {
repo: 'gcr.io/abacus-labs-dev/hyperlane-monorepo',
tag: '17ac515-20240402-171932',
tag: 'b22a0f4-20240523-140812',
},
chainsToSkip: [],
runEnv: environment,
