### Description

Opens the way for that sweet, sweet agent image.

### Drive-by changes

_Are there any minor or drive-by changes also included?_

### Related issues

- Fixes #[issue number here]

### Backward compatibility

_Are these changes backward compatible?_

Yes
No

_Are there any infrastructure implications, e.g. changes that would
prohibit deploying older commits using this infra tooling?_

None
Yes


### Testing

_What kind of testing have these changes undergone?_

None
Manual
Unit Tests

---------

Co-authored-by: Steven Sloboda <steven@eclipse.builders>
Co-authored-by: Nam Chu Hoai <nambrot@googlemail.com>
Co-authored-by: Yorke Rhodes <yorke@hyperlane.xyz>
Co-authored-by: J M Rossy <jm.rossy@gmail.com>
Co-authored-by: Mattie Conover <git@mconover.dev>
Co-authored-by: Asa Oines <asaoines@gmail.com>
Co-authored-by: Sergei Patrikeev <serejke.best@gmail.com>
Co-authored-by: Daniel Savu <23065004+daniel-savu@users.noreply.github.com>
Co-authored-by: Kunal Arora <55632507+aroralanuk@users.noreply.github.com>
Co-authored-by: yorhodes <yorkerhodesiv@gmail.com>
Co-authored-by: Yorke Rhodes <yorke@useabacus.network>
Co-authored-by: Yorke Rhodes <email@yorke.dev>
Co-authored-by: Asa Oines <asaoines@Asas-MacBook-Pro.local>
Co-authored-by: Kunal Arora <kuarora@ucsd.edu>
Co-authored-by: Anett <44020788+anettrolikova@users.noreply.github.com>
Co-authored-by: Yaqub Mahmoud <yaqub320@gmail.com>
Co-authored-by: Alex <alex@alexbh.dev>
trevor/last-agent-release agents-2023-07-25
Trevor Porter 1 year ago committed by GitHub
parent 04bdce0877
commit 8c5983933b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 9
      .github/workflows/e2e.yml
  2. 14
      .github/workflows/rust.yml
  3. 1
      .gitignore
  4. 1
      .prettierignore
  5. 4455
      rust/Cargo.lock
  6. 252
      rust/Cargo.toml
  7. 1
      rust/Dockerfile
  8. 4
      rust/agents/relayer/Cargo.toml
  9. 10
      rust/agents/relayer/src/msg/processor.rs
  10. 2
      rust/agents/relayer/src/prover.rs
  11. 2
      rust/agents/relayer/src/relayer.rs
  12. 4
      rust/agents/scraper/Cargo.toml
  13. 4
      rust/agents/scraper/migration/Cargo.toml
  14. 4
      rust/agents/validator/Cargo.toml
  15. 7
      rust/agents/validator/src/validator.rs
  16. 6
      rust/chains/hyperlane-ethereum/Cargo.toml
  17. 23
      rust/chains/hyperlane-ethereum/src/interchain_gas.rs
  18. 2
      rust/chains/hyperlane-ethereum/src/interchain_security_module.rs
  19. 65
      rust/chains/hyperlane-ethereum/src/mailbox.rs
  20. 34
      rust/chains/hyperlane-ethereum/src/provider.rs
  21. 10
      rust/chains/hyperlane-ethereum/src/signers.rs
  22. 13
      rust/chains/hyperlane-ethereum/src/singleton_signer.rs
  23. 7
      rust/chains/hyperlane-ethereum/src/tx.rs
  24. 12
      rust/chains/hyperlane-ethereum/src/validator_announce.rs
  25. 4
      rust/chains/hyperlane-fuel/Cargo.toml
  26. 5
      rust/chains/hyperlane-fuel/src/interchain_gas.rs
  27. 17
      rust/chains/hyperlane-fuel/src/mailbox.rs
  28. 33
      rust/chains/hyperlane-sealevel/Cargo.toml
  29. 24
      rust/chains/hyperlane-sealevel/src/client.rs
  30. 76
      rust/chains/hyperlane-sealevel/src/interchain_gas.rs
  31. 94
      rust/chains/hyperlane-sealevel/src/interchain_security_module.rs
  32. 26
      rust/chains/hyperlane-sealevel/src/lib.rs
  33. 748
      rust/chains/hyperlane-sealevel/src/mailbox.rs
  34. 140
      rust/chains/hyperlane-sealevel/src/multisig_ism.rs
  35. 46
      rust/chains/hyperlane-sealevel/src/provider.rs
  36. 7
      rust/chains/hyperlane-sealevel/src/solana/ed25519_program.rs
  37. 360
      rust/chains/hyperlane-sealevel/src/solana/fee_calculator.rs
  38. 101
      rust/chains/hyperlane-sealevel/src/solana/sdk/Cargo.toml
  39. 23
      rust/chains/hyperlane-sealevel/src/solana/sdk/macro/Cargo.toml
  40. 405
      rust/chains/hyperlane-sealevel/src/solana/sdk/macro/src/lib.rs
  41. 1
      rust/chains/hyperlane-sealevel/src/solana/sdk/src/lib.rs
  42. 12
      rust/chains/hyperlane-sealevel/src/solana/secp256k1_program.rs
  43. 1
      rust/chains/hyperlane-sealevel/src/solana/solana_sdk/mod.rs
  44. 423
      rust/chains/hyperlane-sealevel/src/solana/solana_sdk/solana_sdk_macro/mod.rs
  45. 61
      rust/chains/hyperlane-sealevel/src/trait_builder.rs
  46. 79
      rust/chains/hyperlane-sealevel/src/utils.rs
  47. 129
      rust/chains/hyperlane-sealevel/src/validator_announce.rs
  48. 15
      rust/config/sealevel/relayer.env
  49. 49
      rust/config/sealevel/sealevel.json
  50. 13
      rust/config/sealevel/test-keys/test_deployer-account.json
  51. 1
      rust/config/sealevel/test-keys/test_deployer-keypair.json
  52. 10
      rust/config/sealevel/validator.env
  53. 38
      rust/config/testnet_config.json
  54. 27
      rust/ethers-prometheus/Cargo.toml
  55. 16
      rust/hyperlane-base/Cargo.toml
  56. 113
      rust/hyperlane-base/src/contract_sync/cursor.rs
  57. 22
      rust/hyperlane-base/src/contract_sync/mod.rs
  58. 4
      rust/hyperlane-base/src/db/rocks/hyperlane_db.rs
  59. 90
      rust/hyperlane-base/src/settings/chains.rs
  60. 19
      rust/hyperlane-base/src/settings/signers.rs
  61. 5
      rust/hyperlane-base/src/types/checkpoint_syncer.rs
  62. 3
      rust/hyperlane-base/src/types/multisig.rs
  63. 16
      rust/hyperlane-base/tests/chain_config.rs
  64. 36
      rust/hyperlane-core/Cargo.toml
  65. 5
      rust/hyperlane-core/src/accumulator/incremental.rs
  66. 51
      rust/hyperlane-core/src/accumulator/merkle.rs
  67. 44
      rust/hyperlane-core/src/accumulator/mod.rs
  68. 13
      rust/hyperlane-core/src/accumulator/sparse.rs
  69. 143
      rust/hyperlane-core/src/accumulator/zero_hashes.rs
  70. 80
      rust/hyperlane-core/src/chain.rs
  71. 2
      rust/hyperlane-core/src/config/str_or_int.rs
  72. 76
      rust/hyperlane-core/src/error.rs
  73. 2
      rust/hyperlane-core/src/lib.rs
  74. 3
      rust/hyperlane-core/src/test_utils.rs
  75. 18
      rust/hyperlane-core/src/traits/cursor.rs
  76. 24
      rust/hyperlane-core/src/traits/deployed.rs
  77. 10
      rust/hyperlane-core/src/traits/encode.rs
  78. 26
      rust/hyperlane-core/src/traits/indexer.rs
  79. 10
      rust/hyperlane-core/src/traits/interchain_security_module.rs
  80. 10
      rust/hyperlane-core/src/traits/mod.rs
  81. 91
      rust/hyperlane-core/src/traits/signing.rs
  82. 2
      rust/hyperlane-core/src/traits/validator_announce.rs
  83. 10
      rust/hyperlane-core/src/types/checkpoint.rs
  84. 23
      rust/hyperlane-core/src/types/log_metadata.rs
  85. 85
      rust/hyperlane-core/src/types/mod.rs
  86. 310
      rust/hyperlane-core/src/types/primitive_types.rs
  87. 349
      rust/hyperlane-core/src/types/serialize.rs
  88. 31
      rust/hyperlane-core/src/utils.rs
  89. 2
      rust/hyperlane-test/Cargo.toml
  90. 2
      rust/sealevel/.gitignore
  91. 127
      rust/sealevel/README.md
  92. 30
      rust/sealevel/client/Cargo.toml
  93. 160
      rust/sealevel/client/src/cmd_utils.rs
  94. 264
      rust/sealevel/client/src/core.rs
  95. 1239
      rust/sealevel/client/src/main.rs
  96. 566
      rust/sealevel/client/src/warp_route.rs
  97. 1
      rust/sealevel/environments/devnet/solanadevnet/core/keys/hyperlane_sealevel_mailbox-keypair.json
  98. 1
      rust/sealevel/environments/devnet/solanadevnet/core/keys/hyperlane_sealevel_multisig_ism_message_id-keypair.json
  99. 1
      rust/sealevel/environments/devnet/solanadevnet/core/keys/hyperlane_sealevel_validator_announce-keypair.json
  100. 5
      rust/sealevel/environments/devnet/solanadevnet/core/program-ids.json
  101. Some files were not shown because too many files have changed in this diff. Show More

@@ -38,6 +38,14 @@ jobs:
- name: Install Foundry - name: Install Foundry
uses: onbjerg/foundry-toolchain@v1 uses: onbjerg/foundry-toolchain@v1
- name: Free disk space
run: |
# Based on https://github.com/actions/runner-images/issues/2840#issuecomment-790492173
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: rust cache - name: rust cache
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
with: with:
@@ -52,7 +60,6 @@ jobs:
**/node_modules **/node_modules
.yarn/cache .yarn/cache
key: ${{ runner.os }}-yarn-cache-${{ hashFiles('./yarn.lock') }} key: ${{ runner.os }}-yarn-cache-${{ hashFiles('./yarn.lock') }}
- name: build test - name: build test
run: cargo build --release --bin run-locally run: cargo build --release --bin run-locally
- name: run test - name: run test

@@ -40,6 +40,13 @@ jobs:
shared-key: "test" shared-key: "test"
workspaces: | workspaces: |
./rust ./rust
- name: Free disk space
run: |
# Based on https://github.com/actions/runner-images/issues/2840#issuecomment-790492173
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Run tests - name: Run tests
run: cargo test run: cargo test
@@ -61,6 +68,13 @@ jobs:
shared-key: "lint" shared-key: "lint"
workspaces: | workspaces: |
./rust ./rust
- name: Free disk space
run: |
# Based on https://github.com/actions/runner-images/issues/2840#issuecomment-790492173
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Check - name: Check
run: cargo check --all-features --all-targets run: cargo check --all-features --all-targets
- name: Rustfmt - name: Rustfmt

1
.gitignore vendored

@@ -7,7 +7,6 @@ test_deploy.env
**/*.swp **/*.swp
**/*.swo **/*.swo
rust/vendor/
rust/tmp_db rust/tmp_db
rust/tmp.env rust/tmp.env
tmp.env tmp.env

@@ -7,4 +7,3 @@
*.Dockerfile *.Dockerfile
Dockerfile Dockerfile

4455
rust/Cargo.lock generated

File diff suppressed because it is too large Load Diff

@@ -1,17 +1,36 @@
cargo-features = ["edition2021"]
[workspace] [workspace]
members = [ members = [
"agents/relayer", "agents/relayer",
"agents/scraper", "agents/scraper",
"agents/validator", "agents/validator",
"chains/hyperlane-ethereum", "chains/hyperlane-ethereum",
"chains/hyperlane-fuel", "chains/hyperlane-fuel",
"chains/hyperlane-sealevel",
"ethers-prometheus", "ethers-prometheus",
"hyperlane-base", "hyperlane-base",
"hyperlane-core", "hyperlane-core",
"hyperlane-test", "hyperlane-test",
"sealevel/client",
"sealevel/libraries/access-control",
"sealevel/libraries/account-utils",
"sealevel/libraries/ecdsa-signature",
"sealevel/libraries/hyperlane-sealevel-connection-client",
"sealevel/libraries/hyperlane-sealevel-token",
"sealevel/libraries/interchain-security-module-interface",
"sealevel/libraries/message-recipient-interface",
"sealevel/libraries/multisig-ism",
"sealevel/libraries/serializable-account-meta",
"sealevel/libraries/test-transaction-utils",
"sealevel/libraries/test-utils",
"sealevel/programs/hyperlane-sealevel-token",
"sealevel/programs/hyperlane-sealevel-token-collateral",
"sealevel/programs/hyperlane-sealevel-token-native",
"sealevel/programs/ism/multisig-ism-message-id",
"sealevel/programs/ism/test-ism",
"sealevel/programs/mailbox",
"sealevel/programs/mailbox-test",
"sealevel/programs/test-send-receiver",
"sealevel/programs/validator-announce",
"utils/abigen", "utils/abigen",
"utils/backtrace-oneline", "utils/backtrace-oneline",
"utils/hex", "utils/hex",
@@ -27,38 +46,241 @@ publish = false
version = "0.1.0" version = "0.1.0"
[workspace.dependencies] [workspace.dependencies]
async-trait = { version = "0.1" } Inflector = "0.11.4"
color-eyre = { version = "0.6" } anyhow = "1.0"
async-trait = "0.1"
base64 = "0.13"
bincode = "1.3"
blake3 = "1.3"
borsh = "0.9"
bs58 = "0.4.0"
clap = "4"
color-eyre = "0.6"
config = "~0.13.3" config = "~0.13.3"
derive-new = "0.5" derive-new = "0.5"
derive_builder = "0.12"
derive_more = "0.99" derive_more = "0.99"
enum_dispatch = "0.3" enum_dispatch = "0.3"
ethers = { git = "https://github.com/hyperlane-xyz/ethers-rs", tag = "2023-06-01" }
ethers-contract = { git = "https://github.com/hyperlane-xyz/ethers-rs", tag = "2023-06-01", features = ["legacy"] }
ethers-core = { git = "https://github.com/hyperlane-xyz/ethers-rs", tag = "2023-06-01" }
ethers-providers = { git = "https://github.com/hyperlane-xyz/ethers-rs", tag = "2023-06-01" }
ethers-signers = { git = "https://github.com/hyperlane-xyz/ethers-rs", tag = "2023-06-01", features = ["aws"] }
eyre = "0.6" eyre = "0.6"
fuels = "0.38" fuels = "0.38"
fuels-code-gen = "0.38" fuels-code-gen = "0.38"
futures = "0.3" futures = "0.3"
futures-util = "0.3" futures-util = "0.3"
hex = "0.4"
itertools = "0.10" itertools = "0.10"
num = {version = "0.4"} jsonrpc-core = "18.0"
log = "0.4"
maplit = "1.0"
num = "0.4"
num-derive = "0.3" num-derive = "0.3"
num-traits = "0.2" num-traits = "0.2"
parking_lot = "0.12"
paste = "1.0" paste = "1.0"
pretty_env_logger = "0.4"
primitive-types = "=0.12.1"
prometheus = "0.13" prometheus = "0.13"
reqwest = "0.11" reqwest = "0.11"
rlp = "=0.5.2"
rocksdb = "0.20" rocksdb = "0.20"
serde = { version = "1.0", features = ["derive"] } semver = "1.0"
serde_bytes = "0.11"
serde_derive = "1.0"
serde_json = "1.0" serde_json = "1.0"
sha2 = "0.10"
solana-account-decoder = "=1.14.13"
solana-banks-client = "=1.14.13"
solana-banks-interface = "=1.14.13"
solana-banks-server = "=1.14.13"
solana-clap-utils = "=1.14.13"
solana-cli-config = "=1.14.13"
solana-client = "=1.14.13"
solana-program = "=1.14.13"
solana-program-test = "=1.14.13"
solana-sdk = "=1.14.13"
solana-transaction-status = "=1.14.13"
solana-zk-token-sdk = "=1.14.13"
spl-associated-token-account = { version = "=1.1.2", features = ["no-entrypoint"] }
spl-noop = { version = "=0.1.3", features = ["no-entrypoint"] }
spl-token = { version = "=3.5.0", features = ["no-entrypoint"] }
spl-token-2022 = { version = "=0.5.0", features = ["no-entrypoint"] }
spl-type-length-value = "=0.1.0"
static_assertions = "1.1" static_assertions = "1.1"
strum = "0.24" strum = "0.24"
strum_macros = "0.24" strum_macros = "0.24"
thiserror = "1.0" thiserror = "1.0"
tokio = { version = "1", features = ["parking_lot"] } tracing-error = "0.2"
tracing = { version = "0.1", features = ["release_max_level_debug"] }
tracing-futures = "0.2" tracing-futures = "0.2"
tracing-subscriber = { version = "0.3", default-features = false } ureq = "2.4"
url = "2.3" url = "2.3"
which = "4.3"
# Required for WASM support https://docs.rs/getrandom/latest/getrandom/#webassembly-support
getrandom = { version = "0.2", features = ["js"] }
[workspace.dependencies.curve25519-dalek]
version = "~3.2"
features = ["serde"]
[workspace.dependencies.ed25519-dalek]
version = "~1.0"
features = []
[workspace.dependencies.ethers]
git = "https://github.com/hyperlane-xyz/ethers-rs"
tag = "2023-06-01"
features = []
[workspace.dependencies.ethers-contract]
git = "https://github.com/hyperlane-xyz/ethers-rs"
tag = "2023-06-01"
features = ["legacy"]
[workspace.dependencies.ethers-core]
git = "https://github.com/hyperlane-xyz/ethers-rs"
tag = "2023-06-01"
features = []
[workspace.dependencies.ethers-providers]
git = "https://github.com/hyperlane-xyz/ethers-rs"
tag = "2023-06-01"
features = []
[workspace.dependencies.ethers-signers]
git = "https://github.com/hyperlane-xyz/ethers-rs"
tag = "2023-06-01"
features = ["aws"]
[workspace.dependencies.generic-array]
version = "0.14"
features = [
"serde",
"more_lengths",
]
default-features = false
[workspace.dependencies.serde]
version = "1.0"
features = ["derive"]
[workspace.dependencies.solana]
path = "patches/solana-1.14.13"
features = []
[workspace.dependencies.tokio]
version = "1"
features = ["parking_lot"]
[workspace.dependencies.tracing]
version = "0.1"
features = ["release_max_level_debug"]
[workspace.dependencies.tracing-subscriber]
version = "0.3"
features = []
default-features = false
[patch.crates-io.curve25519-dalek]
version = "3.2.2"
git = "https://github.com/Eclipse-Laboratories-Inc/curve25519-dalek"
branch = "v3.2.2-relax-zeroize"
[patch.crates-io.ed25519-dalek]
version = "1.0.1"
git = "https://github.com/Eclipse-Laboratories-Inc/ed25519-dalek"
branch = "main"
[patch.crates-io.primitive-types]
version = "=0.12.1"
git = "https://github.com/hyperlane-xyz/parity-common.git"
branch = "hyperlane"
[patch.crates-io.rlp]
version = "=0.5.2"
git = "https://github.com/hyperlane-xyz/parity-common.git"
branch = "hyperlane"
[patch.crates-io.solana-account-decoder]
version = "=1.14.13"
git = "https://github.com/hyperlane-xyz/solana.git"
tag = "hyperlane-1.14.13-2023-07-04"
[patch.crates-io.solana-banks-client]
version = "=1.14.13"
git = "https://github.com/hyperlane-xyz/solana.git"
tag = "hyperlane-1.14.13-2023-07-04"
[patch.crates-io.solana-banks-interface]
version = "=1.14.13"
git = "https://github.com/hyperlane-xyz/solana.git"
tag = "hyperlane-1.14.13-2023-07-04"
[patch.crates-io.solana-banks-server]
version = "=1.14.13"
git = "https://github.com/hyperlane-xyz/solana.git"
tag = "hyperlane-1.14.13-2023-07-04"
[patch.crates-io.solana-clap-utils]
version = "=1.14.13"
git = "https://github.com/hyperlane-xyz/solana.git"
tag = "hyperlane-1.14.13-2023-07-04"
[patch.crates-io.solana-cli-config]
version = "=1.14.13"
git = "https://github.com/hyperlane-xyz/solana.git"
tag = "hyperlane-1.14.13-2023-07-04"
[patch.crates-io.solana-client]
version = "=1.14.13"
git = "https://github.com/hyperlane-xyz/solana.git"
tag = "hyperlane-1.14.13-2023-07-04"
[patch.crates-io.solana-program]
version = "=1.14.13"
git = "https://github.com/hyperlane-xyz/solana.git"
tag = "hyperlane-1.14.13-2023-07-04"
[patch.crates-io.solana-program-test]
version = "=1.14.13"
git = "https://github.com/hyperlane-xyz/solana.git"
tag = "hyperlane-1.14.13-2023-07-04"
[patch.crates-io.solana-sdk]
version = "=1.14.13"
git = "https://github.com/hyperlane-xyz/solana.git"
tag = "hyperlane-1.14.13-2023-07-04"
[patch.crates-io.solana-transaction-status]
version = "=1.14.13"
git = "https://github.com/hyperlane-xyz/solana.git"
tag = "hyperlane-1.14.13-2023-07-04"
[patch.crates-io.solana-zk-token-sdk]
version = "=1.14.13"
git = "https://github.com/hyperlane-xyz/solana.git"
tag = "hyperlane-1.14.13-2023-07-04"
[patch.crates-io.spl-associated-token-account]
version = "=1.1.2"
git = "https://github.com/hyperlane-xyz/solana-program-library.git"
branch = "hyperlane"
[patch.crates-io.spl-noop]
version = "=0.1.3"
git = "https://github.com/hyperlane-xyz/solana-program-library.git"
branch = "hyperlane"
[patch.crates-io.spl-token]
version = "=3.5.0"
git = "https://github.com/hyperlane-xyz/solana-program-library.git"
branch = "hyperlane"
[patch.crates-io.spl-token-2022]
version = "=0.5.0"
git = "https://github.com/hyperlane-xyz/solana-program-library.git"
branch = "hyperlane"
[patch.crates-io.spl-type-length-value]
version = "=0.1.0"
git = "https://github.com/hyperlane-xyz/solana-program-library.git"
branch = "hyperlane"

@@ -17,6 +17,7 @@ COPY hyperlane-core ./hyperlane-core
COPY hyperlane-test ./hyperlane-test COPY hyperlane-test ./hyperlane-test
COPY ethers-prometheus ./ethers-prometheus COPY ethers-prometheus ./ethers-prometheus
COPY utils ./utils COPY utils ./utils
COPY sealevel ./sealevel
COPY Cargo.toml . COPY Cargo.toml .
COPY Cargo.lock . COPY Cargo.lock .

@@ -1,3 +1,5 @@
cargo-features = ["workspace-inheritance"]
[package] [package]
name = "relayer" name = "relayer"
documentation.workspace = true documentation.workspace = true
@@ -29,7 +31,7 @@ tracing-subscriber.workspace = true
tracing.workspace = true tracing.workspace = true
regex = "1.5" regex = "1.5"
hyperlane-core = { path = "../../hyperlane-core" } hyperlane-core = { path = "../../hyperlane-core", features = ["agent"] }
hyperlane-base = { path = "../../hyperlane-base" } hyperlane-base = { path = "../../hyperlane-base" }
hyperlane-ethereum = { path = "../../chains/hyperlane-ethereum" } hyperlane-ethereum = { path = "../../chains/hyperlane-ethereum" }
num-derive.workspace = true num-derive.workspace = true

@@ -323,7 +323,7 @@ mod test {
} }
fn add_db_entry(db: &HyperlaneRocksDB, msg: &HyperlaneMessage, retry_count: u32) { fn add_db_entry(db: &HyperlaneRocksDB, msg: &HyperlaneMessage, retry_count: u32) {
db.store_message(&msg, Default::default()).unwrap(); db.store_message(msg, Default::default()).unwrap();
if retry_count > 0 { if retry_count > 0 {
db.store_pending_message_retry_count_by_message_id(&msg.id(), &retry_count) db.store_pending_message_retry_count_by_message_id(&msg.id(), &retry_count)
.unwrap(); .unwrap();
@@ -343,14 +343,14 @@ mod test {
/// Only adds database entries to the pending message prefix if the message's /// Only adds database entries to the pending message prefix if the message's
/// retry count is greater than zero /// retry count is greater than zero
fn persist_retried_messages( fn persist_retried_messages(
retries: &Vec<u32>, retries: &[u32],
db: &HyperlaneRocksDB, db: &HyperlaneRocksDB,
destination_domain: &HyperlaneDomain, destination_domain: &HyperlaneDomain,
) { ) {
let mut nonce = 0; let mut nonce = 0;
retries.iter().for_each(|num_retries| { retries.iter().for_each(|num_retries| {
let message = dummy_hyperlane_message(&destination_domain, nonce); let message = dummy_hyperlane_message(destination_domain, nonce);
add_db_entry(&db, &message, *num_retries); add_db_entry(db, &message, *num_retries);
nonce += 1; nonce += 1;
}); });
} }
@@ -365,7 +365,7 @@ mod test {
num_operations: usize, num_operations: usize,
) -> Vec<Box<DynPendingOperation>> { ) -> Vec<Box<DynPendingOperation>> {
let (message_processor, mut receive_channel) = let (message_processor, mut receive_channel) =
dummy_message_processor(&origin_domain, &destination_domain, &db); dummy_message_processor(origin_domain, destination_domain, db);
let process_fut = message_processor.spawn(); let process_fut = message_processor.spawn();
let mut pending_messages = vec![]; let mut pending_messages = vec![];

@@ -157,7 +157,7 @@ mod test {
// insert the leaves // insert the leaves
for leaf in test_case.leaves.iter() { for leaf in test_case.leaves.iter() {
let hashed_leaf = hash_message(leaf); let hashed_leaf = hash_message(leaf);
tree.ingest(hashed_leaf).unwrap(); tree.ingest(hashed_leaf.into()).unwrap();
} }
// assert the tree has the proper leaf count // assert the tree has the proper leaf count

@@ -269,7 +269,7 @@ impl Relayer {
let index_settings = self.as_ref().settings.chains[origin.name()].index.clone(); let index_settings = self.as_ref().settings.chains[origin.name()].index.clone();
let contract_sync = self.message_syncs.get(origin).unwrap().clone(); let contract_sync = self.message_syncs.get(origin).unwrap().clone();
let cursor = contract_sync let cursor = contract_sync
.forward_backward_message_sync_cursor(index_settings.chunk_size) .forward_backward_message_sync_cursor(index_settings)
.await; .await;
tokio::spawn(async move { tokio::spawn(async move {
contract_sync contract_sync

@@ -1,3 +1,5 @@
cargo-features = ["workspace-inheritance"]
[package] [package]
name = "scraper" name = "scraper"
documentation.workspace = true documentation.workspace = true
@@ -28,7 +30,7 @@ tracing.workspace = true
hex = { path = "../../utils/hex" } hex = { path = "../../utils/hex" }
hyperlane-base = { path = "../../hyperlane-base" } hyperlane-base = { path = "../../hyperlane-base" }
hyperlane-core = { path = "../../hyperlane-core" } hyperlane-core = { path = "../../hyperlane-core", features = ["agent"] }
migration = { path = "migration" } migration = { path = "migration" }
[dev-dependencies] [dev-dependencies]

@@ -1,3 +1,5 @@
cargo-features = ["workspace-inheritance"]
[package] [package]
name = "migration" name = "migration"
documentation.workspace = true documentation.workspace = true
@@ -18,8 +20,6 @@ serde.workspace = true
time = "0.3" time = "0.3"
tokio = { workspace = true, features = ["rt", "macros", "parking_lot"] } tokio = { workspace = true, features = ["rt", "macros", "parking_lot"] }
hyperlane-core = { path = "../../../hyperlane-core" }
# bin-only deps # bin-only deps
tracing-subscriber.workspace = true tracing-subscriber.workspace = true
tracing.workspace = true tracing.workspace = true

@@ -1,3 +1,5 @@
cargo-features = ["workspace-inheritance"]
[package] [package]
name = "validator" name = "validator"
documentation.workspace = true documentation.workspace = true
@@ -22,7 +24,7 @@ tracing-futures.workspace = true
tracing-subscriber.workspace = true tracing-subscriber.workspace = true
tracing.workspace = true tracing.workspace = true
hyperlane-core = { path = "../../hyperlane-core" } hyperlane-core = { path = "../../hyperlane-core", features = ["agent"] }
hyperlane-base = { path = "../../hyperlane-base" } hyperlane-base = { path = "../../hyperlane-base" }
hyperlane-ethereum = { path = "../../chains/hyperlane-ethereum" } hyperlane-ethereum = { path = "../../chains/hyperlane-ethereum" }

@@ -148,7 +148,7 @@ impl Validator {
.clone(); .clone();
let contract_sync = self.message_sync.clone(); let contract_sync = self.message_sync.clone();
let cursor = contract_sync let cursor = contract_sync
.forward_backward_message_sync_cursor(index_settings.chunk_size) .forward_backward_message_sync_cursor(index_settings)
.await; .await;
tokio::spawn(async move { tokio::spawn(async move {
contract_sync contract_sync
@@ -256,7 +256,10 @@ impl Validator {
info!("Validator has announced signature storage location"); info!("Validator has announced signature storage location");
break; break;
} }
info!("Validator has not announced signature storage location"); info!(
announced_locations=?locations,
"Validator has not announced signature storage location"
);
let balance_delta = self let balance_delta = self
.validator_announce .validator_announce
.announce_tokens_needed(signed_announcement.clone()) .announce_tokens_needed(signed_announcement.clone())

@@ -1,3 +1,5 @@
cargo-features = ["workspace-inheritance"]
[package] [package]
name = "hyperlane-ethereum" name = "hyperlane-ethereum"
documentation.workspace = true documentation.workspace = true
@@ -16,8 +18,8 @@ ethers-core.workspace = true
ethers-signers.workspace = true ethers-signers.workspace = true
ethers.workspace = true ethers.workspace = true
futures-util.workspace = true futures-util.workspace = true
hex = "0.4.3" hex.workspace = true
num = "0.4" num.workspace = true
reqwest.workspace = true reqwest.workspace = true
serde.workspace = true serde.workspace = true
serde_json.workspace = true serde_json.workspace = true

@@ -9,9 +9,9 @@ use ethers::prelude::Middleware;
use tracing::instrument; use tracing::instrument;
use hyperlane_core::{ use hyperlane_core::{
ChainCommunicationError, ChainResult, ContractLocator, HyperlaneAbi, HyperlaneChain, BlockRange, ChainCommunicationError, ChainResult, ContractLocator, HyperlaneAbi,
HyperlaneContract, HyperlaneDomain, HyperlaneProvider, Indexer, InterchainGasPaymaster, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneProvider, IndexRange, Indexer,
InterchainGasPayment, LogMeta, H160, H256, InterchainGasPaymaster, InterchainGasPayment, LogMeta, H160, H256,
}; };
use crate::contracts::i_interchain_gas_paymaster::{ use crate::contracts::i_interchain_gas_paymaster::{
@@ -87,14 +87,19 @@ where
#[instrument(err, skip(self))] #[instrument(err, skip(self))]
async fn fetch_logs( async fn fetch_logs(
&self, &self,
from_block: u32, range: IndexRange,
to_block: u32,
) -> ChainResult<Vec<(InterchainGasPayment, LogMeta)>> { ) -> ChainResult<Vec<(InterchainGasPayment, LogMeta)>> {
let BlockRange(range) = range else {
return Err(ChainCommunicationError::from_other_str(
"EthereumInterchainGasPaymasterIndexer only supports block-based indexing",
));
};
let events = self let events = self
.contract .contract
.gas_payment_filter() .gas_payment_filter()
.from_block(from_block) .from_block(*range.start())
.to_block(to_block) .to_block(*range.end())
.query_with_meta() .query_with_meta()
.await?; .await?;
@@ -104,8 +109,8 @@ where
( (
InterchainGasPayment { InterchainGasPayment {
message_id: H256::from(log.message_id), message_id: H256::from(log.message_id),
payment: log.payment, payment: log.payment.into(),
gas_amount: log.gas_amount, gas_amount: log.gas_amount.into(),
}, },
log_meta.into(), log_meta.into(),
) )

@@ -121,7 +121,7 @@ where
); );
let (verifies, gas_estimate) = try_join(tx.call(), tx.estimate_gas()).await?; let (verifies, gas_estimate) = try_join(tx.call(), tx.estimate_gas()).await?;
if verifies { if verifies {
Ok(Some(gas_estimate)) Ok(Some(gas_estimate.into()))
} else { } else {
Ok(None) Ok(None)
} }

@@ -9,15 +9,15 @@ use async_trait::async_trait;
use ethers::abi::AbiEncode; use ethers::abi::AbiEncode;
use ethers::prelude::Middleware; use ethers::prelude::Middleware;
use ethers_contract::builders::ContractCall; use ethers_contract::builders::ContractCall;
use hyperlane_core::accumulator::incremental::IncrementalMerkle;
use hyperlane_core::accumulator::TREE_DEPTH;
use tracing::instrument; use tracing::instrument;
use hyperlane_core::accumulator::incremental::IncrementalMerkle;
use hyperlane_core::accumulator::TREE_DEPTH;
use hyperlane_core::{ use hyperlane_core::{
utils::fmt_bytes, ChainCommunicationError, ChainResult, Checkpoint, ContractLocator, utils::fmt_bytes, BlockRange, ChainCommunicationError, ChainResult, Checkpoint,
HyperlaneAbi, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneMessage, ContractLocator, HyperlaneAbi, HyperlaneChain, HyperlaneContract, HyperlaneDomain,
HyperlaneProtocolError, HyperlaneProvider, Indexer, LogMeta, Mailbox, MessageIndexer, HyperlaneMessage, HyperlaneProtocolError, HyperlaneProvider, IndexRange, Indexer, LogMeta,
RawHyperlaneMessage, TxCostEstimate, TxOutcome, H160, H256, U256, Mailbox, MessageIndexer, RawHyperlaneMessage, TxCostEstimate, TxOutcome, H160, H256, U256,
}; };
use crate::contracts::arbitrum_node_interface::ArbitrumNodeInterface; use crate::contracts::arbitrum_node_interface::ArbitrumNodeInterface;
@@ -130,16 +130,18 @@ where
} }
#[instrument(err, skip(self))] #[instrument(err, skip(self))]
async fn fetch_logs( async fn fetch_logs(&self, range: IndexRange) -> ChainResult<Vec<(HyperlaneMessage, LogMeta)>> {
&self, let BlockRange(range) = range else {
from: u32, return Err(ChainCommunicationError::from_other_str(
to: u32, "EthereumMailboxIndexer only supports block-based indexing",
) -> ChainResult<Vec<(HyperlaneMessage, LogMeta)>> { ))
};
let mut events: Vec<(HyperlaneMessage, LogMeta)> = self let mut events: Vec<(HyperlaneMessage, LogMeta)> = self
.contract .contract
.dispatch_filter() .dispatch_filter()
.from_block(from) .from_block(*range.start())
.to_block(to) .to_block(*range.end())
.query_with_meta() .query_with_meta()
.await? .await?
.into_iter() .into_iter()
@@ -176,12 +178,18 @@ where
} }
#[instrument(err, skip(self))] #[instrument(err, skip(self))]
async fn fetch_logs(&self, from: u32, to: u32) -> ChainResult<Vec<(H256, LogMeta)>> { async fn fetch_logs(&self, range: IndexRange) -> ChainResult<Vec<(H256, LogMeta)>> {
let BlockRange(range) = range else {
return Err(ChainCommunicationError::from_other_str(
"EthereumMailboxIndexer only supports block-based indexing",
))
};
Ok(self Ok(self
.contract .contract
.process_id_filter() .process_id_filter()
.from_block(from) .from_block(*range.start())
.to_block(to) .to_block(*range.end())
.query_with_meta() .query_with_meta()
.await? .await?
.into_iter() .into_iter()
@@ -380,6 +388,7 @@ where
Some(fixed_block_number), Some(fixed_block_number),
) )
.await .await
.map(Into::into)
.map_err(ChainCommunicationError::from_other)?; .map_err(ChainCommunicationError::from_other)?;
} }
@@ -446,13 +455,13 @@ where
Some( Some(
arbitrum_node_interface arbitrum_node_interface
.estimate_retryable_ticket( .estimate_retryable_ticket(
H160::zero(), H160::zero().into(),
// Give the sender a deposit, otherwise it reverts // Give the sender a deposit, otherwise it reverts
U256::MAX, U256::MAX.into(),
self.contract.address(), self.contract.address(),
U256::zero(), U256::zero().into(),
H160::zero(), H160::zero().into(),
H160::zero(), H160::zero().into(),
contract_call.calldata().unwrap_or_default(), contract_call.calldata().unwrap_or_default(),
) )
.estimate_gas() .estimate_gas()
@ -469,9 +478,9 @@ where
.map_err(ChainCommunicationError::from_other)?; .map_err(ChainCommunicationError::from_other)?;
Ok(TxCostEstimate { Ok(TxCostEstimate {
gas_limit, gas_limit: gas_limit.into(),
gas_price, gas_price: gas_price.into(),
l2_gas_limit, l2_gas_limit: l2_gas_limit.map(|v| v.into()),
}) })
} }
@ -501,8 +510,9 @@ mod test {
use ethers::{ use ethers::{
providers::{MockProvider, Provider}, providers::{MockProvider, Provider},
types::{Block, Transaction}, types::{Block, Transaction, U256 as EthersU256},
}; };
use hyperlane_core::{ use hyperlane_core::{
ContractLocator, HyperlaneDomain, HyperlaneMessage, KnownHyperlaneDomain, Mailbox, ContractLocator, HyperlaneDomain, HyperlaneMessage, KnownHyperlaneDomain, Mailbox,
TxCostEstimate, H160, H256, U256, TxCostEstimate, H160, H256, U256,
@ -534,7 +544,7 @@ mod test {
assert!(mailbox.arbitrum_node_interface.is_some()); assert!(mailbox.arbitrum_node_interface.is_some());
// Confirm `H160::from_low_u64_ne(0xC8)` does what's expected // Confirm `H160::from_low_u64_ne(0xC8)` does what's expected
assert_eq!( assert_eq!(
mailbox.arbitrum_node_interface.as_ref().unwrap().address(), H160::from(mailbox.arbitrum_node_interface.as_ref().unwrap().address()),
H160::from_str("0x00000000000000000000000000000000000000C8").unwrap(), H160::from_str("0x00000000000000000000000000000000000000C8").unwrap(),
); );
@ -544,7 +554,8 @@ mod test {
// RPC 4: eth_gasPrice by process_estimate_costs // RPC 4: eth_gasPrice by process_estimate_costs
// Return 15 gwei // Return 15 gwei
let gas_price: U256 = ethers::utils::parse_units("15", "gwei").unwrap().into(); let gas_price: U256 =
EthersU256::from(ethers::utils::parse_units("15", "gwei").unwrap()).into();
mock_provider.push(gas_price).unwrap(); mock_provider.push(gas_price).unwrap();
// RPC 3: eth_estimateGas to the ArbitrumNodeInterface's estimateRetryableTicket function by process_estimate_costs // RPC 3: eth_estimateGas to the ArbitrumNodeInterface's estimateRetryableTicket function by process_estimate_costs

@ -6,13 +6,13 @@ use std::time::Duration;
use async_trait::async_trait; use async_trait::async_trait;
use derive_new::new; use derive_new::new;
use ethers::prelude::Middleware; use ethers::prelude::Middleware;
use hyperlane_core::ethers_core_types;
use tokio::time::sleep; use tokio::time::sleep;
use tracing::instrument; use tracing::instrument;
use hyperlane_core::{ use hyperlane_core::{
BlockInfo, ChainCommunicationError, ChainResult, ContractLocator, HyperlaneChain, BlockInfo, ChainCommunicationError, ChainResult, ContractLocator, HyperlaneChain,
HyperlaneDomain, HyperlaneProvider, HyperlaneProviderError, TxnInfo, TxnReceiptInfo, H160, HyperlaneDomain, HyperlaneProvider, HyperlaneProviderError, TxnInfo, TxnReceiptInfo, H256,
H256,
}; };
use crate::BuildableWithProvider; use crate::BuildableWithProvider;
@ -51,7 +51,11 @@ where
{ {
#[instrument(err, skip(self))] #[instrument(err, skip(self))]
async fn get_block_by_hash(&self, hash: &H256) -> ChainResult<BlockInfo> { async fn get_block_by_hash(&self, hash: &H256) -> ChainResult<BlockInfo> {
let block = get_with_retry_on_none(hash, |h| self.provider.get_block(*h)).await?; let block = get_with_retry_on_none(hash, |h| {
let eth_h256: ethers_core_types::H256 = h.into();
self.provider.get_block(eth_h256)
})
.await?;
Ok(BlockInfo { Ok(BlockInfo {
hash: *hash, hash: *hash,
timestamp: block.timestamp.as_u64(), timestamp: block.timestamp.as_u64(),
@ -72,19 +76,19 @@ where
.map_err(ChainCommunicationError::from_other)? .map_err(ChainCommunicationError::from_other)?
.map(|r| -> Result<_, HyperlaneProviderError> { .map(|r| -> Result<_, HyperlaneProviderError> {
Ok(TxnReceiptInfo { Ok(TxnReceiptInfo {
gas_used: r.gas_used.ok_or(HyperlaneProviderError::NoGasUsed)?, gas_used: r.gas_used.ok_or(HyperlaneProviderError::NoGasUsed)?.into(),
cumulative_gas_used: r.cumulative_gas_used, cumulative_gas_used: r.cumulative_gas_used.into(),
effective_gas_price: r.effective_gas_price, effective_gas_price: r.effective_gas_price.map(Into::into),
}) })
}) })
.transpose()?; .transpose()?;
Ok(TxnInfo { Ok(TxnInfo {
hash: *hash, hash: *hash,
max_fee_per_gas: txn.max_fee_per_gas, max_fee_per_gas: txn.max_fee_per_gas.map(Into::into),
max_priority_fee_per_gas: txn.max_priority_fee_per_gas, max_priority_fee_per_gas: txn.max_priority_fee_per_gas.map(Into::into),
gas_price: txn.gas_price, gas_price: txn.gas_price.map(Into::into),
gas_limit: txn.gas, gas_limit: txn.gas.into(),
nonce: txn.nonce.as_u64(), nonce: txn.nonce.as_u64(),
sender: txn.from.into(), sender: txn.from.into(),
recipient: txn.to.map(Into::into), recipient: txn.to.map(Into::into),
@ -96,7 +100,7 @@ where
async fn is_contract(&self, address: &H256) -> ChainResult<bool> { async fn is_contract(&self, address: &H256) -> ChainResult<bool> {
let code = self let code = self
.provider .provider
.get_code(H160::from(*address), None) .get_code(ethers_core_types::H160::from(*address), None)
.await .await
.map_err(ChainCommunicationError::from_other)?; .map_err(ChainCommunicationError::from_other)?;
Ok(!code.is_empty()) Ok(!code.is_empty())
@ -111,10 +115,14 @@ where
async fn get_storage_at(&self, address: H256, location: H256) -> ChainResult<H256> { async fn get_storage_at(&self, address: H256, location: H256) -> ChainResult<H256> {
let storage = self let storage = self
.provider .provider
.get_storage_at(H160::from(address), location, None) .get_storage_at(
ethers_core_types::H160::from(address),
location.into(),
None,
)
.await .await
.map_err(ChainCommunicationError::from_other)?; .map_err(ChainCommunicationError::from_other)?;
Ok(storage) Ok(storage.into())
} }
} }

@ -4,7 +4,9 @@ use ethers::types::transaction::eip2718::TypedTransaction;
use ethers::types::transaction::eip712::Eip712; use ethers::types::transaction::eip712::Eip712;
use ethers_signers::{AwsSigner, AwsSignerError, LocalWallet, Signer, WalletError}; use ethers_signers::{AwsSigner, AwsSignerError, LocalWallet, Signer, WalletError};
use hyperlane_core::{HyperlaneSigner, HyperlaneSignerError, H160, H256}; use hyperlane_core::{
HyperlaneSigner, HyperlaneSignerError, Signature as HyperlaneSignature, H160, H256,
};
/// Ethereum-supported signer types /// Ethereum-supported signer types
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -83,15 +85,15 @@ impl Signer for Signers {
#[async_trait] #[async_trait]
impl HyperlaneSigner for Signers { impl HyperlaneSigner for Signers {
fn eth_address(&self) -> H160 { fn eth_address(&self) -> H160 {
Signer::address(self) Signer::address(self).into()
} }
async fn sign_hash(&self, hash: &H256) -> Result<Signature, HyperlaneSignerError> { async fn sign_hash(&self, hash: &H256) -> Result<HyperlaneSignature, HyperlaneSignerError> {
let mut signature = Signer::sign_message(self, hash) let mut signature = Signer::sign_message(self, hash)
.await .await
.map_err(|err| HyperlaneSignerError::from(Box::new(err) as Box<_>))?; .map_err(|err| HyperlaneSignerError::from(Box::new(err) as Box<_>))?;
signature.v = 28 - (signature.v % 2); signature.v = 28 - (signature.v % 2);
Ok(signature) Ok(signature.into())
} }
} }

@ -6,7 +6,9 @@ use thiserror::Error;
use tokio::sync::{mpsc, oneshot}; use tokio::sync::{mpsc, oneshot};
use tracing::warn; use tracing::warn;
use hyperlane_core::{HyperlaneSigner, HyperlaneSignerError, H160, H256}; use hyperlane_core::{
HyperlaneSigner, HyperlaneSignerError, Signature as HyperlaneSignature, H160, H256,
};
use crate::Signers; use crate::Signers;
@ -50,11 +52,14 @@ impl HyperlaneSigner for SingletonSignerHandle {
self.address self.address
} }
async fn sign_hash(&self, hash: &H256) -> Result<Signature, HyperlaneSignerError> { async fn sign_hash(&self, hash: &H256) -> Result<HyperlaneSignature, HyperlaneSignerError> {
let (tx, rx) = oneshot::channel(); let (tx, rx) = oneshot::channel();
let task = (*hash, tx); let task = (*hash, tx);
self.tx.send(task).map_err(SingletonSignerError::from)?; self.tx.send(task).map_err(SingletonSignerError::from)?;
rx.await.map_err(SingletonSignerError::from)? match rx.await {
Ok(res) => res.map(Into::into),
Err(err) => Err(SingletonSignerError::from(err).into()),
}
} }
} }
@ -94,7 +99,7 @@ impl SingletonSigner {
} }
} }
}; };
if tx.send(res).is_err() { if tx.send(res.map(Into::into)).is_err() {
warn!( warn!(
"Failed to send signature back to the signer handle because the channel was closed" "Failed to send signature back to the signer handle because the channel was closed"
); );

@ -38,7 +38,7 @@ where
let dispatch_fut = tx.send(); let dispatch_fut = tx.send();
let dispatched = dispatch_fut.await?; let dispatched = dispatch_fut.await?;
let tx_hash: H256 = *dispatched; let tx_hash: H256 = (*dispatched).into();
info!(?to, %data, ?tx_hash, "Dispatched tx"); info!(?to, %data, ?tx_hash, "Dispatched tx");
@ -80,7 +80,8 @@ where
} else { } else {
tx.estimate_gas() tx.estimate_gas()
.await? .await?
.saturating_add(U256::from(GAS_ESTIMATE_BUFFER)) .saturating_add(U256::from(GAS_ESTIMATE_BUFFER).into())
.into()
}; };
let Ok((max_fee, max_priority_fee)) = provider.estimate_eip1559_fees(None).await else { let Ok((max_fee, max_priority_fee)) = provider.estimate_eip1559_fees(None).await else {
// Is not EIP 1559 chain // Is not EIP 1559 chain
@ -92,7 +93,7 @@ where
) { ) {
// Polygon needs a max priority fee >= 30 gwei // Polygon needs a max priority fee >= 30 gwei
let min_polygon_fee = U256::from(30_000_000_000u64); let min_polygon_fee = U256::from(30_000_000_000u64);
max_priority_fee.max(min_polygon_fee) max_priority_fee.max(min_polygon_fee.into())
} else { } else {
max_priority_fee max_priority_fee
}; };

@ -83,7 +83,7 @@ where
) -> ChainResult<ContractCall<M, bool>> { ) -> ChainResult<ContractCall<M, bool>> {
let serialized_signature: [u8; 65] = announcement.signature.into(); let serialized_signature: [u8; 65] = announcement.signature.into();
let tx = self.contract.announce( let tx = self.contract.announce(
announcement.value.validator, announcement.value.validator.into(),
announcement.value.storage_location, announcement.value.storage_location,
serialized_signature.into(), serialized_signature.into(),
); );
@ -127,7 +127,9 @@ where
) -> ChainResult<Vec<Vec<String>>> { ) -> ChainResult<Vec<Vec<String>>> {
let storage_locations = self let storage_locations = self
.contract .contract
.get_announced_storage_locations(validators.iter().map(|v| H160::from(*v)).collect()) .get_announced_storage_locations(
validators.iter().map(|v| H160::from(*v).into()).collect(),
)
.call() .call()
.await?; .await?;
Ok(storage_locations) Ok(storage_locations)
@ -136,6 +138,8 @@ where
#[instrument(ret, skip(self))] #[instrument(ret, skip(self))]
async fn announce_tokens_needed(&self, announcement: SignedType<Announcement>) -> Option<U256> { async fn announce_tokens_needed(&self, announcement: SignedType<Announcement>) -> Option<U256> {
let validator = announcement.value.validator; let validator = announcement.value.validator;
let eth_h160: ethers::types::H160 = validator.into();
let Ok(contract_call) = self let Ok(contract_call) = self
.announce_contract_call(announcement, None) .announce_contract_call(announcement, None)
.await .await
@ -144,7 +148,7 @@ where
return None; return None;
}; };
let Ok(balance) = self.provider.get_balance(validator, None).await let Ok(balance) = self.provider.get_balance(eth_h160, None).await
else { else {
trace!("Unable to query balance"); trace!("Unable to query balance");
return None; return None;
@ -155,7 +159,7 @@ where
trace!("Unable to get announce max cost"); trace!("Unable to get announce max cost");
return None; return None;
}; };
Some(max_cost.saturating_sub(balance)) Some(max_cost.saturating_sub(balance).into())
} }
#[instrument(err, ret, skip(self))] #[instrument(err, ret, skip(self))]

@ -1,3 +1,5 @@
cargo-features = ["workspace-inheritance"]
[package] [package]
name = "hyperlane-fuel" name = "hyperlane-fuel"
documentation.workspace = true documentation.workspace = true
@ -8,7 +10,7 @@ publish.workspace = true
version.workspace = true version.workspace = true
[dependencies] [dependencies]
anyhow = "1.0" anyhow.workspace = true
async-trait.workspace = true async-trait.workspace = true
fuels.workspace = true fuels.workspace = true
serde.workspace = true serde.workspace = true

@ -1,7 +1,7 @@
use async_trait::async_trait; use async_trait::async_trait;
use hyperlane_core::{ use hyperlane_core::{
ChainResult, HyperlaneChain, HyperlaneContract, Indexer, InterchainGasPaymaster, ChainResult, HyperlaneChain, HyperlaneContract, IndexRange, Indexer, InterchainGasPaymaster,
}; };
use hyperlane_core::{HyperlaneDomain, HyperlaneProvider, InterchainGasPayment, LogMeta, H256}; use hyperlane_core::{HyperlaneDomain, HyperlaneProvider, InterchainGasPayment, LogMeta, H256};
@ -35,8 +35,7 @@ pub struct FuelInterchainGasPaymasterIndexer {}
impl Indexer<InterchainGasPayment> for FuelInterchainGasPaymasterIndexer { impl Indexer<InterchainGasPayment> for FuelInterchainGasPaymasterIndexer {
async fn fetch_logs( async fn fetch_logs(
&self, &self,
from_block: u32, range: IndexRange,
to_block: u32,
) -> ChainResult<Vec<(InterchainGasPayment, LogMeta)>> { ) -> ChainResult<Vec<(InterchainGasPayment, LogMeta)>> {
todo!() todo!()
} }

@ -4,13 +4,14 @@ use std::num::NonZeroU64;
use async_trait::async_trait; use async_trait::async_trait;
use fuels::prelude::{Bech32ContractId, WalletUnlocked}; use fuels::prelude::{Bech32ContractId, WalletUnlocked};
use hyperlane_core::accumulator::incremental::IncrementalMerkle;
use tracing::instrument; use tracing::instrument;
use hyperlane_core::{ use hyperlane_core::{
accumulator::incremental::IncrementalMerkle, utils::fmt_bytes, ChainCommunicationError, utils::fmt_bytes, ChainCommunicationError, ChainResult, Checkpoint, ContractLocator,
ChainResult, Checkpoint, ContractLocator, HyperlaneAbi, HyperlaneChain, HyperlaneContract, HyperlaneAbi, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneMessage,
HyperlaneDomain, HyperlaneMessage, HyperlaneProvider, Indexer, LogMeta, Mailbox, HyperlaneProvider, IndexRange, Indexer, LogMeta, Mailbox, TxCostEstimate, TxOutcome, H256,
TxCostEstimate, TxOutcome, H256, U256, U256,
}; };
use crate::{ use crate::{
@ -153,11 +154,7 @@ pub struct FuelMailboxIndexer {}
#[async_trait] #[async_trait]
impl Indexer<HyperlaneMessage> for FuelMailboxIndexer { impl Indexer<HyperlaneMessage> for FuelMailboxIndexer {
async fn fetch_logs( async fn fetch_logs(&self, range: IndexRange) -> ChainResult<Vec<(HyperlaneMessage, LogMeta)>> {
&self,
from: u32,
to: u32,
) -> ChainResult<Vec<(HyperlaneMessage, LogMeta)>> {
todo!() todo!()
} }
@ -168,7 +165,7 @@ impl Indexer<HyperlaneMessage> for FuelMailboxIndexer {
#[async_trait] #[async_trait]
impl Indexer<H256> for FuelMailboxIndexer { impl Indexer<H256> for FuelMailboxIndexer {
async fn fetch_logs(&self, from: u32, to: u32) -> ChainResult<Vec<(H256, LogMeta)>> { async fn fetch_logs(&self, range: IndexRange) -> ChainResult<Vec<(H256, LogMeta)>> {
todo!() todo!()
} }

@ -0,0 +1,33 @@
# Enables inheriting dependency/package settings from the workspace root.
cargo-features = ["workspace-inheritance"]
[package]
name = "hyperlane-sealevel"
version = "0.1.0"
edition = "2021"
[dependencies]
# External dependencies, versioned via the workspace root Cargo.toml.
anyhow.workspace = true
async-trait.workspace = true
base64.workspace = true
borsh.workspace = true
jsonrpc-core.workspace = true
num-traits.workspace = true
serde.workspace = true
solana-account-decoder.workspace = true
solana-client.workspace = true
solana-sdk.workspace = true
solana-transaction-status.workspace = true
thiserror.workspace = true
tracing-futures.workspace = true
tracing.workspace = true
url.workspace = true
# In-repo dependencies: hyperlane core plus the sealevel on-chain programs
# and libraries. Program crates are built with "no-entrypoint" so they can
# be linked as libraries rather than deployed binaries.
hyperlane-core = { path = "../../hyperlane-core" }
hyperlane-sealevel-mailbox = { path = "../../sealevel/programs/mailbox", features = ["no-entrypoint"] }
hyperlane-sealevel-interchain-security-module-interface = { path = "../../sealevel/libraries/interchain-security-module-interface" }
hyperlane-sealevel-message-recipient-interface = { path = "../../sealevel/libraries/message-recipient-interface" }
serializable-account-meta = { path = "../../sealevel/libraries/serializable-account-meta" }
account-utils = { path = "../../sealevel/libraries/account-utils" }
multisig-ism = { path = "../../sealevel/libraries/multisig-ism" }
hyperlane-sealevel-multisig-ism-message-id = { path = "../../sealevel/programs/ism/multisig-ism-message-id", features = ["no-entrypoint"] }
hyperlane-sealevel-validator-announce = { path = "../../sealevel/programs/validator-announce", features = ["no-entrypoint"] }

@ -0,0 +1,24 @@
use solana_client::nonblocking::rpc_client::RpcClient;
/// Kludge to implement Debug for RpcClient.
///
/// Newtype wrapper so structs holding an RPC client can still implement
/// `Debug` (see the hand-written impl below); `Deref` passes every other
/// call through to the inner [`RpcClient`].
pub(crate) struct RpcClientWithDebug(RpcClient);
impl RpcClientWithDebug {
    /// Wrap a fresh `RpcClient` pointed at the given RPC endpoint URL.
    pub fn new(rpc_endpoint: String) -> Self {
        let inner = RpcClient::new(rpc_endpoint);
        Self(inner)
    }
}
impl std::fmt::Debug for RpcClientWithDebug {
    /// Emit a fixed placeholder rather than the client's internals, since
    /// the inner `RpcClient` cannot be debug-formatted.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "RpcClient {{ ... }}")
    }
}
impl std::ops::Deref for RpcClientWithDebug {
type Target = RpcClient;
fn deref(&self) -> &Self::Target {
&self.0
}
}

@ -0,0 +1,76 @@
use async_trait::async_trait;
use hyperlane_core::{
ChainResult, ContractLocator, HyperlaneChain, HyperlaneContract, HyperlaneDomain,
HyperlaneProvider, IndexRange, Indexer, InterchainGasPaymaster, InterchainGasPayment, LogMeta,
H256,
};
use tracing::{info, instrument};
use crate::{ConnectionConf, SealevelProvider};
use solana_sdk::pubkey::Pubkey;
/// A reference to an IGP contract on some Sealevel chain
#[derive(Debug)]
pub struct SealevelInterchainGasPaymaster {
    /// On-chain program id (address) of the IGP program.
    program_id: Pubkey,
    /// The Hyperlane domain this IGP instance lives on.
    domain: HyperlaneDomain,
}
impl SealevelInterchainGasPaymaster {
    /// Build an IGP reference from its contract locator; the connection
    /// configuration is currently unused.
    pub fn new(_conf: &ConnectionConf, locator: ContractLocator) -> Self {
        let raw_address: [u8; 32] = locator.address.into();
        Self {
            program_id: Pubkey::from(raw_address),
            domain: locator.domain.clone(),
        }
    }
}
impl HyperlaneContract for SealevelInterchainGasPaymaster {
fn address(&self) -> H256 {
self.program_id.to_bytes().into()
}
}
impl HyperlaneChain for SealevelInterchainGasPaymaster {
    /// The domain this IGP reference was constructed for.
    fn domain(&self) -> &HyperlaneDomain {
        &self.domain
    }

    /// A fresh provider scoped to the same domain.
    fn provider(&self) -> Box<dyn HyperlaneProvider> {
        let provider = SealevelProvider::new(self.domain.clone());
        Box::new(provider)
    }
}
impl InterchainGasPaymaster for SealevelInterchainGasPaymaster {}
/// Struct that retrieves event data for a Sealevel IGP contract
// NOTE(review): currently stateless — actual indexing is stubbed out in the
// `Indexer` impl below, which always returns no logs.
#[derive(Debug)]
pub struct SealevelInterchainGasPaymasterIndexer {}
impl SealevelInterchainGasPaymasterIndexer {
/// Create a new Sealevel IGP indexer.
pub fn new(_conf: &ConnectionConf, _locator: ContractLocator) -> Self {
Self {}
}
}
#[async_trait]
impl Indexer<InterchainGasPayment> for SealevelInterchainGasPaymasterIndexer {
    /// Gas payment indexing is deliberately unimplemented on Sealevel:
    /// every call reports an empty set of logs.
    #[instrument(err, skip(self))]
    async fn fetch_logs(
        &self,
        _range: IndexRange,
    ) -> ChainResult<Vec<(InterchainGasPayment, LogMeta)>> {
        info!("Gas payment indexing not implemented for Sealevel");
        Ok(Vec::new())
    }

    /// As a workaround to avoid gas payment indexing on Sealevel,
    /// we pretend the block number is 1.
    #[instrument(level = "debug", err, ret, skip(self))]
    async fn get_finalized_block_number(&self) -> ChainResult<u32> {
        Ok(1)
    }
}

@ -0,0 +1,94 @@
use async_trait::async_trait;
use num_traits::cast::FromPrimitive;
use solana_sdk::{instruction::Instruction, pubkey::Pubkey, signature::Keypair};
use tracing::warn;
use hyperlane_core::{
ChainCommunicationError, ChainResult, ContractLocator, HyperlaneChain, HyperlaneContract,
HyperlaneDomain, HyperlaneMessage, InterchainSecurityModule, ModuleType, H256, U256,
};
use hyperlane_sealevel_interchain_security_module_interface::InterchainSecurityModuleInstruction;
use serializable_account_meta::SimulationReturnData;
use crate::{utils::simulate_instruction, ConnectionConf, RpcClientWithDebug};
/// A reference to an InterchainSecurityModule contract on some Sealevel chain
#[derive(Debug)]
pub struct SealevelInterchainSecurityModule {
    /// RPC client used to simulate ISM instructions.
    rpc_client: RpcClientWithDebug,
    /// Optional payer keypair; simulation of the `Type` instruction fails
    /// with `SignerUnavailable` when this is `None`.
    payer: Option<Keypair>,
    /// On-chain program id (address) of the ISM program.
    program_id: Pubkey,
    /// The Hyperlane domain this ISM lives on.
    domain: HyperlaneDomain,
}
impl SealevelInterchainSecurityModule {
    /// Create a new sealevel InterchainSecurityModule
    pub fn new(conf: &ConnectionConf, locator: ContractLocator, payer: Option<Keypair>) -> Self {
        let raw_address: [u8; 32] = locator.address.into();
        Self {
            rpc_client: RpcClientWithDebug::new(conf.url.to_string()),
            payer,
            program_id: Pubkey::from(raw_address),
            domain: locator.domain.clone(),
        }
    }
}
impl HyperlaneContract for SealevelInterchainSecurityModule {
fn address(&self) -> H256 {
self.program_id.to_bytes().into()
}
}
impl HyperlaneChain for SealevelInterchainSecurityModule {
    /// The domain this ISM reference was constructed for.
    fn domain(&self) -> &HyperlaneDomain {
        &self.domain
    }

    /// A fresh provider scoped to the same domain.
    fn provider(&self) -> Box<dyn hyperlane_core::HyperlaneProvider> {
        let provider = crate::SealevelProvider::new(self.domain.clone());
        Box::new(provider)
    }
}
#[async_trait]
impl InterchainSecurityModule for SealevelInterchainSecurityModule {
    /// Queries the on-chain ISM for its module type by simulating the
    /// `Type` instruction and decoding the returned `u32` discriminant.
    ///
    /// Unknown discriminants are mapped to `ModuleType::Unused` (with a
    /// warning) rather than treated as an error.
    async fn module_type(&self) -> ChainResult<ModuleType> {
        let instruction = Instruction::new_with_bytes(
            self.program_id,
            &InterchainSecurityModuleInstruction::Type
                .encode()
                .map_err(ChainCommunicationError::from_other)?[..],
            vec![],
        );
        // Simulation needs a payer to sign with; surface a signer error
        // instead of panicking when none was configured.
        let module = simulate_instruction::<SimulationReturnData<u32>>(
            &self.rpc_client,
            self.payer
                .as_ref()
                .ok_or_else(|| ChainCommunicationError::SignerUnavailable)?,
            instruction,
        )
        .await?
        .ok_or_else(|| {
            ChainCommunicationError::from_other_str("No return data was returned from the ISM")
        })?
        .return_data;
        if let Some(module_type) = ModuleType::from_u32(module) {
            Ok(module_type)
        } else {
            warn!(%module, "Unknown module type");
            Ok(ModuleType::Unused)
        }
    }

    /// Cost estimation for `verify` is not supported yet; returns
    /// `Some(0)` so callers treat verification as free rather than failing.
    async fn dry_run_verify(
        &self,
        _message: &HyperlaneMessage,
        _metadata: &[u8],
    ) -> ChainResult<Option<U256>> {
        // TODO: Implement this once we have aggregation ISM support in Sealevel
        Ok(Some(U256::zero()))
    }
}

@ -0,0 +1,26 @@
//! Implementation of hyperlane for Sealevel.
#![forbid(unsafe_code)]
#![warn(missing_docs)]
#![deny(warnings)]
pub use crate::multisig_ism::*;
pub(crate) use client::RpcClientWithDebug;
pub use interchain_gas::*;
pub use interchain_security_module::*;
pub use mailbox::*;
pub use provider::*;
pub use solana_sdk::signer::keypair::Keypair;
pub use trait_builder::*;
pub use validator_announce::*;
mod interchain_gas;
mod interchain_security_module;
mod mailbox;
mod multisig_ism;
mod provider;
mod trait_builder;
mod utils;
mod client;
mod validator_announce;

@ -0,0 +1,748 @@
#![allow(warnings)] // FIXME remove
use std::{collections::HashMap, num::NonZeroU64, str::FromStr as _};
use async_trait::async_trait;
use borsh::{BorshDeserialize, BorshSerialize};
use jsonrpc_core::futures_util::TryFutureExt;
use tracing::{debug, info, instrument, warn};
use hyperlane_core::{
accumulator::incremental::IncrementalMerkle, ChainCommunicationError, ChainResult, Checkpoint,
ContractLocator, Decode as _, Encode as _, HyperlaneAbi, HyperlaneChain, HyperlaneContract,
HyperlaneDomain, HyperlaneMessage, HyperlaneProvider, IndexRange, Indexer, LogMeta, Mailbox,
MessageIndexer, SequenceRange, TxCostEstimate, TxOutcome, H256, U256,
};
use hyperlane_sealevel_interchain_security_module_interface::{
InterchainSecurityModuleInstruction, VerifyInstruction,
};
use hyperlane_sealevel_mailbox::{
accounts::{DispatchedMessageAccount, InboxAccount, OutboxAccount},
instruction::InboxProcess,
mailbox_dispatched_message_pda_seeds, mailbox_inbox_pda_seeds, mailbox_outbox_pda_seeds,
mailbox_process_authority_pda_seeds, mailbox_processed_message_pda_seeds,
};
use hyperlane_sealevel_message_recipient_interface::{
HandleInstruction, MessageRecipientInstruction,
};
use serializable_account_meta::SimulationReturnData;
use solana_account_decoder::{UiAccountEncoding, UiDataSliceConfig};
use solana_client::{
nonblocking::rpc_client::RpcClient,
rpc_config::{RpcAccountInfoConfig, RpcProgramAccountsConfig, RpcSendTransactionConfig},
rpc_filter::{Memcmp, MemcmpEncodedBytes, RpcFilterType},
};
use solana_sdk::{
account::Account,
commitment_config::CommitmentConfig,
compute_budget::ComputeBudgetInstruction,
hash::Hash,
instruction::AccountMeta,
instruction::Instruction,
message::Message,
pubkey::Pubkey,
signature::Signature,
signer::{keypair::Keypair, Signer as _},
transaction::{Transaction, VersionedTransaction},
};
use solana_transaction_status::{
EncodedConfirmedBlock, EncodedTransaction, EncodedTransactionWithStatusMeta,
UiInnerInstructions, UiInstruction, UiMessage, UiParsedInstruction, UiReturnDataEncoding,
UiTransaction, UiTransactionReturnData, UiTransactionStatusMeta,
};
use crate::RpcClientWithDebug;
use crate::{
utils::{get_account_metas, simulate_instruction},
ConnectionConf, SealevelProvider,
};
// Well-known Solana program addresses: the native system program and the
// SPL no-op program.
const SYSTEM_PROGRAM: &str = "11111111111111111111111111111111";
const SPL_NOOP: &str = "noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV";
// FIXME solana uses the first 64 byte signature of a transaction to uniquely identify the
// transaction rather than a 32 byte transaction hash like ethereum. Hash it here to reduce
// size - requires more thought to ensure this makes sense to do...
/// Derive a 32-byte pseudo transaction hash by hashing the transaction's
/// 64-byte Solana signature.
fn signature_to_txn_hash(signature: &Signature) -> H256 {
    let digest = solana_sdk::hash::hash(signature.as_ref());
    H256::from(digest.to_bytes())
}
// The max amount of compute units for a transaction.
// TODO: consider a more sane value and/or use IGP gas payments instead.
// NOTE(review): 1.4M appears intended to match Solana's per-transaction
// compute budget ceiling — confirm against the target cluster.
const PROCESS_COMPUTE_UNITS: u32 = 1_400_000;
/// A reference to a Mailbox contract on some Sealevel chain
pub struct SealevelMailbox {
    /// On-chain program id (address) of the mailbox program.
    program_id: Pubkey,
    /// Inbox PDA key and bump seed, derived from the program id in `new`.
    inbox: (Pubkey, u8),
    /// Outbox PDA key and bump seed, derived from the program id in `new`.
    outbox: (Pubkey, u8),
    /// RPC client, configured with `processed` commitment in `new`.
    rpc_client: RpcClient,
    /// The Hyperlane domain of this mailbox.
    domain: HyperlaneDomain,
    /// Optional payer keypair; required for instruction simulation.
    payer: Option<Keypair>,
}
impl SealevelMailbox {
    /// Create a new sealevel mailbox
    ///
    /// Derives the inbox/outbox PDAs from the mailbox program id and builds
    /// an RPC client against the configured endpoint. Returns `Ok` on
    /// success; the `ChainResult` return type leaves room for fallible setup.
    pub fn new(
        conf: &ConnectionConf,
        locator: ContractLocator,
        payer: Option<Keypair>,
    ) -> ChainResult<Self> {
        // Set the `processed` commitment at rpc level
        let rpc_client =
            RpcClient::new_with_commitment(conf.url.to_string(), CommitmentConfig::processed());
        let program_id = Pubkey::from(<[u8; 32]>::from(locator.address));
        let domain = locator.domain.id();
        let inbox = Pubkey::find_program_address(mailbox_inbox_pda_seeds!(), &program_id);
        let outbox = Pubkey::find_program_address(mailbox_outbox_pda_seeds!(), &program_id);
        debug!(
            "domain={}\nmailbox={}\ninbox=({}, {})\noutbox=({}, {})",
            domain, program_id, inbox.0, inbox.1, outbox.0, outbox.1,
        );
        Ok(SealevelMailbox {
            program_id,
            inbox,
            outbox,
            rpc_client,
            domain: locator.domain.clone(),
            payer,
        })
    }

    /// The inbox PDA key and bump seed.
    pub fn inbox(&self) -> (Pubkey, u8) {
        self.inbox
    }

    /// The outbox PDA key and bump seed.
    pub fn outbox(&self) -> (Pubkey, u8) {
        self.outbox
    }

    /// Simulates an instruction, and attempts to deserialize it into a T.
    /// If no return data at all was returned, returns Ok(None).
    /// If some return data was returned but deserialization was unsuccesful,
    /// an Err is returned.
    ///
    /// Errors with `SignerUnavailable` when no payer keypair is configured.
    pub async fn simulate_instruction<T: BorshDeserialize + BorshSerialize>(
        &self,
        instruction: Instruction,
    ) -> ChainResult<Option<T>> {
        simulate_instruction(
            &self.rpc_client,
            self.payer
                .as_ref()
                .ok_or_else(|| ChainCommunicationError::SignerUnavailable)?,
            instruction,
        )
        .await
    }

    /// Simulates an Instruction that will return a list of AccountMetas.
    ///
    /// Errors with `SignerUnavailable` when no payer keypair is configured.
    pub async fn get_account_metas(
        &self,
        instruction: Instruction,
    ) -> ChainResult<Vec<AccountMeta>> {
        get_account_metas(
            &self.rpc_client,
            self.payer
                .as_ref()
                .ok_or_else(|| ChainCommunicationError::SignerUnavailable)?,
            instruction,
        )
        .await
    }

    /// Gets the recipient ISM given a recipient program id and the ISM getter account metas.
    ///
    /// Simulates the mailbox's `InboxGetRecipientIsm` instruction and reads
    /// the ISM pubkey out of the simulation's return data.
    pub async fn get_recipient_ism(
        &self,
        recipient_program_id: Pubkey,
        ism_getter_account_metas: Vec<AccountMeta>,
    ) -> ChainResult<Pubkey> {
        let mut accounts = vec![
            // Inbox PDA
            AccountMeta::new_readonly(self.inbox.0, false),
            // The recipient program.
            AccountMeta::new_readonly(recipient_program_id, false),
        ];
        accounts.extend(ism_getter_account_metas);
        let instruction = Instruction::new_with_borsh(
            self.program_id,
            &hyperlane_sealevel_mailbox::instruction::Instruction::InboxGetRecipientIsm(
                recipient_program_id,
            ),
            accounts,
        );
        let ism = self
            .simulate_instruction::<SimulationReturnData<Pubkey>>(instruction)
            .await?
            .ok_or(ChainCommunicationError::from_other_str(
                "No return data from InboxGetRecipientIsm instruction",
            ))?
            .return_data;
        Ok(ism)
    }

    /// Gets the account metas required for the recipient's
    /// `MessageRecipientInstruction::InterchainSecurityModule` instruction.
    pub async fn get_ism_getter_account_metas(
        &self,
        recipient_program_id: Pubkey,
    ) -> ChainResult<Vec<AccountMeta>> {
        let instruction =
            hyperlane_sealevel_message_recipient_interface::MessageRecipientInstruction::InterchainSecurityModuleAccountMetas;
        self.get_account_metas_with_instruction_bytes(
            recipient_program_id,
            &instruction
                .encode()
                .map_err(ChainCommunicationError::from_other)?,
            hyperlane_sealevel_message_recipient_interface::INTERCHAIN_SECURITY_MODULE_ACCOUNT_METAS_PDA_SEEDS,
        ).await
    }

    /// Gets the account metas required for the ISM's `Verify` instruction.
    pub async fn get_ism_verify_account_metas(
        &self,
        ism: Pubkey,
        metadata: Vec<u8>,
        message: Vec<u8>,
    ) -> ChainResult<Vec<AccountMeta>> {
        let instruction =
            InterchainSecurityModuleInstruction::VerifyAccountMetas(VerifyInstruction {
                metadata,
                message,
            });
        self.get_account_metas_with_instruction_bytes(
            ism,
            &instruction
                .encode()
                .map_err(ChainCommunicationError::from_other)?,
            hyperlane_sealevel_interchain_security_module_interface::VERIFY_ACCOUNT_METAS_PDA_SEEDS,
        )
        .await
    }

    /// Gets the account metas required for the recipient's `MessageRecipientInstruction::Handle` instruction.
    pub async fn get_handle_account_metas(
        &self,
        message: &HyperlaneMessage,
    ) -> ChainResult<Vec<AccountMeta>> {
        let recipient_program_id = Pubkey::new_from_array(message.recipient.into());
        let instruction = MessageRecipientInstruction::HandleAccountMetas(HandleInstruction {
            sender: message.sender,
            origin: message.origin,
            message: message.body.clone(),
        });
        self.get_account_metas_with_instruction_bytes(
            recipient_program_id,
            &instruction
                .encode()
                .map_err(ChainCommunicationError::from_other)?,
            hyperlane_sealevel_message_recipient_interface::HANDLE_ACCOUNT_METAS_PDA_SEEDS,
        )
        .await
    }

    /// Shared helper: simulates `instruction_data` against `program_id` with
    /// the program's account-metas PDA (derived from the given seeds) as the
    /// sole account, returning the account metas the program reports.
    async fn get_account_metas_with_instruction_bytes(
        &self,
        program_id: Pubkey,
        instruction_data: &[u8],
        account_metas_pda_seeds: &[&[u8]],
    ) -> ChainResult<Vec<AccountMeta>> {
        let (account_metas_pda_key, _) =
            Pubkey::find_program_address(account_metas_pda_seeds, &program_id);
        let instruction = Instruction::new_with_bytes(
            program_id,
            instruction_data,
            vec![AccountMeta::new(account_metas_pda_key, false)],
        );
        self.get_account_metas(instruction).await
    }
}
impl HyperlaneContract for SealevelMailbox {
fn address(&self) -> H256 {
self.program_id.to_bytes().into()
}
}
impl HyperlaneChain for SealevelMailbox {
    /// The domain this mailbox was constructed for.
    fn domain(&self) -> &HyperlaneDomain {
        &self.domain
    }

    /// A fresh provider scoped to the same domain.
    fn provider(&self) -> Box<dyn HyperlaneProvider> {
        let provider = SealevelProvider::new(self.domain.clone());
        Box::new(provider)
    }
}
impl std::fmt::Debug for SealevelMailbox {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self as &dyn HyperlaneContract)
}
}
// TODO refactor the sealevel client into a lib and bin, pull in and use the lib here rather than
// duplicating.
#[async_trait]
impl Mailbox for SealevelMailbox {
    /// Number of messages dispatched, i.e. the leaf count of the outbox merkle
    /// tree. `_maybe_lag` is forwarded to `tree`, which asserts that no
    /// point-in-time (lagged) query is requested.
    #[instrument(err, ret, skip(self))]
    async fn count(&self, _maybe_lag: Option<NonZeroU64>) -> ChainResult<u32> {
        let tree = self.tree(_maybe_lag).await?;
        tree.count()
            .try_into()
            .map_err(ChainCommunicationError::from_other)
    }

    /// A message is considered delivered iff its processed-message PDA account
    /// exists at finalized commitment.
    #[instrument(err, ret, skip(self))]
    async fn delivered(&self, id: H256) -> ChainResult<bool> {
        let (processed_message_account_key, _processed_message_account_bump) =
            Pubkey::find_program_address(
                mailbox_processed_message_pda_seeds!(id),
                &self.program_id,
            );
        let account = self
            .rpc_client
            .get_account_with_commitment(
                &processed_message_account_key,
                CommitmentConfig::finalized(),
            )
            .await
            .map_err(ChainCommunicationError::from_other)?;
        Ok(account.value.is_some())
    }

    /// Fetches the outbox account and returns its incremental merkle tree.
    ///
    /// # Panics
    /// If `lag` is `Some` — Sealevel has no point-in-time queries.
    #[instrument(err, ret, skip(self))]
    async fn tree(&self, lag: Option<NonZeroU64>) -> ChainResult<IncrementalMerkle> {
        assert!(
            lag.is_none(),
            "Sealevel does not support querying point-in-time"
        );
        let outbox_account = self
            .rpc_client
            .get_account_with_commitment(&self.outbox.0, CommitmentConfig::finalized())
            .await
            .map_err(ChainCommunicationError::from_other)?
            .value
            .ok_or_else(|| {
                ChainCommunicationError::from_other_str("Could not find account data")
            })?;
        let outbox = OutboxAccount::fetch(&mut outbox_account.data.as_ref())
            .map_err(ChainCommunicationError::from_other)?
            .into_inner();
        Ok(outbox.tree)
    }

    /// Derives the latest checkpoint (root + leaf index) from the outbox tree.
    ///
    /// # Panics
    /// If `lag` is `Some` — Sealevel has no point-in-time queries.
    #[instrument(err, ret, skip(self))]
    async fn latest_checkpoint(&self, lag: Option<NonZeroU64>) -> ChainResult<Checkpoint> {
        assert!(
            lag.is_none(),
            "Sealevel does not support querying point-in-time"
        );
        let tree = self.tree(lag).await?;
        let root = tree.root();
        let count: u32 = tree
            .count()
            .try_into()
            .map_err(ChainCommunicationError::from_other)?;
        // The checkpoint index is the index of the newest leaf, i.e. count - 1;
        // an empty outbox has no checkpoint.
        let index = count.checked_sub(1).ok_or_else(|| {
            ChainCommunicationError::from_contract_error_str(
                "Outbox is empty, cannot compute checkpoint",
            )
        })?;
        let checkpoint = Checkpoint {
            mailbox_address: self.program_id.to_bytes().into(),
            mailbox_domain: self.domain.id(),
            root,
            index,
        };
        Ok(checkpoint)
    }

    /// Reads the default ISM program id out of the inbox account.
    #[instrument(err, ret, skip(self))]
    async fn default_ism(&self) -> ChainResult<H256> {
        let inbox_account = self
            .rpc_client
            .get_account(&self.inbox.0)
            .await
            .map_err(ChainCommunicationError::from_other)?;
        let inbox = InboxAccount::fetch(&mut inbox_account.data.as_ref())
            .map_err(ChainCommunicationError::from_other)?
            .into_inner();
        Ok(inbox.default_ism.to_bytes().into())
    }

    /// Asks the recipient program which ISM it wants used for verification.
    #[instrument(err, ret, skip(self))]
    async fn recipient_ism(&self, recipient: H256) -> ChainResult<H256> {
        let recipient_program_id = Pubkey::new_from_array(recipient.0);
        // Get the account metas required for the recipient.InterchainSecurityModule instruction.
        let ism_getter_account_metas = self
            .get_ism_getter_account_metas(recipient_program_id)
            .await?;
        // Get the ISM to use.
        let ism_pubkey = self
            .get_recipient_ism(recipient_program_id, ism_getter_account_metas)
            .await?;
        Ok(ism_pubkey.to_bytes().into())
    }

    /// Submits an `InboxProcess` transaction that verifies `message` against
    /// `metadata` with the recipient's ISM and delivers it to the recipient
    /// program. `_tx_gas_limit` is ignored; the compute unit limit is the
    /// fixed `PROCESS_COMPUTE_UNITS`.
    #[instrument(err, ret, skip(self))]
    async fn process(
        &self,
        message: &HyperlaneMessage,
        metadata: &[u8],
        _tx_gas_limit: Option<U256>,
    ) -> ChainResult<TxOutcome> {
        let recipient: Pubkey = message.recipient.0.into();
        let mut encoded_message = vec![];
        message.write_to(&mut encoded_message).unwrap();

        let payer = self
            .payer
            .as_ref()
            .ok_or_else(|| ChainCommunicationError::SignerUnavailable)?;

        let mut instructions = Vec::with_capacity(2);
        // Set the compute unit limit.
        instructions.push(ComputeBudgetInstruction::set_compute_unit_limit(
            PROCESS_COMPUTE_UNITS,
        ));

        // "processed" level commitment does not guarantee finality.
        // roughly 5% of blocks end up on a dropped fork.
        // However we don't want this function to be a bottleneck and there already
        // is retry logic in the agents.
        let commitment = CommitmentConfig::processed();

        let (process_authority_key, _process_authority_bump) = Pubkey::try_find_program_address(
            mailbox_process_authority_pda_seeds!(&recipient),
            &self.program_id,
        )
        .ok_or_else(|| {
            ChainCommunicationError::from_other_str(
                "Could not find program address for process authority",
            )
        })?;
        let (processed_message_account_key, _processed_message_account_bump) =
            Pubkey::try_find_program_address(
                mailbox_processed_message_pda_seeds!(message.id()),
                &self.program_id,
            )
            .ok_or_else(|| {
                ChainCommunicationError::from_other_str(
                    "Could not find program address for processed message account",
                )
            })?;

        // Get the account metas required for the recipient.InterchainSecurityModule instruction.
        let ism_getter_account_metas = self.get_ism_getter_account_metas(recipient).await?;
        // Get the recipient ISM.
        let ism = self
            .get_recipient_ism(recipient, ism_getter_account_metas.clone())
            .await?;

        let ixn =
            hyperlane_sealevel_mailbox::instruction::Instruction::InboxProcess(InboxProcess {
                metadata: metadata.to_vec(),
                message: encoded_message.clone(),
            });
        let ixn_data = ixn
            .into_instruction_data()
            .map_err(ChainCommunicationError::from_other)?;

        // Craft the accounts for the transaction.
        // NOTE: accounts in a Solana instruction are positional — this order
        // must match what the mailbox program expects; do not reorder.
        let mut accounts: Vec<AccountMeta> = vec![
            AccountMeta::new_readonly(payer.pubkey(), true),
            AccountMeta::new_readonly(Pubkey::from_str(SYSTEM_PROGRAM).unwrap(), false),
            AccountMeta::new(self.inbox.0, false),
            AccountMeta::new_readonly(process_authority_key, false),
            AccountMeta::new(processed_message_account_key, false),
        ];
        accounts.extend(ism_getter_account_metas);
        accounts.extend([
            AccountMeta::new_readonly(Pubkey::from_str(SPL_NOOP).unwrap(), false),
            AccountMeta::new_readonly(ism, false),
        ]);

        // Get the account metas required for the ISM.Verify instruction.
        let ism_verify_account_metas = self
            .get_ism_verify_account_metas(ism, metadata.into(), encoded_message)
            .await?;
        accounts.extend(ism_verify_account_metas);

        // The recipient.
        accounts.extend([AccountMeta::new_readonly(recipient, false)]);

        // Get account metas required for the Handle instruction
        let handle_account_metas = self.get_handle_account_metas(message).await?;
        accounts.extend(handle_account_metas);

        let inbox_instruction = Instruction {
            program_id: self.program_id,
            data: ixn_data,
            accounts,
        };
        tracing::info!("accounts={:#?}", inbox_instruction.accounts);
        instructions.push(inbox_instruction);
        let (recent_blockhash, _) = self
            .rpc_client
            .get_latest_blockhash_with_commitment(commitment)
            .await
            .map_err(ChainCommunicationError::from_other)?;
        let txn = Transaction::new_signed_with_payer(
            &instructions,
            Some(&payer.pubkey()),
            &[payer],
            recent_blockhash,
        );
        let signature = self
            .rpc_client
            .send_and_confirm_transaction(&txn)
            .await
            .map_err(ChainCommunicationError::from_other)?;
        tracing::info!("signature={}", signature);
        tracing::info!("txn={:?}", txn);
        // Best-effort confirmation at the weaker "processed" commitment: a
        // confirmation failure is only logged and surfaces as `executed: false`
        // rather than an error.
        let executed = self
            .rpc_client
            .confirm_transaction_with_commitment(&signature, commitment)
            .await
            .map_err(|err| warn!("Failed to confirm inbox process transaction: {}", err))
            .map(|ctx| ctx.value)
            .unwrap_or(false);
        let txid = signature_to_txn_hash(&signature);
        Ok(TxOutcome {
            txid,
            executed,
            // TODO use correct data upon integrating IGP support
            gas_price: U256::zero(),
            gas_used: U256::zero(),
        })
    }

    /// Cost estimation is not implemented for Sealevel yet; returns zeros.
    #[instrument(err, ret, skip(self))]
    async fn process_estimate_costs(
        &self,
        _message: &HyperlaneMessage,
        _metadata: &[u8],
    ) -> ChainResult<TxCostEstimate> {
        // TODO use correct data upon integrating IGP support
        Ok(TxCostEstimate {
            gas_limit: U256::zero(),
            gas_price: U256::zero(),
            l2_gas_limit: None,
        })
    }

    /// Not supported for Sealevel.
    fn process_calldata(&self, _message: &HyperlaneMessage, _metadata: &[u8]) -> Vec<u8> {
        todo!()
    }
}
/// Struct that retrieves event data for a Sealevel Mailbox contract
#[derive(Debug)]
pub struct SealevelMailboxIndexer {
    // RPC client used for account and block-height queries.
    rpc_client: RpcClientWithDebug,
    // Mailbox client, reused for message-count queries.
    mailbox: SealevelMailbox,
    // The mailbox program id, derived from the contract locator address.
    program_id: Pubkey,
}
impl SealevelMailboxIndexer {
pub fn new(conf: &ConnectionConf, locator: ContractLocator) -> ChainResult<Self> {
let program_id = Pubkey::from(<[u8; 32]>::from(locator.address));
let rpc_client = RpcClientWithDebug::new(conf.url.to_string());
let mailbox = SealevelMailbox::new(conf, locator, None)?;
Ok(Self {
program_id,
rpc_client,
mailbox,
})
}
async fn get_finalized_block_number(&self) -> ChainResult<u32> {
let height = self
.rpc_client
.get_block_height()
.await
.map_err(ChainCommunicationError::from_other)?
.try_into()
// FIXME solana block height is u64...
.expect("sealevel block height exceeds u32::MAX");
Ok(height)
}
async fn get_message_with_nonce(&self, nonce: u32) -> ChainResult<(HyperlaneMessage, LogMeta)> {
let target_message_account_bytes = &[
&hyperlane_sealevel_mailbox::accounts::DISPATCHED_MESSAGE_DISCRIMINATOR[..],
&nonce.to_le_bytes()[..],
]
.concat();
let target_message_account_bytes = base64::encode(target_message_account_bytes);
// First, find all accounts with the matching account data.
// To keep responses small in case there is ever more than 1
// match, we don't request the full account data, and just request
// the `unique_message_pubkey` field.
let memcmp = RpcFilterType::Memcmp(Memcmp {
// Ignore the first byte, which is the `initialized` bool flag.
offset: 1,
bytes: MemcmpEncodedBytes::Base64(target_message_account_bytes),
encoding: None,
});
let config = RpcProgramAccountsConfig {
filters: Some(vec![memcmp]),
account_config: RpcAccountInfoConfig {
encoding: Some(UiAccountEncoding::Base64),
// Don't return any data
data_slice: Some(UiDataSliceConfig {
offset: 1 + 8 + 4 + 8, // the offset to get the `unique_message_pubkey` field
length: 32, // the length of the `unique_message_pubkey` field
}),
commitment: Some(CommitmentConfig::finalized()),
min_context_slot: None,
},
with_context: Some(false),
};
let accounts = self
.rpc_client
.get_program_accounts_with_config(&self.mailbox.program_id, config)
.await
.map_err(ChainCommunicationError::from_other)?;
// Now loop through matching accounts and find the one with a valid account pubkey
// that proves it's an actual message storage PDA.
let mut valid_message_storage_pda_pubkey = Option::<Pubkey>::None;
for (pubkey, account) in accounts.iter() {
let unique_message_pubkey = Pubkey::new(&account.data);
let (expected_pubkey, _bump) = Pubkey::try_find_program_address(
mailbox_dispatched_message_pda_seeds!(unique_message_pubkey),
&self.mailbox.program_id,
)
.ok_or_else(|| {
ChainCommunicationError::from_other_str(
"Could not find program address for unique_message_pubkey",
)
})?;
if expected_pubkey == *pubkey {
valid_message_storage_pda_pubkey = Some(*pubkey);
break;
}
}
let valid_message_storage_pda_pubkey =
valid_message_storage_pda_pubkey.ok_or_else(|| {
ChainCommunicationError::from_other_str(
"Could not find valid message storage PDA pubkey",
)
})?;
// Now that we have the valid message storage PDA pubkey, we can get the full account data.
let account = self
.rpc_client
.get_account_with_commitment(
&valid_message_storage_pda_pubkey,
CommitmentConfig::finalized(),
)
.await
.map_err(ChainCommunicationError::from_other)?
.value
.ok_or_else(|| {
ChainCommunicationError::from_other_str("Could not find account data")
})?;
let dispatched_message_account =
DispatchedMessageAccount::fetch(&mut account.data.as_ref())
.map_err(ChainCommunicationError::from_other)?
.into_inner();
let hyperlane_message =
HyperlaneMessage::read_from(&mut &dispatched_message_account.encoded_message[..])?;
Ok((
hyperlane_message,
LogMeta {
address: self.mailbox.program_id.to_bytes().into(),
block_number: dispatched_message_account.slot,
// TODO: get these when building out scraper support.
// It's inconvenient to get these :|
block_hash: H256::zero(),
transaction_hash: H256::zero(),
transaction_index: 0,
log_index: U256::zero(),
},
))
}
}
#[async_trait]
impl MessageIndexer for SealevelMailboxIndexer {
    /// Returns `(message count, finalized tip)` for the mailbox.
    #[instrument(err, skip(self))]
    async fn fetch_count_at_tip(&self) -> ChainResult<(u32, u32)> {
        let tip = Indexer::<HyperlaneMessage>::get_finalized_block_number(self as _).await?;
        // TODO: need to make sure the call and tip are at the same height?
        Ok((self.mailbox.count(None).await?, tip))
    }
}
#[async_trait]
impl Indexer<HyperlaneMessage> for SealevelMailboxIndexer {
    /// Fetches dispatched messages for an inclusive nonce range. Only
    /// sequence-based (nonce) ranges are supported.
    async fn fetch_logs(&self, range: IndexRange) -> ChainResult<Vec<(HyperlaneMessage, LogMeta)>> {
        let SequenceRange(range) = range else {
            return Err(ChainCommunicationError::from_other_str(
                "SealevelMailboxIndexer only supports sequence-based indexing",
            ))
        };

        info!(
            ?range,
            "Fetching SealevelMailboxIndexer HyperlaneMessage logs"
        );

        // The range is inclusive, so it contains `end - start + 1` nonces;
        // `with_capacity(end - start)` under-reserved by one (and would
        // underflow on an empty range). `size_hint().0` is the exact count.
        let mut messages = Vec::with_capacity(range.size_hint().0);
        for nonce in range {
            messages.push(self.get_message_with_nonce(nonce).await?);
        }
        Ok(messages)
    }

    /// Delegates to the inherent method of the same name (inherent methods
    /// take precedence over trait methods, so this does not recurse).
    async fn get_finalized_block_number(&self) -> ChainResult<u32> {
        self.get_finalized_block_number().await
    }
}
#[async_trait]
impl Indexer<H256> for SealevelMailboxIndexer {
    /// H256-log indexing is not implemented for Sealevel yet.
    async fn fetch_logs(&self, _range: IndexRange) -> ChainResult<Vec<(H256, LogMeta)>> {
        todo!()
    }

    /// Delegates to the inherent method of the same name (inherent methods
    /// take precedence over trait methods, so this does not recurse).
    async fn get_finalized_block_number(&self) -> ChainResult<u32> {
        self.get_finalized_block_number().await
    }
}
// Placeholder ABI marker type; none of the `HyperlaneAbi` surface is
// implemented for Sealevel yet.
struct SealevelMailboxAbi;

// TODO figure out how this is used and if we can support it for sealevel.
impl HyperlaneAbi for SealevelMailboxAbi {
    // 8-byte selectors — presumably the Sealevel instruction discriminator
    // size (vs. 4-byte EVM selectors); TODO confirm.
    const SELECTOR_SIZE_BYTES: usize = 8;

    fn fn_map() -> HashMap<Vec<u8>, &'static str> {
        todo!()
    }
}

@ -0,0 +1,140 @@
use async_trait::async_trait;
use hyperlane_core::{
ChainCommunicationError, ChainResult, ContractLocator, HyperlaneChain, HyperlaneContract,
HyperlaneDomain, HyperlaneMessage, HyperlaneProvider, MultisigIsm, RawHyperlaneMessage, H256,
};
use solana_sdk::{
instruction::{AccountMeta, Instruction},
pubkey::Pubkey,
signature::Keypair,
};
use crate::{
utils::{get_account_metas, simulate_instruction},
ConnectionConf, RpcClientWithDebug, SealevelProvider,
};
use hyperlane_sealevel_multisig_ism_message_id::instruction::ValidatorsAndThreshold;
use multisig_ism::interface::{
MultisigIsmInstruction, VALIDATORS_AND_THRESHOLD_ACCOUNT_METAS_PDA_SEEDS,
};
/// A reference to a MultisigIsm contract on some Sealevel chain
#[derive(Debug)]
pub struct SealevelMultisigIsm {
    // RPC client used to simulate query instructions against the chain.
    rpc_client: RpcClientWithDebug,
    // Optional payer keypair; the query methods below fail with
    // `SignerUnavailable` when this is `None`.
    payer: Option<Keypair>,
    // The multisig ISM program id.
    program_id: Pubkey,
    // The Hyperlane domain this ISM is deployed on.
    domain: HyperlaneDomain,
}
impl SealevelMultisigIsm {
    /// Create a new Sealevel MultisigIsm.
    pub fn new(conf: &ConnectionConf, locator: ContractLocator, payer: Option<Keypair>) -> Self {
        Self {
            rpc_client: RpcClientWithDebug::new(conf.url.to_string()),
            payer,
            program_id: Pubkey::from(<[u8; 32]>::from(locator.address)),
            domain: locator.domain.clone(),
        }
    }
}
impl HyperlaneContract for SealevelMultisigIsm {
fn address(&self) -> H256 {
self.program_id.to_bytes().into()
}
}
impl HyperlaneChain for SealevelMultisigIsm {
    fn domain(&self) -> &HyperlaneDomain {
        &self.domain
    }

    fn provider(&self) -> Box<dyn HyperlaneProvider> {
        let provider = SealevelProvider::new(self.domain.clone());
        Box::new(provider)
    }
}
#[async_trait]
impl MultisigIsm for SealevelMultisigIsm {
    /// Returns the validator and threshold needed to verify message
    async fn validators_and_threshold(
        &self,
        message: &HyperlaneMessage,
    ) -> ChainResult<(Vec<H256>, u8)> {
        let message_bytes = RawHyperlaneMessage::from(message).to_vec();

        // Fetch the account metas the program needs for this query, then build
        // the ValidatorsAndThreshold instruction itself.
        let account_metas = self
            .get_validators_and_threshold_account_metas(message_bytes.clone())
            .await?;
        let instruction_data = MultisigIsmInstruction::ValidatorsAndThreshold(message_bytes)
            .encode()
            .map_err(ChainCommunicationError::from_other)?;
        let instruction =
            Instruction::new_with_bytes(self.program_id, &instruction_data[..], account_metas);

        // Simulate the instruction and decode its return data.
        let payer = self
            .payer
            .as_ref()
            .ok_or(ChainCommunicationError::SignerUnavailable)?;
        let simulated =
            simulate_instruction::<ValidatorsAndThreshold>(&self.rpc_client, payer, instruction)
                .await?
                .ok_or_else(|| {
                    ChainCommunicationError::from_other_str(
                        "No return data was returned from the multisig ism",
                    )
                })?;

        let validators: Vec<H256> = simulated.validators.into_iter().map(Into::into).collect();
        Ok((validators, simulated.threshold))
    }
}
impl SealevelMultisigIsm {
    /// Returns the account metas required by the program's
    /// `ValidatorsAndThreshold` instruction, discovered by simulating the
    /// `ValidatorsAndThresholdAccountMetas` query against the PDA that
    /// stores them.
    async fn get_validators_and_threshold_account_metas(
        &self,
        message_bytes: Vec<u8>,
    ) -> ChainResult<Vec<AccountMeta>> {
        let (account_metas_pda_key, _account_metas_pda_bump) = Pubkey::try_find_program_address(
            VALIDATORS_AND_THRESHOLD_ACCOUNT_METAS_PDA_SEEDS,
            &self.program_id,
        )
        .ok_or_else(|| {
            // The seeds here locate the validators-and-threshold account
            // metas PDA (the old "domain data" message was a copy/paste slip).
            ChainCommunicationError::from_other_str(
                "Could not find program address for the validators and threshold account metas",
            )
        })?;

        let instruction = Instruction::new_with_bytes(
            self.program_id,
            &MultisigIsmInstruction::ValidatorsAndThresholdAccountMetas(message_bytes)
                .encode()
                .map_err(ChainCommunicationError::from_other)?[..],
            vec![AccountMeta::new_readonly(account_metas_pda_key, false)],
        );

        get_account_metas(
            &self.rpc_client,
            self.payer
                .as_ref()
                .ok_or_else(|| ChainCommunicationError::SignerUnavailable)?,
            instruction,
        )
        .await
    }
}

@ -0,0 +1,46 @@
use async_trait::async_trait;
use hyperlane_core::{
BlockInfo, ChainResult, HyperlaneChain, HyperlaneDomain, HyperlaneProvider, TxnInfo, H256,
};
/// A wrapper around a Sealevel provider to get generic blockchain information.
#[derive(Debug)]
pub struct SealevelProvider {
    // The Hyperlane domain this provider serves.
    domain: HyperlaneDomain,
}
impl SealevelProvider {
/// Create a new Sealevel provider.
pub fn new(domain: HyperlaneDomain) -> Self {
SealevelProvider { domain }
}
}
impl HyperlaneChain for SealevelProvider {
    fn domain(&self) -> &HyperlaneDomain {
        &self.domain
    }

    fn provider(&self) -> Box<dyn HyperlaneProvider> {
        Box::new(Self::new(self.domain.clone()))
    }
}
#[async_trait]
impl HyperlaneProvider for SealevelProvider {
    /// Not implemented for Sealevel yet.
    async fn get_block_by_hash(&self, _hash: &H256) -> ChainResult<BlockInfo> {
        todo!() // FIXME
    }

    /// Not implemented for Sealevel yet.
    async fn get_txn_by_hash(&self, _hash: &H256) -> ChainResult<TxnInfo> {
        todo!() // FIXME
    }

    async fn is_contract(&self, _address: &H256) -> ChainResult<bool> {
        // FIXME
        // Unconditionally reports `true`; a real contract-existence check is
        // not implemented yet.
        Ok(true)
    }
}

@ -0,0 +1,7 @@
//! The [ed25519 native program][np].
//!
//! [np]: https://docs.solana.com/developing/runtime-facilities/programs#ed25519-program
use crate::solana::pubkey::Pubkey;
use solana_sdk_macro::declare_id;

// Expands to the `ID` static, `check_id`, and `id()` for the ed25519
// signature-verification native program's well-known address.
declare_id!("Ed25519SigVerify111111111111111111111111111");

@ -0,0 +1,360 @@
//! Calculation of transaction fees.
#![allow(clippy::integer_arithmetic)]
use serde_derive::{Deserialize, Serialize};
use super::{ed25519_program, message::Message, secp256k1_program};
// use super::
use log::*;
#[derive(Serialize, Deserialize, Default, PartialEq, Eq, Clone, Copy, Debug)]
#[serde(rename_all = "camelCase")]
/// Deprecated fee schedule: a flat price charged per transaction signature.
pub struct FeeCalculator {
    /// The current cost of a signature.
    ///
    /// This amount may increase/decrease over time based on cluster processing
    /// load.
    pub lamports_per_signature: u64,
}
impl FeeCalculator {
    /// Builds a calculator with the given per-signature price.
    pub fn new(lamports_per_signature: u64) -> Self {
        Self {
            lamports_per_signature,
        }
    }

    #[deprecated(
        since = "1.9.0",
        note = "Please do not use, will no longer be available in the future"
    )]
    pub fn calculate_fee(&self, message: &Message) -> u64 {
        // Count precompile signatures: for each secp256k1/ed25519 instruction
        // with non-empty data, the first data byte is its signature count.
        // Message may not be sanitized here, so out-of-range program indexes
        // are skipped via `get`.
        let num_signatures: u64 = message
            .instructions
            .iter()
            .filter_map(|ix| {
                message
                    .account_keys
                    .get(ix.program_id_index as usize)
                    .map(|id| (ix, id))
            })
            .filter(|(ix, id)| {
                (secp256k1_program::check_id(id) || ed25519_program::check_id(id))
                    && !ix.data.is_empty()
            })
            .map(|(ix, _)| u64::from(ix.data[0]))
            .sum();

        self.lamports_per_signature
            * (u64::from(message.header.num_required_signatures) + num_signatures)
    }
}
/*
#[derive(Serialize, Deserialize, PartialEq, Eq, Clone, Debug, AbiExample)]
#[serde(rename_all = "camelCase")]
pub struct FeeRateGovernor {
// The current cost of a signature This amount may increase/decrease over time based on
// cluster processing load.
#[serde(skip)]
pub lamports_per_signature: u64,
// The target cost of a signature when the cluster is operating around target_signatures_per_slot
// signatures
pub target_lamports_per_signature: u64,
// Used to estimate the desired processing capacity of the cluster. As the signatures for
// recent slots are fewer/greater than this value, lamports_per_signature will decrease/increase
// for the next slot. A value of 0 disables lamports_per_signature fee adjustments
pub target_signatures_per_slot: u64,
pub min_lamports_per_signature: u64,
pub max_lamports_per_signature: u64,
// What portion of collected fees are to be destroyed, as a fraction of std::u8::MAX
pub burn_percent: u8,
}
pub const DEFAULT_TARGET_LAMPORTS_PER_SIGNATURE: u64 = 10_000;
pub const DEFAULT_TARGET_SIGNATURES_PER_SLOT: u64 = 50 * DEFAULT_MS_PER_SLOT;
// Percentage of tx fees to burn
pub const DEFAULT_BURN_PERCENT: u8 = 50;
impl Default for FeeRateGovernor {
fn default() -> Self {
Self {
lamports_per_signature: 0,
target_lamports_per_signature: DEFAULT_TARGET_LAMPORTS_PER_SIGNATURE,
target_signatures_per_slot: DEFAULT_TARGET_SIGNATURES_PER_SLOT,
min_lamports_per_signature: 0,
max_lamports_per_signature: 0,
burn_percent: DEFAULT_BURN_PERCENT,
}
}
}
impl FeeRateGovernor {
pub fn new(target_lamports_per_signature: u64, target_signatures_per_slot: u64) -> Self {
let base_fee_rate_governor = Self {
target_lamports_per_signature,
lamports_per_signature: target_lamports_per_signature,
target_signatures_per_slot,
..FeeRateGovernor::default()
};
Self::new_derived(&base_fee_rate_governor, 0)
}
pub fn new_derived(
base_fee_rate_governor: &FeeRateGovernor,
latest_signatures_per_slot: u64,
) -> Self {
let mut me = base_fee_rate_governor.clone();
if me.target_signatures_per_slot > 0 {
// lamports_per_signature can range from 50% to 1000% of
// target_lamports_per_signature
me.min_lamports_per_signature = std::cmp::max(1, me.target_lamports_per_signature / 2);
me.max_lamports_per_signature = me.target_lamports_per_signature * 10;
// What the cluster should charge at `latest_signatures_per_slot`
let desired_lamports_per_signature =
me.max_lamports_per_signature
.min(me.min_lamports_per_signature.max(
me.target_lamports_per_signature
* std::cmp::min(latest_signatures_per_slot, std::u32::MAX as u64)
as u64
/ me.target_signatures_per_slot as u64,
));
trace!(
"desired_lamports_per_signature: {}",
desired_lamports_per_signature
);
let gap = desired_lamports_per_signature as i64
- base_fee_rate_governor.lamports_per_signature as i64;
if gap == 0 {
me.lamports_per_signature = desired_lamports_per_signature;
} else {
// Adjust fee by 5% of target_lamports_per_signature to produce a smooth
// increase/decrease in fees over time.
let gap_adjust =
std::cmp::max(1, me.target_lamports_per_signature / 20) as i64 * gap.signum();
trace!(
"lamports_per_signature gap is {}, adjusting by {}",
gap,
gap_adjust
);
me.lamports_per_signature =
me.max_lamports_per_signature
.min(me.min_lamports_per_signature.max(
(base_fee_rate_governor.lamports_per_signature as i64 + gap_adjust)
as u64,
));
}
} else {
me.lamports_per_signature = base_fee_rate_governor.target_lamports_per_signature;
me.min_lamports_per_signature = me.target_lamports_per_signature;
me.max_lamports_per_signature = me.target_lamports_per_signature;
}
debug!(
"new_derived(): lamports_per_signature: {}",
me.lamports_per_signature
);
me
}
pub fn clone_with_lamports_per_signature(&self, lamports_per_signature: u64) -> Self {
Self {
lamports_per_signature,
..*self
}
}
/// calculate unburned fee from a fee total, returns (unburned, burned)
pub fn burn(&self, fees: u64) -> (u64, u64) {
let burned = fees * u64::from(self.burn_percent) / 100;
(fees - burned, burned)
}
/// create a FeeCalculator based on current cluster signature throughput
pub fn create_fee_calculator(&self) -> FeeCalculator {
FeeCalculator::new(self.lamports_per_signature)
}
}
#[cfg(test)]
mod tests {
use {
super::*,
crate::{pubkey::Pubkey, system_instruction},
};
#[test]
fn test_fee_rate_governor_burn() {
let mut fee_rate_governor = FeeRateGovernor::default();
assert_eq!(fee_rate_governor.burn(2), (1, 1));
fee_rate_governor.burn_percent = 0;
assert_eq!(fee_rate_governor.burn(2), (2, 0));
fee_rate_governor.burn_percent = 100;
assert_eq!(fee_rate_governor.burn(2), (0, 2));
}
#[test]
#[allow(deprecated)]
fn test_fee_calculator_calculate_fee() {
// Default: no fee.
let message = Message::default();
assert_eq!(FeeCalculator::default().calculate_fee(&message), 0);
// No signature, no fee.
assert_eq!(FeeCalculator::new(1).calculate_fee(&message), 0);
// One signature, a fee.
let pubkey0 = Pubkey::new(&[0; 32]);
let pubkey1 = Pubkey::new(&[1; 32]);
let ix0 = system_instruction::transfer(&pubkey0, &pubkey1, 1);
let message = Message::new(&[ix0], Some(&pubkey0));
assert_eq!(FeeCalculator::new(2).calculate_fee(&message), 2);
// Two signatures, double the fee.
let ix0 = system_instruction::transfer(&pubkey0, &pubkey1, 1);
let ix1 = system_instruction::transfer(&pubkey1, &pubkey0, 1);
let message = Message::new(&[ix0, ix1], Some(&pubkey0));
assert_eq!(FeeCalculator::new(2).calculate_fee(&message), 4);
}
#[test]
#[allow(deprecated)]
fn test_fee_calculator_calculate_fee_secp256k1() {
use crate::instruction::Instruction;
let pubkey0 = Pubkey::new(&[0; 32]);
let pubkey1 = Pubkey::new(&[1; 32]);
let ix0 = system_instruction::transfer(&pubkey0, &pubkey1, 1);
let mut secp_instruction = Instruction {
program_id: crate::secp256k1_program::id(),
accounts: vec![],
data: vec![],
};
let mut secp_instruction2 = Instruction {
program_id: crate::secp256k1_program::id(),
accounts: vec![],
data: vec![1],
};
let message = Message::new(
&[
ix0.clone(),
secp_instruction.clone(),
secp_instruction2.clone(),
],
Some(&pubkey0),
);
assert_eq!(FeeCalculator::new(1).calculate_fee(&message), 2);
secp_instruction.data = vec![0];
secp_instruction2.data = vec![10];
let message = Message::new(&[ix0, secp_instruction, secp_instruction2], Some(&pubkey0));
assert_eq!(FeeCalculator::new(1).calculate_fee(&message), 11);
}
#[test]
fn test_fee_rate_governor_derived_default() {
solana_logger::setup();
let f0 = FeeRateGovernor::default();
assert_eq!(
f0.target_signatures_per_slot,
DEFAULT_TARGET_SIGNATURES_PER_SLOT
);
assert_eq!(
f0.target_lamports_per_signature,
DEFAULT_TARGET_LAMPORTS_PER_SIGNATURE
);
assert_eq!(f0.lamports_per_signature, 0);
let f1 = FeeRateGovernor::new_derived(&f0, DEFAULT_TARGET_SIGNATURES_PER_SLOT);
assert_eq!(
f1.target_signatures_per_slot,
DEFAULT_TARGET_SIGNATURES_PER_SLOT
);
assert_eq!(
f1.target_lamports_per_signature,
DEFAULT_TARGET_LAMPORTS_PER_SIGNATURE
);
assert_eq!(
f1.lamports_per_signature,
DEFAULT_TARGET_LAMPORTS_PER_SIGNATURE / 2
); // min
}
#[test]
fn test_fee_rate_governor_derived_adjust() {
solana_logger::setup();
let mut f = FeeRateGovernor {
target_lamports_per_signature: 100,
target_signatures_per_slot: 100,
..FeeRateGovernor::default()
};
f = FeeRateGovernor::new_derived(&f, 0);
// Ramp fees up
let mut count = 0;
loop {
let last_lamports_per_signature = f.lamports_per_signature;
f = FeeRateGovernor::new_derived(&f, std::u64::MAX);
info!("[up] f.lamports_per_signature={}", f.lamports_per_signature);
// some maximum target reached
if f.lamports_per_signature == last_lamports_per_signature {
break;
}
// shouldn't take more than 1000 steps to get to maximum
assert!(count < 1000);
count += 1;
}
// Ramp fees down
let mut count = 0;
loop {
let last_lamports_per_signature = f.lamports_per_signature;
f = FeeRateGovernor::new_derived(&f, 0);
info!(
"[down] f.lamports_per_signature={}",
f.lamports_per_signature
);
// some minimum target reached
if f.lamports_per_signature == last_lamports_per_signature {
break;
}
// shouldn't take more than 1000 steps to get to minimum
assert!(count < 1000);
count += 1;
}
// Arrive at target rate
let mut count = 0;
while f.lamports_per_signature != f.target_lamports_per_signature {
f = FeeRateGovernor::new_derived(&f, f.target_signatures_per_slot);
info!(
"[target] f.lamports_per_signature={}",
f.lamports_per_signature
);
// shouldn't take more than 100 steps to get to target
assert!(count < 100);
count += 1;
}
}
}
*/

@ -0,0 +1,101 @@
[package]
name = "solana-sdk"
version = "1.14.13"
description = "Solana SDK"
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
repository = "https://github.com/solana-labs/solana"
homepage = "https://solana.com/"
documentation = "https://docs.rs/solana-sdk"
readme = "README.md"
license = "Apache-2.0"
edition = "2021"
[features]
# "program" feature is a legacy feature retained to support v1.3 and older
# programs. New development should not use this feature. Instead use the
# solana-program crate
program = []
default = [
"full" # functionality that is not compatible or needed for on-chain programs
]
full = [
"assert_matches",
"byteorder",
"chrono",
"generic-array",
"memmap2",
"rand",
"rand_chacha",
"serde_json",
# "ed25519-dalek",
"ed25519-dalek-bip32",
# "solana-logger",
"libsecp256k1",
"sha3",
"digest",
]
[dependencies]
assert_matches = { version = "1.5.0", optional = true }
base64 = "0.13"
bincode = "1.3.3"
bitflags = "1.3.1"
borsh = "0.9.3"
bs58 = "0.4.0"
bytemuck = { version = "1.11.0", features = ["derive"] }
byteorder = { version = "1.4.3", optional = true }
chrono = { default-features = false, features = ["alloc"], version = "0.4", optional = true }
derivation-path = { version = "0.2.0", default-features = false }
digest = { version = "0.10.1", optional = true }
ed25519-dalek-bip32 = { version = "0.2.0", optional = true }
ed25519-dalek = { version = "=1.0.1", git = "https://github.com/Eclipse-Laboratories-Inc/ed25519-dalek", branch = "steven/fix-deps" }
generic-array = { version = "0.14.5", default-features = false, features = ["serde", "more_lengths"], optional = true }
hmac = "0.12.1"
itertools = "0.10.3"
lazy_static = "1.4.0"
libsecp256k1 = { version = "0.6.0", optional = true }
log = "0.4.17"
memmap2 = { version = "0.5.3", optional = true }
num-derive = "0.3"
num-traits = "0.2"
pbkdf2 = { version = "0.11.0", default-features = false }
qstring = "0.7.2"
rand = { version = "0.7.0", optional = true }
rand_chacha = { version = "0.2.2", optional = true }
rustversion = "1.0.7"
serde = "1.0.138"
serde_bytes = "0.11"
serde_derive = "1.0.103"
serde_json = { version = "1.0.81", optional = true }
sha2 = "0.10.2"
sha3 = { version = "0.10.1", optional = true }
# solana-frozen-abi = { path = "../frozen-abi", version = "=1.14.13" }
# solana-frozen-abi-macro = { path = "../frozen-abi/macro", version = "=1.14.13" }
# solana-logger = { path = "../logger", version = "=1.14.13", optional = true }
# solana-program = { path = "program", version = "=1.14.13" }
solana-sdk-macro = { path = "macro", version = "=1.14.13" }
thiserror = "1.0"
uriparse = "0.6.4"
wasm-bindgen = "0.2"
[dependencies.curve25519-dalek]
version = "3.2.1"
features = ["serde"]
git = "https://github.com/Eclipse-Laboratories-Inc/curve25519-dalek"
branch = "steven/fix-deps"
[dev-dependencies]
anyhow = "1.0.58"
hex = "0.4.3"
static_assertions = "1.1.0"
tiny-bip39 = "0.8.2"
[build-dependencies]
rustc_version = "0.4"
[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
[lib]
crate-type = ["cdylib", "rlib"]

@ -0,0 +1,23 @@
[package]
name = "solana-sdk-macro"
version = "1.14.13"
description = "Solana SDK Macro"
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
repository = "https://github.com/solana-labs/solana"
homepage = "https://solana.com/"
documentation = "https://docs.rs/solana-sdk-macro"
license = "Apache-2.0"
edition = "2021"
[lib]
proc-macro = true
[dependencies]
bs58 = "0.4.0"
proc-macro2 = "1.0.19"
quote = "1.0"
syn = { version = "1.0", features = ["full", "extra-traits"] }
rustversion = "1.0.7"
[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]

@ -0,0 +1,405 @@
//! Convenience macro to declare a static public key and functions to interact with it
//!
//! Input: a single literal base58 string representation of a program's id
extern crate proc_macro;
use proc_macro::TokenStream;
use proc_macro2::{Delimiter, Span, TokenTree};
use quote::{quote, ToTokens};
use syn::parse::{Parse, ParseStream, Result};
use syn::{parse_macro_input, Expr, LitByte, LitStr};
fn parse_id(
input: ParseStream,
pubkey_type: proc_macro2::TokenStream,
) -> Result<proc_macro2::TokenStream> {
let id = if input.peek(syn::LitStr) {
let id_literal: LitStr = input.parse()?;
parse_pubkey(&id_literal, &pubkey_type)?
} else {
let expr: Expr = input.parse()?;
quote! { #expr }
};
if !input.is_empty() {
let stream: proc_macro2::TokenStream = input.parse()?;
return Err(syn::Error::new_spanned(stream, "unexpected token"));
}
Ok(id)
}
/// Emits the public API generated for a declared id: a `pub static ID`,
/// `check_id`, `id()`, and a unit test checking they agree. (Only `//`
/// comments may be added inside `quote!` — doc comments would become tokens
/// in the generated code.)
fn id_to_tokens(
    id: &proc_macro2::TokenStream,
    pubkey_type: proc_macro2::TokenStream,
    tokens: &mut proc_macro2::TokenStream,
) {
    tokens.extend(quote! {
        /// The static program ID
        pub static ID: #pubkey_type = #id;

        /// Confirms that a given pubkey is equivalent to the program ID
        pub fn check_id(id: &#pubkey_type) -> bool {
            id == &ID
        }

        /// Returns the program ID
        pub fn id() -> #pubkey_type {
            ID
        }

        #[cfg(test)]
        #[test]
        fn test_id() {
            assert!(check_id(&id()));
        }
    });
}
/*
fn deprecated_id_to_tokens(
id: &proc_macro2::TokenStream,
pubkey_type: proc_macro2::TokenStream,
tokens: &mut proc_macro2::TokenStream,
) {
tokens.extend(quote! {
/// The static program ID
pub static ID: #pubkey_type = #id;
/// Confirms that a given pubkey is equivalent to the program ID
#[deprecated()]
pub fn check_id(id: &#pubkey_type) -> bool {
id == &ID
}
/// Returns the program ID
#[deprecated()]
pub fn id() -> #pubkey_type {
ID
}
#[cfg(test)]
#[test]
fn test_id() {
#[allow(deprecated)]
assert!(check_id(&id()));
}
});
}
struct SdkPubkey(proc_macro2::TokenStream);
impl Parse for SdkPubkey {
fn parse(input: ParseStream) -> Result<Self> {
parse_id(input, quote! { ::solana_sdk::pubkey::Pubkey }).map(Self)
}
}
impl ToTokens for SdkPubkey {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
let id = &self.0;
tokens.extend(quote! {#id})
}
}
struct ProgramSdkPubkey(proc_macro2::TokenStream);
impl Parse for ProgramSdkPubkey {
fn parse(input: ParseStream) -> Result<Self> {
parse_id(input, quote! { ::solana_program::pubkey::Pubkey }).map(Self)
}
}
impl ToTokens for ProgramSdkPubkey {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
let id = &self.0;
tokens.extend(quote! {#id})
}
}
*/
/// Parsed input to `declare_id!`: the program id as a token stream.
struct Id(proc_macro2::TokenStream);
impl Parse for Id {
    fn parse(input: ParseStream) -> Result<Self> {
        // Unqualified `Pubkey` is used so the generated code resolves against
        // whatever `Pubkey` type is in scope at the macro call site.
        parse_id(input, quote! { Pubkey }).map(Self)
    }
}
impl ToTokens for Id {
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        id_to_tokens(&self.0, quote! { Pubkey }, tokens)
    }
}
/*
struct IdDeprecated(proc_macro2::TokenStream);
impl Parse for IdDeprecated {
fn parse(input: ParseStream) -> Result<Self> {
parse_id(input, quote! { ::solana_sdk::pubkey::Pubkey }).map(Self)
}
}
impl ToTokens for IdDeprecated {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
deprecated_id_to_tokens(&self.0, quote! { ::solana_sdk::pubkey::Pubkey }, tokens)
}
}
struct ProgramSdkId(proc_macro2::TokenStream);
impl Parse for ProgramSdkId {
fn parse(input: ParseStream) -> Result<Self> {
parse_id(input, quote! { ::solana_program::pubkey::Pubkey }).map(Self)
}
}
impl ToTokens for ProgramSdkId {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
id_to_tokens(&self.0, quote! { ::solana_program::pubkey::Pubkey }, tokens)
}
}
struct ProgramSdkIdDeprecated(proc_macro2::TokenStream);
impl Parse for ProgramSdkIdDeprecated {
fn parse(input: ParseStream) -> Result<Self> {
parse_id(input, quote! { ::solana_program::pubkey::Pubkey }).map(Self)
}
}
impl ToTokens for ProgramSdkIdDeprecated {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
deprecated_id_to_tokens(&self.0, quote! { ::solana_program::pubkey::Pubkey }, tokens)
}
}
#[allow(dead_code)] // `respan` may be compiled out
struct RespanInput {
to_respan: Path,
respan_using: Span,
}
impl Parse for RespanInput {
fn parse(input: ParseStream) -> Result<Self> {
let to_respan: Path = input.parse()?;
let _comma: Token![,] = input.parse()?;
let respan_tree: TokenTree = input.parse()?;
match respan_tree {
TokenTree::Group(g) if g.delimiter() == Delimiter::None => {
let ident: Ident = syn::parse2(g.stream())?;
Ok(RespanInput {
to_respan,
respan_using: ident.span(),
})
}
TokenTree::Ident(i) => Ok(RespanInput {
to_respan,
respan_using: i.span(),
}),
val => Err(syn::Error::new_spanned(
val,
"expected None-delimited group",
)),
}
}
}
/// A proc-macro which respans the tokens in its first argument (a `Path`)
/// to be resolved at the tokens of its second argument.
/// For internal use only.
///
/// There must be exactly one comma in the input,
/// which is used to separate the two arguments.
/// The second argument should be exactly one token.
///
/// For example, `respan!($crate::foo, with_span)`
/// produces the tokens `$crate::foo`, but resolved
/// at the span of `with_span`.
///
/// The input to this function should be very short -
/// its only purpose is to override the span of a token
/// sequence containing `$crate`. For all other purposes,
/// a more general proc-macro should be used.
#[rustversion::since(1.46.0)] // `Span::resolved_at` is stable in 1.46.0 and above
#[proc_macro]
pub fn respan(input: TokenStream) -> TokenStream {
// Obtain the `Path` we are going to respan, and the ident
// whose span we will be using.
let RespanInput {
to_respan,
respan_using,
} = parse_macro_input!(input as RespanInput);
// Respan all of the tokens in the `Path`
let to_respan: proc_macro2::TokenStream = to_respan
.into_token_stream()
.into_iter()
.map(|mut t| {
// Combine the location of the token with the resolution behavior of `respan_using`
let new_span: Span = t.span().resolved_at(respan_using);
t.set_span(new_span);
t
})
.collect();
TokenStream::from(to_respan)
}
#[proc_macro]
pub fn pubkey(input: TokenStream) -> TokenStream {
let id = parse_macro_input!(input as SdkPubkey);
TokenStream::from(quote! {#id})
}
#[proc_macro]
pub fn program_pubkey(input: TokenStream) -> TokenStream {
let id = parse_macro_input!(input as ProgramSdkPubkey);
TokenStream::from(quote! {#id})
}
*/
/// Declares a static program id (`ID`) plus `check_id`/`id` helpers from a
/// base58 string literal or a pubkey expression.
#[proc_macro]
pub fn declare_id(input: TokenStream) -> TokenStream {
    let id = parse_macro_input!(input as Id);
    TokenStream::from(quote! {#id})
}
/*
#[proc_macro]
pub fn declare_deprecated_id(input: TokenStream) -> TokenStream {
let id = parse_macro_input!(input as IdDeprecated);
TokenStream::from(quote! {#id})
}
#[proc_macro]
pub fn program_declare_id(input: TokenStream) -> TokenStream {
let id = parse_macro_input!(input as ProgramSdkId);
TokenStream::from(quote! {#id})
}
#[proc_macro]
pub fn program_declare_deprecated_id(input: TokenStream) -> TokenStream {
let id = parse_macro_input!(input as ProgramSdkIdDeprecated);
TokenStream::from(quote! {#id})
}
*/
/// Decodes a base58 string literal into a compile-time pubkey constructor
/// (`#pubkey_type::new_from_array([...])`).
///
/// Errors if the literal is not valid base58 or does not decode to exactly
/// 32 bytes; the error is spanned to the literal for a precise diagnostic.
fn parse_pubkey(
    id_literal: &LitStr,
    pubkey_type: &proc_macro2::TokenStream,
) -> Result<proc_macro2::TokenStream> {
    let id_vec = bs58::decode(id_literal.value())
        .into_vec()
        .map_err(|_| syn::Error::new_spanned(id_literal, "failed to decode base58 string"))?;
    // A pubkey must be exactly 32 bytes; `try_from` on the slice enforces the
    // length directly (replaces the roundabout `<&[u8]>::clone(&&id_vec[..])`).
    let id_array = <[u8; 32]>::try_from(id_vec.as_slice()).map_err(|_| {
        syn::Error::new_spanned(
            id_literal,
            format!("pubkey array is not 32 bytes long: len={}", id_vec.len()),
        )
    })?;
    // Emit each byte as a literal so the array is fully constant in the output.
    let bytes = id_array.iter().map(|b| LitByte::new(*b, Span::call_site()));
    Ok(quote! {
        #pubkey_type::new_from_array(
            [#(#bytes,)*]
        )
    })
}
/*
struct Pubkeys {
method: Ident,
num: usize,
pubkeys: proc_macro2::TokenStream,
}
impl Parse for Pubkeys {
fn parse(input: ParseStream) -> Result<Self> {
let pubkey_type = quote! {
::solana_sdk::pubkey::Pubkey
};
let method = input.parse()?;
let _comma: Token![,] = input.parse()?;
let (num, pubkeys) = if input.peek(syn::LitStr) {
let id_literal: LitStr = input.parse()?;
(1, parse_pubkey(&id_literal, &pubkey_type)?)
} else if input.peek(Bracket) {
let pubkey_strings;
bracketed!(pubkey_strings in input);
let punctuated: Punctuated<LitStr, Token![,]> =
Punctuated::parse_terminated(&pubkey_strings)?;
let mut pubkeys: Punctuated<proc_macro2::TokenStream, Token![,]> = Punctuated::new();
for string in punctuated.iter() {
pubkeys.push(parse_pubkey(string, &pubkey_type)?);
}
(pubkeys.len(), quote! {#pubkeys})
} else {
let stream: proc_macro2::TokenStream = input.parse()?;
return Err(syn::Error::new_spanned(stream, "unexpected token"));
};
Ok(Pubkeys {
method,
num,
pubkeys,
})
}
}
impl ToTokens for Pubkeys {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
let Pubkeys {
method,
num,
pubkeys,
} = self;
let pubkey_type = quote! {
::solana_sdk::pubkey::Pubkey
};
if *num == 1 {
tokens.extend(quote! {
pub fn #method() -> #pubkey_type {
#pubkeys
}
});
} else {
tokens.extend(quote! {
pub fn #method() -> ::std::vec::Vec<#pubkey_type> {
vec![#pubkeys]
}
});
}
}
}
#[proc_macro]
pub fn pubkeys(input: TokenStream) -> TokenStream {
let pubkeys = parse_macro_input!(input as Pubkeys);
TokenStream::from(quote! {#pubkeys})
}
// The normal `wasm_bindgen` macro generates a .bss section which causes the resulting
// BPF program to fail to load, so for now this stub should be used when building for BPF
#[proc_macro_attribute]
pub fn wasm_bindgen_stub(_attr: TokenStream, item: TokenStream) -> TokenStream {
match parse_macro_input!(item as syn::Item) {
syn::Item::Struct(mut item_struct) => {
if let syn::Fields::Named(fields) = &mut item_struct.fields {
// Strip out any `#[wasm_bindgen]` added to struct fields. This is custom
// syntax supplied by the normal `wasm_bindgen` macro.
for field in fields.named.iter_mut() {
field.attrs.retain(|attr| {
!attr
.path
.segments
.iter()
.any(|segment| segment.ident == "wasm_bindgen")
});
}
}
quote! { #item_struct }
}
item => {
quote!(#item)
}
}
.into()
}
*/

@ -0,0 +1 @@
pub use solana_sdk_macro::declare_id;

@ -0,0 +1,12 @@
//! The [secp256k1 native program][np].
//!
//! [np]: https://docs.solana.com/developing/runtime-facilities/programs#secp256k1-program
//!
//! Constructors for secp256k1 program instructions, and documentation on the
//! program's usage can be found in [`solana_sdk::secp256k1_instruction`].
//!
//! [`solana_sdk::secp256k1_instruction`]: https://docs.rs/solana-sdk/latest/solana_sdk/secp256k1_instruction/index.html
use crate::solana::pubkey::Pubkey;
use solana_sdk_macro::declare_id;
declare_id!("KeccakSecp256k11111111111111111111111111111");

@ -0,0 +1 @@
pub use solana_sdk_macro::declare_id;

@ -0,0 +1,423 @@
//! Convenience macro to declare a static public key and functions to interact with it
//!
//! Input: a single literal base58 string representation of a program's id
extern crate proc_macro;
use proc_macro::{TokenStream};
use syn::{parse_macro_input, LitStr, Expr, LitByte};
use quote::{quote, ToTokens};
use syn::parse::{Parse, ParseStream, Result};
use proc_macro2::{Delimiter, Span, TokenTree};
use {
// proc_macro::TokenStream,
// proc_macro2::{Delimiter, Span, TokenTree},
// quote::{quote, ToTokens},
// std::convert::TryFrom,
// syn::{
// bracketed,
// parse::{Parse, ParseStream, Result},
// parse_macro_input,
// punctuated::Punctuated,
// token::Bracket,
// Expr, Ident, LitByte, LitStr, Path, Token,
// },
};
/// Parses the macro input: either a base58 string literal (decoded into a
/// pubkey constructor) or an arbitrary expression passed through verbatim.
fn parse_id(
    input: ParseStream,
    pubkey_type: proc_macro2::TokenStream,
) -> Result<proc_macro2::TokenStream> {
    let id = if input.peek(syn::LitStr) {
        let id_literal: LitStr = input.parse()?;
        parse_pubkey(&id_literal, &pubkey_type)?
    } else {
        let expr: Expr = input.parse()?;
        quote! { #expr }
    };
    // Anything left over after the id is a caller error; span the diagnostic
    // to the trailing tokens.
    if !input.is_empty() {
        let stream: proc_macro2::TokenStream = input.parse()?;
        return Err(syn::Error::new_spanned(stream, "unexpected token"));
    }
    Ok(id)
}
/// Emits the generated items for `declare_id!`: a static `ID` of the given
/// pubkey type, `check_id`/`id` helper functions, and a self-test asserting
/// the two agree.
fn id_to_tokens(
    id: &proc_macro2::TokenStream,
    pubkey_type: proc_macro2::TokenStream,
    tokens: &mut proc_macro2::TokenStream,
) {
    tokens.extend(quote! {
        /// The static program ID
        pub static ID: #pubkey_type = #id;
        /// Confirms that a given pubkey is equivalent to the program ID
        pub fn check_id(id: &#pubkey_type) -> bool {
            id == &ID
        }
        /// Returns the program ID
        pub fn id() -> #pubkey_type {
            ID
        }
        #[cfg(test)]
        #[test]
        fn test_id() {
            assert!(check_id(&id()));
        }
    });
}
/*
fn deprecated_id_to_tokens(
id: &proc_macro2::TokenStream,
pubkey_type: proc_macro2::TokenStream,
tokens: &mut proc_macro2::TokenStream,
) {
tokens.extend(quote! {
/// The static program ID
pub static ID: #pubkey_type = #id;
/// Confirms that a given pubkey is equivalent to the program ID
#[deprecated()]
pub fn check_id(id: &#pubkey_type) -> bool {
id == &ID
}
/// Returns the program ID
#[deprecated()]
pub fn id() -> #pubkey_type {
ID
}
#[cfg(test)]
#[test]
fn test_id() {
#[allow(deprecated)]
assert!(check_id(&id()));
}
});
}
struct SdkPubkey(proc_macro2::TokenStream);
impl Parse for SdkPubkey {
fn parse(input: ParseStream) -> Result<Self> {
parse_id(input, quote! { ::solana_sdk::pubkey::Pubkey }).map(Self)
}
}
impl ToTokens for SdkPubkey {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
let id = &self.0;
tokens.extend(quote! {#id})
}
}
struct ProgramSdkPubkey(proc_macro2::TokenStream);
impl Parse for ProgramSdkPubkey {
fn parse(input: ParseStream) -> Result<Self> {
parse_id(input, quote! { ::solana_program::pubkey::Pubkey }).map(Self)
}
}
impl ToTokens for ProgramSdkPubkey {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
let id = &self.0;
tokens.extend(quote! {#id})
}
}
*/
/// Parsed input to `declare_id!`: the program id as a token stream.
struct Id(proc_macro2::TokenStream);
impl Parse for Id {
    fn parse(input: ParseStream) -> Result<Self> {
        // This variant hard-codes the fully qualified solana-sdk Pubkey path
        // rather than relying on a `Pubkey` in scope at the call site.
        parse_id(input, quote! { ::solana_sdk::pubkey::Pubkey }).map(Self)
    }
}
impl ToTokens for Id {
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        id_to_tokens(&self.0, quote! { ::solana_sdk::pubkey::Pubkey }, tokens)
    }
}
/*
struct IdDeprecated(proc_macro2::TokenStream);
impl Parse for IdDeprecated {
fn parse(input: ParseStream) -> Result<Self> {
parse_id(input, quote! { ::solana_sdk::pubkey::Pubkey }).map(Self)
}
}
impl ToTokens for IdDeprecated {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
deprecated_id_to_tokens(&self.0, quote! { ::solana_sdk::pubkey::Pubkey }, tokens)
}
}
struct ProgramSdkId(proc_macro2::TokenStream);
impl Parse for ProgramSdkId {
fn parse(input: ParseStream) -> Result<Self> {
parse_id(input, quote! { ::solana_program::pubkey::Pubkey }).map(Self)
}
}
impl ToTokens for ProgramSdkId {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
id_to_tokens(&self.0, quote! { ::solana_program::pubkey::Pubkey }, tokens)
}
}
struct ProgramSdkIdDeprecated(proc_macro2::TokenStream);
impl Parse for ProgramSdkIdDeprecated {
fn parse(input: ParseStream) -> Result<Self> {
parse_id(input, quote! { ::solana_program::pubkey::Pubkey }).map(Self)
}
}
impl ToTokens for ProgramSdkIdDeprecated {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
deprecated_id_to_tokens(&self.0, quote! { ::solana_program::pubkey::Pubkey }, tokens)
}
}
#[allow(dead_code)] // `respan` may be compiled out
struct RespanInput {
to_respan: Path,
respan_using: Span,
}
impl Parse for RespanInput {
fn parse(input: ParseStream) -> Result<Self> {
let to_respan: Path = input.parse()?;
let _comma: Token![,] = input.parse()?;
let respan_tree: TokenTree = input.parse()?;
match respan_tree {
TokenTree::Group(g) if g.delimiter() == Delimiter::None => {
let ident: Ident = syn::parse2(g.stream())?;
Ok(RespanInput {
to_respan,
respan_using: ident.span(),
})
}
TokenTree::Ident(i) => Ok(RespanInput {
to_respan,
respan_using: i.span(),
}),
val => Err(syn::Error::new_spanned(
val,
"expected None-delimited group",
)),
}
}
}
/// A proc-macro which respans the tokens in its first argument (a `Path`)
/// to be resolved at the tokens of its second argument.
/// For internal use only.
///
/// There must be exactly one comma in the input,
/// which is used to separate the two arguments.
/// The second argument should be exactly one token.
///
/// For example, `respan!($crate::foo, with_span)`
/// produces the tokens `$crate::foo`, but resolved
/// at the span of `with_span`.
///
/// The input to this function should be very short -
/// its only purpose is to override the span of a token
/// sequence containing `$crate`. For all other purposes,
/// a more general proc-macro should be used.
#[rustversion::since(1.46.0)] // `Span::resolved_at` is stable in 1.46.0 and above
#[proc_macro]
pub fn respan(input: TokenStream) -> TokenStream {
// Obtain the `Path` we are going to respan, and the ident
// whose span we will be using.
let RespanInput {
to_respan,
respan_using,
} = parse_macro_input!(input as RespanInput);
// Respan all of the tokens in the `Path`
let to_respan: proc_macro2::TokenStream = to_respan
.into_token_stream()
.into_iter()
.map(|mut t| {
// Combine the location of the token with the resolution behavior of `respan_using`
let new_span: Span = t.span().resolved_at(respan_using);
t.set_span(new_span);
t
})
.collect();
TokenStream::from(to_respan)
}
#[proc_macro]
pub fn pubkey(input: TokenStream) -> TokenStream {
let id = parse_macro_input!(input as SdkPubkey);
TokenStream::from(quote! {#id})
}
#[proc_macro]
pub fn program_pubkey(input: TokenStream) -> TokenStream {
let id = parse_macro_input!(input as ProgramSdkPubkey);
TokenStream::from(quote! {#id})
}
*/
/// Declares a static program id (`ID`) plus `check_id`/`id` helpers from a
/// base58 string literal or a pubkey expression.
#[proc_macro]
pub fn declare_id(input: TokenStream) -> TokenStream {
    let id = parse_macro_input!(input as Id);
    TokenStream::from(quote! {#id})
}
/*
#[proc_macro]
pub fn declare_deprecated_id(input: TokenStream) -> TokenStream {
let id = parse_macro_input!(input as IdDeprecated);
TokenStream::from(quote! {#id})
}
#[proc_macro]
pub fn program_declare_id(input: TokenStream) -> TokenStream {
let id = parse_macro_input!(input as ProgramSdkId);
TokenStream::from(quote! {#id})
}
#[proc_macro]
pub fn program_declare_deprecated_id(input: TokenStream) -> TokenStream {
let id = parse_macro_input!(input as ProgramSdkIdDeprecated);
TokenStream::from(quote! {#id})
}
*/
/// Decodes a base58 string literal into a compile-time pubkey constructor
/// (`#pubkey_type::new_from_array([...])`).
///
/// Errors if the literal is not valid base58 or does not decode to exactly
/// 32 bytes; the error is spanned to the literal for a precise diagnostic.
fn parse_pubkey(
    id_literal: &LitStr,
    pubkey_type: &proc_macro2::TokenStream,
) -> Result<proc_macro2::TokenStream> {
    let id_vec = bs58::decode(id_literal.value())
        .into_vec()
        .map_err(|_| syn::Error::new_spanned(id_literal, "failed to decode base58 string"))?;
    // A pubkey must be exactly 32 bytes; `try_from` on the slice enforces the
    // length directly (replaces the roundabout `<&[u8]>::clone(&&id_vec[..])`).
    let id_array = <[u8; 32]>::try_from(id_vec.as_slice()).map_err(|_| {
        syn::Error::new_spanned(
            id_literal,
            format!("pubkey array is not 32 bytes long: len={}", id_vec.len()),
        )
    })?;
    // Emit each byte as a literal so the array is fully constant in the output.
    let bytes = id_array.iter().map(|b| LitByte::new(*b, Span::call_site()));
    Ok(quote! {
        #pubkey_type::new_from_array(
            [#(#bytes,)*]
        )
    })
}
/*
struct Pubkeys {
method: Ident,
num: usize,
pubkeys: proc_macro2::TokenStream,
}
impl Parse for Pubkeys {
fn parse(input: ParseStream) -> Result<Self> {
let pubkey_type = quote! {
::solana_sdk::pubkey::Pubkey
};
let method = input.parse()?;
let _comma: Token![,] = input.parse()?;
let (num, pubkeys) = if input.peek(syn::LitStr) {
let id_literal: LitStr = input.parse()?;
(1, parse_pubkey(&id_literal, &pubkey_type)?)
} else if input.peek(Bracket) {
let pubkey_strings;
bracketed!(pubkey_strings in input);
let punctuated: Punctuated<LitStr, Token![,]> =
Punctuated::parse_terminated(&pubkey_strings)?;
let mut pubkeys: Punctuated<proc_macro2::TokenStream, Token![,]> = Punctuated::new();
for string in punctuated.iter() {
pubkeys.push(parse_pubkey(string, &pubkey_type)?);
}
(pubkeys.len(), quote! {#pubkeys})
} else {
let stream: proc_macro2::TokenStream = input.parse()?;
return Err(syn::Error::new_spanned(stream, "unexpected token"));
};
Ok(Pubkeys {
method,
num,
pubkeys,
})
}
}
impl ToTokens for Pubkeys {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
let Pubkeys {
method,
num,
pubkeys,
} = self;
let pubkey_type = quote! {
::solana_sdk::pubkey::Pubkey
};
if *num == 1 {
tokens.extend(quote! {
pub fn #method() -> #pubkey_type {
#pubkeys
}
});
} else {
tokens.extend(quote! {
pub fn #method() -> ::std::vec::Vec<#pubkey_type> {
vec![#pubkeys]
}
});
}
}
}
#[proc_macro]
pub fn pubkeys(input: TokenStream) -> TokenStream {
let pubkeys = parse_macro_input!(input as Pubkeys);
TokenStream::from(quote! {#pubkeys})
}
// The normal `wasm_bindgen` macro generates a .bss section which causes the resulting
// BPF program to fail to load, so for now this stub should be used when building for BPF
#[proc_macro_attribute]
pub fn wasm_bindgen_stub(_attr: TokenStream, item: TokenStream) -> TokenStream {
match parse_macro_input!(item as syn::Item) {
syn::Item::Struct(mut item_struct) => {
if let syn::Fields::Named(fields) = &mut item_struct.fields {
// Strip out any `#[wasm_bindgen]` added to struct fields. This is custom
// syntax supplied by the normal `wasm_bindgen` macro.
for field in fields.named.iter_mut() {
field.attrs.retain(|attr| {
!attr
.path
.segments
.iter()
.any(|segment| segment.ident == "wasm_bindgen")
});
}
}
quote! { #item_struct }
}
item => {
quote!(#item)
}
}
.into()
}
*/

@ -0,0 +1,61 @@
use url::Url;
use hyperlane_core::{
config::{ConfigErrResultExt, ConfigPath, ConfigResult, FromRawConf},
ChainCommunicationError,
};
/// Sealevel connection configuration
#[derive(Debug, Clone)]
pub struct ConnectionConf {
/// Fully qualified string to connect to
pub url: Url,
}
/// Raw Sealevel connection configuration used for better deserialization errors.
#[derive(Debug, serde::Deserialize)]
pub struct RawConnectionConf {
    // Optional so that a missing `url` surfaces as a structured config error
    // (`MissingConnectionUrl`) instead of a serde deserialization failure.
    url: Option<String>,
}
/// An error type when parsing a connection configuration.
#[derive(thiserror::Error, Debug)]
pub enum ConnectionConfError {
/// Missing `url` for connection configuration
#[error("Missing `url` for connection configuration")]
MissingConnectionUrl,
/// Invalid `url` for connection configuration
#[error("Invalid `url` for connection configuration: `{0}` ({1})")]
InvalidConnectionUrl(String, url::ParseError),
}
impl FromRawConf<'_, RawConnectionConf> for ConnectionConf {
    /// Validates the raw configuration: requires a `url` field and parses it
    /// into a [`Url`], attributing any error to the `url` config path.
    fn from_config_filtered(
        raw: RawConnectionConf,
        cwp: &ConfigPath,
        _filter: (),
    ) -> ConfigResult<Self> {
        use ConnectionConfError::*;
        match raw.url {
            Some(url) => {
                // Keep the original string in the error so the operator can
                // see exactly what failed to parse.
                let parsed = url
                    .parse()
                    .map_err(|e| InvalidConnectionUrl(url, e))
                    .into_config_result(|| cwp.join("url"))?;
                Ok(Self { url: parsed })
            }
            None => Err(MissingConnectionUrl).into_config_result(|| cwp.join("url")),
        }
    }
}
/// Wraps an `anyhow::Error` raised while constructing a Sealevel connection
/// so it can be converted into a `ChainCommunicationError`.
#[derive(thiserror::Error, Debug)]
#[error(transparent)]
struct SealevelNewConnectionError(#[from] anyhow::Error);
impl From<SealevelNewConnectionError> for ChainCommunicationError {
    fn from(err: SealevelNewConnectionError) -> Self {
        // Delegate to the generic "other" constructor; the transparent
        // error impl preserves the underlying anyhow message.
        ChainCommunicationError::from_other(err)
    }
}

@ -0,0 +1,79 @@
use borsh::{BorshDeserialize, BorshSerialize};
use hyperlane_core::{ChainCommunicationError, ChainResult};
use serializable_account_meta::{SerializableAccountMeta, SimulationReturnData};
use solana_client::nonblocking::rpc_client::RpcClient;
use solana_sdk::{
commitment_config::CommitmentConfig,
instruction::{AccountMeta, Instruction},
message::Message,
signature::{Keypair, Signer},
transaction::Transaction,
};
use solana_transaction_status::UiReturnDataEncoding;
/// Simulates an instruction, and attempts to deserialize it into a T.
/// If no return data at all was returned, returns Ok(None).
/// If some return data was returned but deserialization was unsuccessful,
/// an Err is returned.
pub async fn simulate_instruction<T: BorshDeserialize + BorshSerialize>(
    rpc_client: &RpcClient,
    payer: &Keypair,
    instruction: Instruction,
) -> ChainResult<Option<T>> {
    // Anchor the simulation to a finalized blockhash so it runs against
    // state that will not be rolled back.
    let commitment = CommitmentConfig::finalized();
    let (recent_blockhash, _) = rpc_client
        .get_latest_blockhash_with_commitment(commitment)
        .await
        .map_err(ChainCommunicationError::from_other)?;
    // The transaction is only simulated, never submitted, so it can be left
    // unsigned; the payer is still required as the fee-payer account.
    let return_data = rpc_client
        .simulate_transaction(&Transaction::new_unsigned(Message::new_with_blockhash(
            &[instruction],
            Some(&payer.pubkey()),
            &recent_blockhash,
        )))
        .await
        .map_err(ChainCommunicationError::from_other)?
        .value
        .return_data;
    if let Some(return_data) = return_data {
        // `data` is an (encoded-string, encoding) pair; Base64 is the only
        // encoding variant this match needs to handle.
        let bytes = match return_data.data.1 {
            UiReturnDataEncoding::Base64 => {
                base64::decode(return_data.data.0).map_err(ChainCommunicationError::from_other)?
            }
        };
        // Any leftover/undeserializable bytes surface as an Err here.
        let decoded_data =
            T::try_from_slice(bytes.as_slice()).map_err(ChainCommunicationError::from_other)?;
        return Ok(Some(decoded_data));
    }
    Ok(None)
}
/// Simulates an Instruction that will return a list of AccountMetas.
///
/// The instruction is expected to return a borsh-serialized
/// `SimulationReturnData<Vec<SerializableAccountMeta>>`. If the simulation
/// yields no return data at all, an empty list is returned; deserialization
/// failures propagate as errors from `simulate_instruction`.
pub async fn get_account_metas(
    rpc_client: &RpcClient,
    payer: &Keypair,
    instruction: Instruction,
) -> ChainResult<Vec<AccountMeta>> {
    // If there's no data at all, default to an empty vec.
    let account_metas = simulate_instruction::<SimulationReturnData<Vec<SerializableAccountMeta>>>(
        rpc_client,
        payer,
        instruction,
    )
    .await?
    .map(|serializable_account_metas| {
        serializable_account_metas
            .return_data
            .into_iter()
            .map(|serializable_account_meta| serializable_account_meta.into())
            .collect()
    })
    // Idiomatic (clippy-clean) form of `unwrap_or_else(Vec::new)`.
    .unwrap_or_default();
    Ok(account_metas)
}

@ -0,0 +1,129 @@
use async_trait::async_trait;
use tracing::{info, instrument, warn};
use hyperlane_core::{
Announcement, ChainCommunicationError, ChainResult, ContractLocator, HyperlaneChain,
HyperlaneContract, HyperlaneDomain, SignedType, TxOutcome, ValidatorAnnounce, H160, H256, U256,
};
use solana_sdk::{commitment_config::CommitmentConfig, pubkey::Pubkey};
use crate::{ConnectionConf, RpcClientWithDebug};
use hyperlane_sealevel_validator_announce::{
accounts::ValidatorStorageLocationsAccount, validator_storage_locations_pda_seeds,
};
/// A reference to a ValidatorAnnounce contract on some Sealevel chain
#[derive(Debug)]
pub struct SealevelValidatorAnnounce {
program_id: Pubkey,
rpc_client: RpcClientWithDebug,
domain: HyperlaneDomain,
}
impl SealevelValidatorAnnounce {
    /// Create a new Sealevel ValidatorAnnounce
    ///
    /// Builds an RPC client from the configured URL and interprets the
    /// locator address (an H256) as the on-chain program id.
    pub fn new(conf: &ConnectionConf, locator: ContractLocator) -> Self {
        let rpc_client = RpcClientWithDebug::new(conf.url.to_string());
        // H256 -> raw 32 bytes -> Pubkey.
        let program_id = Pubkey::from(<[u8; 32]>::from(locator.address));
        Self {
            program_id,
            rpc_client,
            domain: locator.domain.clone(),
        }
    }
}
impl HyperlaneContract for SealevelValidatorAnnounce {
    /// The contract address is the program id, widened to an H256.
    fn address(&self) -> H256 {
        self.program_id.to_bytes().into()
    }
}
impl HyperlaneChain for SealevelValidatorAnnounce {
    /// The domain this contract was instantiated for.
    fn domain(&self) -> &HyperlaneDomain {
        &self.domain
    }
    /// Constructs a fresh provider for this domain on each call.
    fn provider(&self) -> Box<dyn hyperlane_core::HyperlaneProvider> {
        Box::new(crate::SealevelProvider::new(self.domain.clone()))
    }
}
#[async_trait]
impl ValidatorAnnounce for SealevelValidatorAnnounce {
    /// Returns, for each validator, the storage locations it has announced
    /// on chain. Missing or unparsable accounts yield an empty list for that
    /// validator rather than an error.
    async fn get_announced_storage_locations(
        &self,
        validators: &[H256],
    ) -> ChainResult<Vec<Vec<String>>> {
        info!(program_id=?self.program_id, validators=?validators, "Getting validator storage locations");
        // Get the validator storage location PDAs for each validator.
        let account_pubkeys: Vec<Pubkey> = validators
            .iter()
            .map(|v| {
                let (key, _bump) = Pubkey::find_program_address(
                    // The seed is based off the H160 representation of the validator address.
                    // (The low 20 bytes of the H256 are taken as the H160.)
                    validator_storage_locations_pda_seeds!(H160::from_slice(&v.as_bytes()[12..])),
                    &self.program_id,
                );
                key
            })
            .collect();
        // Get all validator storage location accounts.
        // If an account doesn't exist, it will be returned as None.
        let accounts = self
            .rpc_client
            .get_multiple_accounts_with_commitment(&account_pubkeys, CommitmentConfig::finalized())
            .await
            .map_err(ChainCommunicationError::from_other)?
            .value;
        // Parse the storage locations from each account.
        // If a validator's account doesn't exist, its storage locations will
        // be returned as an empty list.
        let storage_locations: Vec<Vec<String>> = accounts
            .into_iter()
            .map(|account| {
                account
                    .map(|account| {
                        match ValidatorStorageLocationsAccount::fetch(&mut &account.data[..]) {
                            Ok(v) => v.into_inner().storage_locations,
                            Err(err) => {
                                // If there's an error parsing the account, gracefully return an empty list
                                info!(?account, ?err, "Unable to parse validator announce account");
                                vec![]
                            }
                        }
                    })
                    .unwrap_or_default()
            })
            .collect();
        Ok(storage_locations)
    }
    /// Announcing is not performed via the agents on Sealevel, so no tokens
    /// are ever needed.
    async fn announce_tokens_needed(
        &self,
        _announcement: SignedType<Announcement>,
    ) -> Option<U256> {
        Some(U256::zero())
    }
    /// Announcing from within the agents is unsupported on Sealevel: this
    /// logs a warning and reports a zeroed, non-executed outcome.
    #[instrument(err, ret, skip(self))]
    async fn announce(
        &self,
        _announcement: SignedType<Announcement>,
        _tx_gas_limit: Option<U256>,
    ) -> ChainResult<TxOutcome> {
        warn!(
            "Announcing validator storage locations within the agents is not supported on Sealevel"
        );
        Ok(TxOutcome {
            txid: H256::zero(),
            executed: false,
            gas_used: U256::zero(),
            gas_price: U256::zero(),
        })
    }
}

@ -0,0 +1,15 @@
# Relayer configuration for the local Sealevel e2e test environment.
export BASE_CONFIG="sealevel.json"
export RUN_ENV="sealevel"
# Relayer database location (local scratch space).
export HYP_BASE_DB="/tmp/SEALEVEL_DB/relayer"
# Relay between the two local Sealevel test chains.
export HYP_RELAYER_RELAYCHAINS="sealeveltest1,sealeveltest2"
# Prometheus metrics port.
export HYP_BASE_METRICS=9091
# Permit checkpoint syncers that read from the local filesystem.
export HYP_BASE_ALLOWLOCALCHECKPOINTSYNCERS=true
# The first 32 bytes of test-keys/test_deployer-keypair.json as hexadecimal,
# which is the secret key.
export HYP_BASE_CHAINS_SEALEVELTEST1_SIGNER_KEY=892bf6949af4233e62f854cb3618bc1a3ee3341dc71ada08c4d5deca239acf4f
export HYP_BASE_CHAINS_SEALEVELTEST1_SIGNER_TYPE="hexKey"
export HYP_BASE_CHAINS_SEALEVELTEST2_SIGNER_KEY=892bf6949af4233e62f854cb3618bc1a3ee3341dc71ada08c4d5deca239acf4f
export HYP_BASE_CHAINS_SEALEVELTEST2_SIGNER_TYPE="hexKey"
export HYP_BASE_TRACING_LEVEL="debug"

@ -0,0 +1,49 @@
{
"environment": "sealevel",
"chains": {
"sealeveltest1": {
"name": "SealevelTest1",
"domain": "13375",
"addresses": {
"mailbox": "692KZJaoe2KRcD6uhCQDLLXnLNA5ZLnfvdqjE4aX9iu1",
"interchainGasPaymaster": "FixmeFixmeFixmeFixmeFixmeFixmeFixmeFixmeFixm",
"validatorAnnounce": "DH43ae1LwemXAboWwSh8zc9pG8j72gKUEXNi57w8fEnn"
},
"signer": null,
"protocol": "sealevel",
"finalityBlocks": "0",
"connection": {
"type": "http",
"url": "http://localhost:8899"
},
"index": {
"from": "1",
"mode": "sequence"
}
},
"sealeveltest2": {
"name": "SealevelTest2",
"domain": "13376",
"addresses": {
"mailbox": "9tCUWNjpqcf3NUSrtp7vquYVCwbEByvLjZUrhG5dgvhj",
"interchainGasPaymaster": "FixmeFixmeFixmeFixmeFixmeFixmeFixmeFixmeFixm",
"validatorAnnounce": "3Uo5j2Bti9aZtrDqJmAyuwiFaJFPFoNL5yxTpVCNcUhb"
},
"signer": null,
"protocol": "sealevel",
"finalityBlocks": "0",
"connection": {
"type": "http",
"url": "http://localhost:8899"
},
"index": {
"from": "1",
"mode": "sequence"
}
}
},
"tracing": {
"level": "info",
"fmt": "pretty"
}
}

@ -0,0 +1,13 @@
{
"pubkey": "E9VrvAdGRvCguN2XgXsgu9PNmMM3vZsU8LSUrM68j8ty",
"account": {
"lamports": 500000000000000000,
"data": [
"",
"base64"
],
"owner": "11111111111111111111111111111111",
"executable": false,
"rentEpoch": 0
}
}

@ -0,0 +1 @@
[137,43,246,148,154,244,35,62,98,248,84,203,54,24,188,26,62,227,52,29,199,26,218,8,196,213,222,202,35,154,207,79,195,85,53,151,7,182,83,94,59,5,131,252,40,75,87,11,243,118,71,59,195,222,212,148,179,233,253,121,97,210,114,98]

@ -0,0 +1,10 @@
# Validator configuration for the local Sealevel e2e test environment.
export BASE_CONFIG="sealevel.json"
export RUN_ENV="sealevel"
# Validator database location (local scratch space).
export HYP_BASE_DB="/tmp/SEALEVEL_DB/validator"
# Validate the first local Sealevel test chain.
export HYP_VALIDATOR_ORIGINCHAINNAME="sealeveltest1"
# Test-only signing key (hex secret key) — not a production secret.
export HYP_VALIDATOR_VALIDATOR_KEY="59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
export HYP_VALIDATOR_VALIDATOR_TYPE="hexKey"
export HYP_VALIDATOR_REORGPERIOD="0"
export HYP_VALIDATOR_INTERVAL="1"
# Write checkpoints to local storage for the relayer to pick up.
export HYP_VALIDATOR_CHECKPOINTSYNCER_TYPE="localStorage"
export HYP_VALIDATOR_CHECKPOINTSYNCER_PATH="/tmp/test_sealevel_checkpoints_0x70997970c51812dc3a010c7d01b50e0d17dc79c8"

@ -125,6 +125,44 @@
"index": { "index": {
"from": 1941997 "from": 1941997
} }
},
"solanadevnet": {
"name": "solanadevnet",
"domain": 1399811151,
"addresses": {
"mailbox": "4v25Dz9RccqUrTzmfHzJMsjd1iVoNrWzeJ4o6GYuJrVn",
"interchainGasPaymaster": "FixmeFixmeFixmeFixmeFixmeFixmeFixmeFixmeFixm",
"validatorAnnounce": "CMHKvdq4CopDf7qXnDCaTybS15QekQeRt4oUB219yxsp"
},
"protocol": "sealevel",
"finalityBlocks": 0,
"connection": {
"type": "http",
"url": "https://api.devnet.solana.com"
},
"index": {
"from": 1,
"mode": "sequence"
}
},
"zbctestnet": {
"name": "zbctestnet",
"domain": 2053254516,
"addresses": {
"mailbox": "4hW22NXtJ2AXrEVbeAmxjhvxWPSNvfTfAphKXdRBZUco",
"interchainGasPaymaster": "FixmeFixmeFixmeFixmeFixmeFixmeFixmeFixmeFixm",
"validatorAnnounce": "Ar1WiYNhN6F33pj4pcVo5jRMV3V8iJqKiMRSbaDEeqkq"
},
"protocol": "sealevel",
"finalityBlocks": 0,
"connection": {
"type": "http",
"url": "https://api.zebec.eclipsenetwork.xyz:8899"
},
"index": {
"from": 1,
"mode": "sequence"
}
} }
} }
} }

@ -1,3 +1,5 @@
cargo-features = ["workspace-inheritance"]
[package] [package]
name = "ethers-prometheus" name = "ethers-prometheus"
documentation.workspace = true documentation.workspace = true
@ -8,20 +10,19 @@ publish.workspace = true
version.workspace = true version.workspace = true
[dependencies] [dependencies]
prometheus = "0.13" async-trait.workspace = true
ethers = { git = "https://github.com/hyperlane-xyz/ethers-rs", tag = "2023-06-01" } derive-new.workspace = true
derive_builder = "0.12" derive_builder.workspace = true
derive-new = "0.5" ethers.workspace = true
async-trait = { version = "0.1", default-features = false } futures.workspace = true
futures = "0.3" log.workspace = true
parking_lot = { version = "0.12" } maplit.workspace = true
maplit = "1.0" parking_lot.workspace = true
log = "0.4" prometheus.workspace = true
tokio = { workspace = true, features = ["time", "sync", "parking_lot"] } serde = { workspace = true, features = ["derive"], optional = true }
serde_json = { workspace = true }
static_assertions.workspace = true static_assertions.workspace = true
tokio = { workspace = true, features = ["time", "sync", "parking_lot"] }
serde = { version = "1.0", features = ["derive"], optional = true }
serde_json = { version = "1.0", default-features = false }
# enable feature for this crate that is imported by ethers-rs # enable feature for this crate that is imported by ethers-rs
primitive-types = { version = "*", features = ["fp-conversion"] } primitive-types = { version = "*", features = ["fp-conversion"] }

@ -1,3 +1,5 @@
cargo-features = ["workspace-inheritance"]
[package] [package]
name = "hyperlane-base" name = "hyperlane-base"
documentation.workspace = true documentation.workspace = true
@ -10,15 +12,18 @@ version.workspace = true
[dependencies] [dependencies]
# Main block # Main block
async-trait.workspace = true async-trait.workspace = true
bs58.workspace = true
color-eyre = { workspace = true, optional = true } color-eyre = { workspace = true, optional = true }
config.workspace = true config.workspace = true
derive-new.workspace = true derive-new.workspace = true
ed25519-dalek.workspace = true
ethers.workspace = true ethers.workspace = true
eyre.workspace = true eyre.workspace = true
fuels.workspace = true fuels.workspace = true
futures-util.workspace = true futures-util.workspace = true
itertools.workspace = true itertools.workspace = true
mockall = "0.11" mockall = "0.11"
once_cell = "1.16"
paste.workspace = true paste.workspace = true
prometheus.workspace = true prometheus.workspace = true
rocksdb.workspace = true rocksdb.workspace = true
@ -28,7 +33,7 @@ static_assertions.workspace = true
tempfile = { version = "3.3", optional = true } tempfile = { version = "3.3", optional = true }
thiserror.workspace = true thiserror.workspace = true
tokio = { workspace = true, features = ["rt", "macros", "parking_lot"] } tokio = { workspace = true, features = ["rt", "macros", "parking_lot"] }
tracing-error = "0.2" tracing-error.workspace = true
tracing-futures.workspace = true tracing-futures.workspace = true
tracing-subscriber = { workspace = true, features = ["json", "ansi"] } tracing-subscriber = { workspace = true, features = ["json", "ansi"] }
tracing.workspace = true tracing.workspace = true
@ -38,9 +43,10 @@ backtrace = { version = "0.3", optional = true }
backtrace-oneline = { path = "../utils/backtrace-oneline", optional = true } backtrace-oneline = { path = "../utils/backtrace-oneline", optional = true }
ethers-prometheus = { path = "../ethers-prometheus", features = ["serde"] } ethers-prometheus = { path = "../ethers-prometheus", features = ["serde"] }
hyperlane-core = { path = "../hyperlane-core" } hyperlane-core = { path = "../hyperlane-core", features = ["agent"] }
hyperlane-ethereum = { path = "../chains/hyperlane-ethereum" } hyperlane-ethereum = { path = "../chains/hyperlane-ethereum" }
hyperlane-fuel = { path = "../chains/hyperlane-fuel" } hyperlane-fuel = { path = "../chains/hyperlane-fuel" }
hyperlane-sealevel = { path = "../chains/hyperlane-sealevel" }
hyperlane-test = { path = "../hyperlane-test" } hyperlane-test = { path = "../hyperlane-test" }
# these versions are important! # these versions are important!
@ -60,12 +66,10 @@ rusoto_kms = "*"
rusoto_s3 = "*" rusoto_s3 = "*"
rusoto_sts = "*" rusoto_sts = "*"
lazy_static = "1.4"
once_cell = "1.16"
[dev-dependencies] [dev-dependencies]
color-eyre = "0.6" color-eyre.workspace = true
tempfile = "3.3" tempfile = "3.3"
walkdir = { version = "2" }
[features] [features]
default = ["oneline-errors", "color-eyre"] default = ["oneline-errors", "color-eyre"]

@ -12,8 +12,9 @@ use tokio::time::sleep;
use tracing::{debug, warn}; use tracing::{debug, warn};
use hyperlane_core::{ use hyperlane_core::{
ChainResult, ContractSyncCursor, CursorAction, HyperlaneMessage, HyperlaneMessageStore, BlockRange, ChainResult, ContractSyncCursor, CursorAction, HyperlaneMessage,
HyperlaneWatermarkedLogStore, Indexer, LogMeta, MessageIndexer, HyperlaneMessageStore, HyperlaneWatermarkedLogStore, IndexMode, IndexRange, Indexer, LogMeta,
MessageIndexer, SequenceRange,
}; };
use crate::contract_sync::eta_calculator::SyncerEtaCalculator; use crate::contract_sync::eta_calculator::SyncerEtaCalculator;
@ -21,6 +22,8 @@ use crate::contract_sync::eta_calculator::SyncerEtaCalculator;
/// Time window for the moving average used in the eta calculator in seconds. /// Time window for the moving average used in the eta calculator in seconds.
const ETA_TIME_WINDOW: f64 = 2. * 60.; const ETA_TIME_WINDOW: f64 = 2. * 60.;
const MAX_SEQUENCE_RANGE: u32 = 100;
/// A struct that holds the data needed for forwards and backwards /// A struct that holds the data needed for forwards and backwards
/// message sync cursors. /// message sync cursors.
#[derive(Debug, new)] #[derive(Debug, new)]
@ -57,7 +60,7 @@ impl MessageSyncCursor {
&mut self, &mut self,
logs: Vec<(HyperlaneMessage, LogMeta)>, logs: Vec<(HyperlaneMessage, LogMeta)>,
prev_nonce: u32, prev_nonce: u32,
) -> eyre::Result<()> { ) -> Result<()> {
// If we found messages, but did *not* find the message we were looking for, // If we found messages, but did *not* find the message we were looking for,
// we need to rewind to the block at which we found the last message. // we need to rewind to the block at which we found the last message.
if !logs.is_empty() && !logs.iter().any(|m| m.0.nonce == self.next_nonce) { if !logs.is_empty() && !logs.iter().any(|m| m.0.nonce == self.next_nonce) {
@ -79,51 +82,62 @@ impl MessageSyncCursor {
/// A MessageSyncCursor that syncs forwards in perpetuity. /// A MessageSyncCursor that syncs forwards in perpetuity.
#[derive(new)] #[derive(new)]
pub(crate) struct ForwardMessageSyncCursor(MessageSyncCursor); pub(crate) struct ForwardMessageSyncCursor {
cursor: MessageSyncCursor,
mode: IndexMode,
}
impl ForwardMessageSyncCursor { impl ForwardMessageSyncCursor {
async fn get_next_range(&mut self) -> ChainResult<Option<(u32, u32)>> { async fn get_next_range(&mut self) -> ChainResult<Option<IndexRange>> {
// Check if any new messages have been inserted into the DB, // Check if any new messages have been inserted into the DB,
// and update the cursor accordingly. // and update the cursor accordingly.
while self while self
.0 .cursor
.retrieve_message_by_nonce(self.0.next_nonce) .retrieve_message_by_nonce(self.cursor.next_nonce)
.await .await
.is_some() .is_some()
{ {
if let Some(block_number) = self if let Some(block_number) = self
.0 .cursor
.retrieve_dispatched_block_number(self.0.next_nonce) .retrieve_dispatched_block_number(self.cursor.next_nonce)
.await .await
{ {
debug!(next_block = block_number, "Fast forwarding next block"); debug!(next_block = block_number, "Fast forwarding next block");
// It's possible that eth_getLogs dropped logs from this block, therefore we cannot do block_number + 1. // It's possible that eth_getLogs dropped logs from this block, therefore we cannot do block_number + 1.
self.0.next_block = block_number; self.cursor.next_block = block_number;
} }
debug!( debug!(
next_nonce = self.0.next_nonce + 1, next_nonce = self.cursor.next_nonce + 1,
"Fast forwarding next nonce" "Fast forwarding next nonce"
); );
self.0.next_nonce += 1; self.cursor.next_nonce += 1;
} }
let (mailbox_count, tip) = self.0.indexer.fetch_count_at_tip().await?; let (mailbox_count, tip) = self.cursor.indexer.fetch_count_at_tip().await?;
let cursor_count = self.0.next_nonce; let cursor_count = self.cursor.next_nonce;
let cmp = cursor_count.cmp(&mailbox_count); let cmp = cursor_count.cmp(&mailbox_count);
match cmp { match cmp {
Ordering::Equal => { Ordering::Equal => {
// We are synced up to the latest nonce so we don't need to index anything. // We are synced up to the latest nonce so we don't need to index anything.
// We update our next block number accordingly. // We update our next block number accordingly.
self.0.next_block = tip; self.cursor.next_block = tip;
Ok(None) Ok(None)
} }
Ordering::Less => { Ordering::Less => {
// The cursor is behind the mailbox, so we need to index some blocks. // The cursor is behind the mailbox, so we need to index some blocks.
// We attempt to index a range of blocks that is as large as possible. // We attempt to index a range of blocks that is as large as possible.
let from = self.0.next_block; let from = self.cursor.next_block;
let to = u32::min(tip, from + self.0.chunk_size); let to = u32::min(tip, from + self.cursor.chunk_size);
self.0.next_block = to + 1; self.cursor.next_block = to + 1;
Ok(Some((from, to)))
let range = match self.mode {
IndexMode::Block => BlockRange(from..=to),
IndexMode::Sequence => SequenceRange(
cursor_count..=u32::min(mailbox_count, cursor_count + MAX_SEQUENCE_RANGE),
),
};
Ok(Some(range))
} }
Ordering::Greater => { Ordering::Greater => {
// Providers may be internally inconsistent, e.g. RPC request A could hit a node // Providers may be internally inconsistent, e.g. RPC request A could hit a node
@ -149,21 +163,21 @@ impl ContractSyncCursor<HyperlaneMessage> for ForwardMessageSyncCursor {
} }
fn latest_block(&self) -> u32 { fn latest_block(&self) -> u32 {
self.0.next_block.saturating_sub(1) self.cursor.next_block.saturating_sub(1)
} }
/// If the previous block has been synced, rewind to the block number /// If the previous block has been synced, rewind to the block number
/// at which it was dispatched. /// at which it was dispatched.
/// Otherwise, rewind all the way back to the start block. /// Otherwise, rewind all the way back to the start block.
async fn update(&mut self, logs: Vec<(HyperlaneMessage, LogMeta)>) -> eyre::Result<()> { async fn update(&mut self, logs: Vec<(HyperlaneMessage, LogMeta)>) -> Result<()> {
let prev_nonce = self.0.next_nonce.saturating_sub(1); let prev_nonce = self.cursor.next_nonce.saturating_sub(1);
// We may wind up having re-indexed messages that are previous to the nonce that we are looking for. // We may wind up having re-indexed messages that are previous to the nonce that we are looking for.
// We should not consider these messages when checking for continuity errors. // We should not consider these messages when checking for continuity errors.
let filtered_logs = logs let filtered_logs = logs
.into_iter() .into_iter()
.filter(|m| m.0.nonce >= self.0.next_nonce) .filter(|m| m.0.nonce >= self.cursor.next_nonce)
.collect(); .collect();
self.0.update(filtered_logs, prev_nonce).await self.cursor.update(filtered_logs, prev_nonce).await
} }
} }
@ -172,10 +186,11 @@ impl ContractSyncCursor<HyperlaneMessage> for ForwardMessageSyncCursor {
pub(crate) struct BackwardMessageSyncCursor { pub(crate) struct BackwardMessageSyncCursor {
cursor: MessageSyncCursor, cursor: MessageSyncCursor,
synced: bool, synced: bool,
mode: IndexMode,
} }
impl BackwardMessageSyncCursor { impl BackwardMessageSyncCursor {
async fn get_next_range(&mut self) -> Option<(u32, u32)> { async fn get_next_range(&mut self) -> Option<IndexRange> {
// Check if any new messages have been inserted into the DB, // Check if any new messages have been inserted into the DB,
// and update the cursor accordingly. // and update the cursor accordingly.
while !self.synced { while !self.synced {
@ -212,14 +227,23 @@ impl BackwardMessageSyncCursor {
let to = self.cursor.next_block; let to = self.cursor.next_block;
let from = to.saturating_sub(self.cursor.chunk_size); let from = to.saturating_sub(self.cursor.chunk_size);
self.cursor.next_block = from.saturating_sub(1); self.cursor.next_block = from.saturating_sub(1);
// TODO: Consider returning a proper ETA for the backwards pass
Some((from, to)) let next_nonce = self.cursor.next_nonce;
let range = match self.mode {
IndexMode::Block => BlockRange(from..=to),
IndexMode::Sequence => {
SequenceRange(next_nonce.saturating_sub(MAX_SEQUENCE_RANGE)..=next_nonce)
}
};
Some(range)
} }
/// If the previous block has been synced, rewind to the block number /// If the previous block has been synced, rewind to the block number
/// at which it was dispatched. /// at which it was dispatched.
/// Otherwise, rewind all the way back to the start block. /// Otherwise, rewind all the way back to the start block.
async fn update(&mut self, logs: Vec<(HyperlaneMessage, LogMeta)>) -> eyre::Result<()> { async fn update(&mut self, logs: Vec<(HyperlaneMessage, LogMeta)>) -> Result<()> {
let prev_nonce = self.cursor.next_nonce.saturating_add(1); let prev_nonce = self.cursor.next_nonce.saturating_add(1);
// We may wind up having re-indexed messages that are previous to the nonce that we are looking for. // We may wind up having re-indexed messages that are previous to the nonce that we are looking for.
// We should not consider these messages when checking for continuity errors. // We should not consider these messages when checking for continuity errors.
@ -249,16 +273,13 @@ impl ForwardBackwardMessageSyncCursor {
indexer: Arc<dyn MessageIndexer>, indexer: Arc<dyn MessageIndexer>,
db: Arc<dyn HyperlaneMessageStore>, db: Arc<dyn HyperlaneMessageStore>,
chunk_size: u32, chunk_size: u32,
mode: IndexMode,
) -> Result<Self> { ) -> Result<Self> {
let (count, tip) = indexer.fetch_count_at_tip().await?; let (count, tip) = indexer.fetch_count_at_tip().await?;
let forward_cursor = ForwardMessageSyncCursor::new(MessageSyncCursor::new( let forward_cursor = ForwardMessageSyncCursor::new(
indexer.clone(), MessageSyncCursor::new(indexer.clone(), db.clone(), chunk_size, tip, tip, count),
db.clone(), mode,
chunk_size, );
tip,
tip,
count,
));
let backward_cursor = BackwardMessageSyncCursor::new( let backward_cursor = BackwardMessageSyncCursor::new(
MessageSyncCursor::new( MessageSyncCursor::new(
@ -270,6 +291,7 @@ impl ForwardBackwardMessageSyncCursor {
count.saturating_sub(1), count.saturating_sub(1),
), ),
count == 0, count == 0,
mode,
); );
Ok(Self { Ok(Self {
forward: forward_cursor, forward: forward_cursor,
@ -299,10 +321,10 @@ impl ContractSyncCursor<HyperlaneMessage> for ForwardBackwardMessageSyncCursor {
} }
fn latest_block(&self) -> u32 { fn latest_block(&self) -> u32 {
self.forward.0.next_block.saturating_sub(1) self.forward.cursor.next_block.saturating_sub(1)
} }
async fn update(&mut self, logs: Vec<(HyperlaneMessage, LogMeta)>) -> eyre::Result<()> { async fn update(&mut self, logs: Vec<(HyperlaneMessage, LogMeta)>) -> Result<()> {
match self.direction { match self.direction {
SyncDirection::Forward => self.forward.update(logs).await, SyncDirection::Forward => self.forward.update(logs).await,
SyncDirection::Backward => self.backward.update(logs).await, SyncDirection::Backward => self.backward.update(logs).await,
@ -392,19 +414,24 @@ where
}; };
let rate_limit = self.get_rate_limit().await?; let rate_limit = self.get_rate_limit().await?;
if let Some(rate_limit) = rate_limit { let action = if let Some(rate_limit) = rate_limit {
return Ok((CursorAction::Sleep(rate_limit), eta)); CursorAction::Sleep(rate_limit)
} else { } else {
self.from = to + 1; self.from = to + 1;
return Ok((CursorAction::Query((from, to)), eta)); // TODO: note at the moment IndexModes are not considered here, and
} // block-based indexing is always used.
// This should be changed when Sealevel IGP indexing is implemented,
// along with a refactor to better accommodate indexing modes.
CursorAction::Query(BlockRange(from..=to))
};
Ok((action, eta))
} }
fn latest_block(&self) -> u32 { fn latest_block(&self) -> u32 {
self.from.saturating_sub(1) self.from.saturating_sub(1)
} }
async fn update(&mut self, _: Vec<(T, LogMeta)>) -> eyre::Result<()> { async fn update(&mut self, _: Vec<(T, LogMeta)>) -> Result<()> {
// Store a relatively conservative view of the high watermark, which should allow a single watermark to be // Store a relatively conservative view of the high watermark, which should allow a single watermark to be
// safely shared across multiple cursors, so long as they are running sufficiently in sync // safely shared across multiple cursors, so long as they are running sufficiently in sync
self.db self.db

@ -62,17 +62,16 @@ where
indexed_height.set(cursor.latest_block() as i64); indexed_height.set(cursor.latest_block() as i64);
let Ok((action, eta)) = cursor.next_action().await else { continue }; let Ok((action, eta)) = cursor.next_action().await else { continue };
match action { match action {
CursorAction::Query((from, to)) => { CursorAction::Query(range) => {
debug!(from, to, "Looking for for events in block range"); debug!(?range, "Looking for for events in index range");
let logs = self.indexer.fetch_logs(from, to).await?; let logs = self.indexer.fetch_logs(range.clone()).await?;
info!( info!(
from, ?range,
to,
num_logs = logs.len(), num_logs = logs.len(),
estimated_time_to_sync = fmt_sync_time(eta), estimated_time_to_sync = fmt_sync_time(eta),
"Found log(s) in block range" "Found log(s) in index range"
); );
// Store deliveries // Store deliveries
let stored = self.db.store_logs(&logs).await?; let stored = self.db.store_logs(&logs).await?;
@ -105,6 +104,7 @@ where
let index_settings = IndexSettings { let index_settings = IndexSettings {
from: watermark.unwrap_or(index_settings.from), from: watermark.unwrap_or(index_settings.from),
chunk_size: index_settings.chunk_size, chunk_size: index_settings.chunk_size,
mode: index_settings.mode,
}; };
Box::new( Box::new(
RateLimitedContractSyncCursor::new( RateLimitedContractSyncCursor::new(
@ -137,19 +137,23 @@ impl MessageContractSync {
index_settings.from, index_settings.from,
next_nonce, next_nonce,
); );
Box::new(ForwardMessageSyncCursor::new(forward_data)) Box::new(ForwardMessageSyncCursor::new(
forward_data,
index_settings.mode,
))
} }
/// Returns a new cursor to be used for syncing dispatched messages from the indexer /// Returns a new cursor to be used for syncing dispatched messages from the indexer
pub async fn forward_backward_message_sync_cursor( pub async fn forward_backward_message_sync_cursor(
&self, &self,
chunk_size: u32, index_settings: IndexSettings,
) -> Box<dyn ContractSyncCursor<HyperlaneMessage>> { ) -> Box<dyn ContractSyncCursor<HyperlaneMessage>> {
Box::new( Box::new(
ForwardBackwardMessageSyncCursor::new( ForwardBackwardMessageSyncCursor::new(
self.indexer.clone(), self.indexer.clone(),
self.db.clone(), self.db.clone(),
chunk_size, index_settings.chunk_size,
index_settings.mode,
) )
.await .await
.unwrap(), .unwrap(),

@ -5,7 +5,7 @@ use async_trait::async_trait;
use eyre::Result; use eyre::Result;
use paste::paste; use paste::paste;
use tokio::time::sleep; use tokio::time::sleep;
use tracing::{debug, trace}; use tracing::{debug, instrument, trace};
use hyperlane_core::{ use hyperlane_core::{
HyperlaneDomain, HyperlaneLogStore, HyperlaneMessage, HyperlaneMessageStore, HyperlaneDomain, HyperlaneLogStore, HyperlaneMessage, HyperlaneMessageStore,
@ -215,6 +215,7 @@ impl HyperlaneRocksDB {
#[async_trait] #[async_trait]
impl HyperlaneLogStore<HyperlaneMessage> for HyperlaneRocksDB { impl HyperlaneLogStore<HyperlaneMessage> for HyperlaneRocksDB {
/// Store a list of dispatched messages and their associated metadata. /// Store a list of dispatched messages and their associated metadata.
#[instrument(skip_all)]
async fn store_logs(&self, messages: &[(HyperlaneMessage, LogMeta)]) -> Result<u32> { async fn store_logs(&self, messages: &[(HyperlaneMessage, LogMeta)]) -> Result<u32> {
let mut stored = 0; let mut stored = 0;
for (message, meta) in messages { for (message, meta) in messages {
@ -233,6 +234,7 @@ impl HyperlaneLogStore<HyperlaneMessage> for HyperlaneRocksDB {
#[async_trait] #[async_trait]
impl HyperlaneLogStore<InterchainGasPayment> for HyperlaneRocksDB { impl HyperlaneLogStore<InterchainGasPayment> for HyperlaneRocksDB {
/// Store a list of interchain gas payments and their associated metadata. /// Store a list of interchain gas payments and their associated metadata.
#[instrument(skip_all)]
async fn store_logs(&self, payments: &[(InterchainGasPayment, LogMeta)]) -> Result<u32> { async fn store_logs(&self, payments: &[(InterchainGasPayment, LogMeta)]) -> Result<u32> {
let mut new = 0; let mut new = 0;
for (payment, meta) in payments { for (payment, meta) in payments {

@ -8,16 +8,17 @@ use ethers_prometheus::middleware::{
ChainInfo, ContractInfo, PrometheusMiddlewareConf, WalletInfo, ChainInfo, ContractInfo, PrometheusMiddlewareConf, WalletInfo,
}; };
use hyperlane_core::{ use hyperlane_core::{
config::*, AggregationIsm, CcipReadIsm, ContractLocator, HyperlaneAbi, HyperlaneDomain, config::*, utils::hex_or_base58_to_h256, AggregationIsm, CcipReadIsm, ContractLocator,
HyperlaneDomainProtocol, HyperlaneProvider, HyperlaneSigner, Indexer, InterchainGasPaymaster, HyperlaneAbi, HyperlaneDomain, HyperlaneDomainProtocol, HyperlaneProvider, HyperlaneSigner,
InterchainGasPayment, InterchainSecurityModule, Mailbox, MessageIndexer, MultisigIsm, IndexMode, Indexer, InterchainGasPaymaster, InterchainGasPayment, InterchainSecurityModule,
RoutingIsm, ValidatorAnnounce, H160, H256, Mailbox, MessageIndexer, MultisigIsm, RoutingIsm, ValidatorAnnounce, H256,
}; };
use hyperlane_ethereum::{ use hyperlane_ethereum::{
self as h_eth, BuildableWithProvider, EthereumInterchainGasPaymasterAbi, EthereumMailboxAbi, self as h_eth, BuildableWithProvider, EthereumInterchainGasPaymasterAbi, EthereumMailboxAbi,
EthereumValidatorAnnounceAbi, EthereumValidatorAnnounceAbi,
}; };
use hyperlane_fuel as h_fuel; use hyperlane_fuel as h_fuel;
use hyperlane_sealevel as h_sealevel;
use crate::{ use crate::{
settings::signers::{BuildableWithSignerConf, RawSignerConf}, settings::signers::{BuildableWithSignerConf, RawSignerConf},
@ -31,6 +32,8 @@ pub enum ChainConnectionConf {
Ethereum(h_eth::ConnectionConf), Ethereum(h_eth::ConnectionConf),
/// Fuel configuration /// Fuel configuration
Fuel(h_fuel::ConnectionConf), Fuel(h_fuel::ConnectionConf),
/// Sealevel configuration.
Sealevel(h_sealevel::ConnectionConf),
} }
/// Specify the chain name (enum variant) under the `chain` key /// Specify the chain name (enum variant) under the `chain` key
@ -39,6 +42,7 @@ pub enum ChainConnectionConf {
enum RawChainConnectionConf { enum RawChainConnectionConf {
Ethereum(h_eth::RawConnectionConf), Ethereum(h_eth::RawConnectionConf),
Fuel(h_fuel::RawConnectionConf), Fuel(h_fuel::RawConnectionConf),
Sealevel(h_sealevel::RawConnectionConf),
#[serde(other)] #[serde(other)]
Unknown, Unknown,
} }
@ -53,6 +57,7 @@ impl FromRawConf<'_, RawChainConnectionConf> for ChainConnectionConf {
match raw { match raw {
Ethereum(r) => Ok(Self::Ethereum(r.parse_config(&cwp.join("connection"))?)), Ethereum(r) => Ok(Self::Ethereum(r.parse_config(&cwp.join("connection"))?)),
Fuel(r) => Ok(Self::Fuel(r.parse_config(&cwp.join("connection"))?)), Fuel(r) => Ok(Self::Fuel(r.parse_config(&cwp.join("connection"))?)),
Sealevel(r) => Ok(Self::Sealevel(r.parse_config(&cwp.join("connection"))?)),
Unknown => { Unknown => {
Err(eyre!("Unknown chain protocol")).into_config_result(|| cwp.join("protocol")) Err(eyre!("Unknown chain protocol")).into_config_result(|| cwp.join("protocol"))
} }
@ -65,6 +70,7 @@ impl ChainConnectionConf {
match self { match self {
Self::Ethereum(_) => HyperlaneDomainProtocol::Ethereum, Self::Ethereum(_) => HyperlaneDomainProtocol::Ethereum,
Self::Fuel(_) => HyperlaneDomainProtocol::Fuel, Self::Fuel(_) => HyperlaneDomainProtocol::Fuel,
Self::Sealevel(_) => HyperlaneDomainProtocol::Sealevel,
} }
} }
} }
@ -107,13 +113,7 @@ impl FromRawConf<'_, RawCoreContractAddresses> for CoreContractAddresses {
) )
}) })
.take_err(&mut err, path) .take_err(&mut err, path)
.and_then(|v| { .and_then(|v| hex_or_base58_to_h256(&v).take_err(&mut err, path))
if v.len() <= 42 {
v.parse::<H160>().take_err(&mut err, path).map(Into::into)
} else {
v.parse().take_err(&mut err, path)
}
})
}}; }};
} }
@ -137,6 +137,8 @@ pub struct IndexSettings {
pub from: u32, pub from: u32,
/// The number of blocks to query at once when indexing contracts. /// The number of blocks to query at once when indexing contracts.
pub chunk_size: u32, pub chunk_size: u32,
/// The indexing mode.
pub mode: IndexMode,
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
@ -144,6 +146,7 @@ pub struct IndexSettings {
struct RawIndexSettings { struct RawIndexSettings {
from: Option<StrOrInt>, from: Option<StrOrInt>,
chunk: Option<StrOrInt>, chunk: Option<StrOrInt>,
mode: Option<IndexMode>,
} }
impl FromRawConf<'_, RawIndexSettings> for IndexSettings { impl FromRawConf<'_, RawIndexSettings> for IndexSettings {
@ -165,7 +168,11 @@ impl FromRawConf<'_, RawIndexSettings> for IndexSettings {
.unwrap_or(1999); .unwrap_or(1999);
err.into_result()?; err.into_result()?;
Ok(Self { from, chunk_size }) Ok(Self {
from,
chunk_size,
mode: raw.mode.unwrap_or_default(),
})
} }
} }
@ -312,8 +319,8 @@ impl ChainConf {
self.build_ethereum(conf, &locator, metrics, h_eth::HyperlaneProviderBuilder {}) self.build_ethereum(conf, &locator, metrics, h_eth::HyperlaneProviderBuilder {})
.await .await
} }
ChainConnectionConf::Fuel(_) => todo!(), ChainConnectionConf::Fuel(_) => todo!(),
ChainConnectionConf::Sealevel(_) => todo!(),
} }
.context(ctx) .context(ctx)
} }
@ -335,6 +342,12 @@ impl ChainConf {
.map(|m| Box::new(m) as Box<dyn Mailbox>) .map(|m| Box::new(m) as Box<dyn Mailbox>)
.map_err(Into::into) .map_err(Into::into)
} }
ChainConnectionConf::Sealevel(conf) => {
let keypair = self.sealevel_signer().await.context(ctx)?;
h_sealevel::SealevelMailbox::new(conf, locator, keypair)
.map(|m| Box::new(m) as Box<dyn Mailbox>)
.map_err(Into::into)
}
} }
.context(ctx) .context(ctx)
} }
@ -361,6 +374,10 @@ impl ChainConf {
} }
ChainConnectionConf::Fuel(_) => todo!(), ChainConnectionConf::Fuel(_) => todo!(),
ChainConnectionConf::Sealevel(conf) => {
let indexer = Box::new(h_sealevel::SealevelMailboxIndexer::new(conf, locator)?);
Ok(indexer as Box<dyn MessageIndexer>)
}
} }
.context(ctx) .context(ctx)
} }
@ -387,6 +404,10 @@ impl ChainConf {
} }
ChainConnectionConf::Fuel(_) => todo!(), ChainConnectionConf::Fuel(_) => todo!(),
ChainConnectionConf::Sealevel(conf) => {
let indexer = Box::new(h_sealevel::SealevelMailboxIndexer::new(conf, locator)?);
Ok(indexer as Box<dyn Indexer<H256>>)
}
} }
.context(ctx) .context(ctx)
} }
@ -412,6 +433,12 @@ impl ChainConf {
} }
ChainConnectionConf::Fuel(_) => todo!(), ChainConnectionConf::Fuel(_) => todo!(),
ChainConnectionConf::Sealevel(conf) => {
let paymaster = Box::new(h_sealevel::SealevelInterchainGasPaymaster::new(
conf, locator,
));
Ok(paymaster as Box<dyn InterchainGasPaymaster>)
}
} }
.context(ctx) .context(ctx)
} }
@ -439,6 +466,12 @@ impl ChainConf {
} }
ChainConnectionConf::Fuel(_) => todo!(), ChainConnectionConf::Fuel(_) => todo!(),
ChainConnectionConf::Sealevel(conf) => {
let indexer = Box::new(h_sealevel::SealevelInterchainGasPaymasterIndexer::new(
conf, locator,
));
Ok(indexer as Box<dyn Indexer<InterchainGasPayment>>)
}
} }
.context(ctx) .context(ctx)
} }
@ -456,6 +489,10 @@ impl ChainConf {
} }
ChainConnectionConf::Fuel(_) => todo!(), ChainConnectionConf::Fuel(_) => todo!(),
ChainConnectionConf::Sealevel(conf) => {
let va = Box::new(h_sealevel::SealevelValidatorAnnounce::new(conf, locator));
Ok(va as Box<dyn ValidatorAnnounce>)
}
} }
.context("Building ValidatorAnnounce") .context("Building ValidatorAnnounce")
} }
@ -482,6 +519,13 @@ impl ChainConf {
} }
ChainConnectionConf::Fuel(_) => todo!(), ChainConnectionConf::Fuel(_) => todo!(),
ChainConnectionConf::Sealevel(conf) => {
let keypair = self.sealevel_signer().await.context(ctx)?;
let ism = Box::new(h_sealevel::SealevelInterchainSecurityModule::new(
conf, locator, keypair,
));
Ok(ism as Box<dyn InterchainSecurityModule>)
}
} }
.context(ctx) .context(ctx)
} }
@ -502,6 +546,11 @@ impl ChainConf {
} }
ChainConnectionConf::Fuel(_) => todo!(), ChainConnectionConf::Fuel(_) => todo!(),
ChainConnectionConf::Sealevel(conf) => {
let keypair = self.sealevel_signer().await.context(ctx)?;
let ism = Box::new(h_sealevel::SealevelMultisigIsm::new(conf, locator, keypair));
Ok(ism as Box<dyn MultisigIsm>)
}
} }
.context(ctx) .context(ctx)
} }
@ -525,6 +574,9 @@ impl ChainConf {
} }
ChainConnectionConf::Fuel(_) => todo!(), ChainConnectionConf::Fuel(_) => todo!(),
ChainConnectionConf::Sealevel(_) => {
Err(eyre!("Sealevel does not support routing ISM yet")).context(ctx)
}
} }
.context(ctx) .context(ctx)
} }
@ -548,6 +600,9 @@ impl ChainConf {
} }
ChainConnectionConf::Fuel(_) => todo!(), ChainConnectionConf::Fuel(_) => todo!(),
ChainConnectionConf::Sealevel(_) => {
Err(eyre!("Sealevel does not support aggregation ISM yet")).context(ctx)
}
} }
.context(ctx) .context(ctx)
} }
@ -571,6 +626,9 @@ impl ChainConf {
} }
ChainConnectionConf::Fuel(_) => todo!(), ChainConnectionConf::Fuel(_) => todo!(),
ChainConnectionConf::Sealevel(_) => {
Err(eyre!("Sealevel does not support CCIP read ISM yet")).context(ctx)
}
} }
.context(ctx) .context(ctx)
} }
@ -593,6 +651,10 @@ impl ChainConf {
}) })
} }
async fn sealevel_signer(&self) -> Result<Option<h_sealevel::Keypair>> {
self.signer().await
}
/// Get a clone of the ethereum metrics conf with correctly configured /// Get a clone of the ethereum metrics conf with correctly configured
/// contract information. /// contract information.
fn metrics_conf( fn metrics_conf(
@ -610,7 +672,7 @@ impl ChainConf {
if let Some(signer) = signer { if let Some(signer) = signer {
cfg.wallets cfg.wallets
.entry(signer.eth_address()) .entry(signer.eth_address().into())
.or_insert_with(|| WalletInfo { .or_insert_with(|| WalletInfo {
name: Some(agent_name.into()), name: Some(agent_name.into()),
}); });

@ -8,6 +8,9 @@ use rusoto_kms::KmsClient;
use serde::Deserialize; use serde::Deserialize;
use tracing::instrument; use tracing::instrument;
use ed25519_dalek::SecretKey;
use hyperlane_sealevel::Keypair;
use super::aws_credentials::AwsChainCredentialsProvider; use super::aws_credentials::AwsChainCredentialsProvider;
use hyperlane_core::{config::*, H256}; use hyperlane_core::{config::*, H256};
@ -139,3 +142,19 @@ impl BuildableWithSignerConf for fuels::prelude::WalletUnlocked {
}) })
} }
} }
#[async_trait]
impl BuildableWithSignerConf for Keypair {
async fn build(conf: &SignerConf) -> Result<Self, Report> {
Ok(match conf {
SignerConf::HexKey { key } => {
let secret = SecretKey::from_bytes(key.as_bytes())
.context("Invalid sealevel ed25519 secret key")?;
Keypair::from_bytes(&ed25519_dalek::Keypair::from(secret).to_bytes())
.context("Unable to create Keypair")?
}
SignerConf::Aws { .. } => bail!("Aws signer is not supported by fuel"),
SignerConf::Node => bail!("Node signer is not supported by fuel"),
})
}
}

@ -2,13 +2,12 @@ use core::str::FromStr;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::PathBuf; use std::path::PathBuf;
use ethers::types::Address;
use eyre::{eyre, Context, Report, Result}; use eyre::{eyre, Context, Report, Result};
use prometheus::{IntGauge, IntGaugeVec}; use prometheus::{IntGauge, IntGaugeVec};
use rusoto_core::Region; use rusoto_core::Region;
use serde::Deserialize; use serde::Deserialize;
use hyperlane_core::config::*; use hyperlane_core::{config::*, H160};
use crate::{CheckpointSyncer, LocalStorage, MultisigCheckpointSyncer, S3Storage}; use crate::{CheckpointSyncer, LocalStorage, MultisigCheckpointSyncer, S3Storage};
@ -165,7 +164,7 @@ impl MultisigCheckpointSyncerConf {
let gauge = let gauge =
validator_checkpoint_index.with_label_values(&[origin, &key.to_lowercase()]); validator_checkpoint_index.with_label_values(&[origin, &key.to_lowercase()]);
if let Ok(conf) = value.build(Some(gauge)) { if let Ok(conf) = value.build(Some(gauge)) {
checkpoint_syncers.insert(Address::from_str(key)?, conf.into()); checkpoint_syncers.insert(H160::from_str(key)?, conf.into());
} else { } else {
continue; continue;
} }

@ -2,7 +2,6 @@ use std::collections::{hash_map::Entry, HashMap};
use std::sync::Arc; use std::sync::Arc;
use derive_new::new; use derive_new::new;
use ethers::prelude::Address;
use eyre::Result; use eyre::Result;
use tracing::{debug, instrument, trace}; use tracing::{debug, instrument, trace};
@ -18,7 +17,7 @@ use crate::CheckpointSyncer;
#[derive(Clone, Debug, new)] #[derive(Clone, Debug, new)]
pub struct MultisigCheckpointSyncer { pub struct MultisigCheckpointSyncer {
/// The checkpoint syncer for each valid validator signer address /// The checkpoint syncer for each valid validator signer address
checkpoint_syncers: HashMap<Address, Arc<dyn CheckpointSyncer>>, checkpoint_syncers: HashMap<H160, Arc<dyn CheckpointSyncer>>,
} }
impl MultisigCheckpointSyncer { impl MultisigCheckpointSyncer {

@ -7,8 +7,7 @@ use eyre::Context;
use walkdir::WalkDir; use walkdir::WalkDir;
use hyperlane_base::{RawSettings, Settings}; use hyperlane_base::{RawSettings, Settings};
use hyperlane_core::config::*; use hyperlane_core::{config::*, KnownHyperlaneDomain};
use hyperlane_core::KnownHyperlaneDomain;
/// Relative path to the `hyperlane-monorepo/rust/config/` /// Relative path to the `hyperlane-monorepo/rust/config/`
/// directory, which is where the agent's config files /// directory, which is where the agent's config files
@ -60,7 +59,11 @@ fn config_paths(root: &Path) -> Vec<String> {
/// of a test env. This test simply tries to do some sanity checks /// of a test env. This test simply tries to do some sanity checks
/// against the integrity of that data. /// against the integrity of that data.
fn hyperlane_settings() -> Vec<Settings> { fn hyperlane_settings() -> Vec<Settings> {
let root = Path::new(AGENT_CONFIG_PATH_ROOT); // Determine the config path based on the crate root so that
// the debugger can also find the config file.
let crate_root = env!("CARGO_MANIFEST_DIR");
let config_path = format!("{}/{}", crate_root, AGENT_CONFIG_PATH_ROOT);
let root = Path::new(config_path.as_str());
let paths = config_paths(root); let paths = config_paths(root);
let files: Vec<String> = paths let files: Vec<String> = paths
.iter() .iter()
@ -69,18 +72,19 @@ fn hyperlane_settings() -> Vec<Settings> {
paths paths
.iter() .iter()
.zip(files.iter()) .zip(files.iter())
.map(|(p, f)| { // Filter out config files that can't be parsed as json (e.g. env files)
.filter_map(|(p, f)| {
let raw: RawSettings = Config::builder() let raw: RawSettings = Config::builder()
.add_source(config::File::from_str(f.as_str(), FileFormat::Json)) .add_source(config::File::from_str(f.as_str(), FileFormat::Json))
.build() .build()
.unwrap() .ok()?
.try_deserialize::<RawSettings>() .try_deserialize::<RawSettings>()
.unwrap_or_else(|e| { .unwrap_or_else(|e| {
panic!("!cfg({}): {:?}: {}", p, e, f); panic!("!cfg({}): {:?}: {}", p, e, f);
}); });
Settings::from_config(raw, &ConfigPath::default()) Settings::from_config(raw, &ConfigPath::default())
.context("Config parsing error, please check the config reference (https://docs.hyperlane.xyz/docs/operators/agent-configuration/configuration-reference)") .context("Config parsing error, please check the config reference (https://docs.hyperlane.xyz/docs/operators/agent-configuration/configuration-reference)")
.unwrap() .ok()
}) })
.collect() .collect()
} }

@ -1,3 +1,5 @@
cargo-features = ["workspace-inheritance"]
[package] [package]
name = "hyperlane-core" name = "hyperlane-core"
documentation.workspace = true documentation.workspace = true
@ -7,19 +9,18 @@ license-file.workspace = true
publish.workspace = true publish.workspace = true
version.workspace = true version.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
async-trait.workspace = true async-trait.workspace = true
auto_impl = "1.0" auto_impl = "1.0"
borsh.workspace = true
bs58.workspace = true
bytes = { version = "1", features = ["serde"] } bytes = { version = "1", features = ["serde"] }
convert_case = "0.6" convert_case = "0.6"
derive-new.workspace = true derive-new.workspace = true
ethers-providers.workspace = true derive_more.workspace = true
ethers-core.workspace = true
ethers-contract.workspace = true
eyre.workspace = true eyre.workspace = true
hex = "0.4.3" getrandom.workspace = true
hex.workspace = true
itertools.workspace = true itertools.workspace = true
num = { workspace = true, features = ["serde"] } num = { workspace = true, features = ["serde"] }
num-derive.workspace = true num-derive.workspace = true
@ -27,20 +28,25 @@ num-traits.workspace = true
serde.workspace = true serde.workspace = true
serde_json.workspace = true serde_json.workspace = true
sha3 = "0.10" sha3 = "0.10"
strum.workspace = true
thiserror.workspace = true thiserror.workspace = true
uint = "0.9.5"
fixed-hash = "0.8.0"
tiny-keccak = { version = "2.0.2", features = ["keccak"]}
# version determined by ethers-rs config = { workspace = true, optional = true }
primitive-types = "*" ethers = { workspace = true, optional = true }
lazy_static = "*" ethers-core = { workspace = true, optional = true }
derive_more.workspace = true ethers-contract = { workspace = true, optional = true }
ethers-providers = { workspace = true, optional = true }
strum = { workspace = true, optional = true }
primitive-types = { workspace = true, optional = true }
[dev-dependencies] [dev-dependencies]
config.workspace = true
hyperlane-base = { path = "../hyperlane-base" }
tokio = { workspace = true, features = ["rt", "time"] } tokio = { workspace = true, features = ["rt", "time"] }
walkdir = { version = "2" }
[features] [features]
default = [] default = []
test-utils = [] test-utils = ["dep:config"]
agent = ["ethers", "strum"]
strum = ["dep:strum"]
ethers = ["dep:ethers-core", "dep:ethers-contract", "dep:ethers-providers", "dep:primitive-types", "dep:ethers"]

@ -1,3 +1,4 @@
use borsh::{BorshDeserialize, BorshSerialize};
use derive_new::new; use derive_new::new;
use crate::accumulator::{ use crate::accumulator::{
@ -6,7 +7,7 @@ use crate::accumulator::{
H256, TREE_DEPTH, ZERO_HASHES, H256, TREE_DEPTH, ZERO_HASHES,
}; };
#[derive(Debug, Clone, Copy, new)] #[derive(BorshDeserialize, BorshSerialize, Debug, Clone, Copy, new, PartialEq, Eq)]
/// An incremental merkle tree, modeled on the eth2 deposit contract /// An incremental merkle tree, modeled on the eth2 deposit contract
pub struct IncrementalMerkle { pub struct IncrementalMerkle {
branch: [H256; TREE_DEPTH], branch: [H256; TREE_DEPTH],
@ -104,7 +105,7 @@ mod test {
// insert the leaves // insert the leaves
for leaf in test_case.leaves.iter() { for leaf in test_case.leaves.iter() {
let hashed_leaf = hash_message(leaf); let hashed_leaf = hash_message(leaf);
tree.ingest(hashed_leaf); tree.ingest(hashed_leaf.into());
} }
// assert the tree has the proper leaf count // assert the tree has the proper leaf count

@ -1,4 +1,3 @@
use lazy_static::lazy_static;
use thiserror::Error; use thiserror::Error;
use crate::{ use crate::{
@ -15,12 +14,46 @@ use crate::{
// - remove ring dependency // - remove ring dependency
// In accordance with its license terms, the apache2 license is reproduced below // In accordance with its license terms, the apache2 license is reproduced below
lazy_static! { // Can't initialize this using `lazy_static` because of a constaint in Solana: static variables cannot be writable.
// See the following links for more info:
// https://stackoverflow.com/questions/70630344/failed-to-deploy-my-solana-smart-contract
// https://docs.solana.com/developing/on-chain-programs/limitations#static-writable-data
/// Zero nodes to act as "synthetic" left and right subtrees of other zero nodes. /// Zero nodes to act as "synthetic" left and right subtrees of other zero nodes.
pub static ref ZERO_NODES: Vec<MerkleTree> = { pub const ZERO_NODES: [MerkleTree; TREE_DEPTH + 1] = [
(0..=TREE_DEPTH).map(MerkleTree::Zero).collect() MerkleTree::Zero(0),
}; MerkleTree::Zero(1),
} MerkleTree::Zero(2),
MerkleTree::Zero(3),
MerkleTree::Zero(4),
MerkleTree::Zero(5),
MerkleTree::Zero(6),
MerkleTree::Zero(7),
MerkleTree::Zero(8),
MerkleTree::Zero(9),
MerkleTree::Zero(10),
MerkleTree::Zero(11),
MerkleTree::Zero(12),
MerkleTree::Zero(13),
MerkleTree::Zero(14),
MerkleTree::Zero(15),
MerkleTree::Zero(16),
MerkleTree::Zero(17),
MerkleTree::Zero(18),
MerkleTree::Zero(19),
MerkleTree::Zero(20),
MerkleTree::Zero(21),
MerkleTree::Zero(22),
MerkleTree::Zero(23),
MerkleTree::Zero(24),
MerkleTree::Zero(25),
MerkleTree::Zero(26),
MerkleTree::Zero(27),
MerkleTree::Zero(28),
MerkleTree::Zero(29),
MerkleTree::Zero(30),
MerkleTree::Zero(31),
MerkleTree::Zero(32),
];
/// Right-sparse Merkle tree. /// Right-sparse Merkle tree.
/// ///
@ -491,6 +524,12 @@ mod tests {
assert_eq!(second.hash(), incr.root()); assert_eq!(second.hash(), incr.root());
assert_eq!(full.hash(), incr.root()); assert_eq!(full.hash(), incr.root());
} }
#[test]
fn it_sets_zero_nodes_correctly() {
let expected_zero_nodes: Vec<_> = (0..=TREE_DEPTH).map(MerkleTree::Zero).collect();
assert_eq!(expected_zero_nodes.as_slice(), ZERO_NODES.as_slice());
}
} }
/* /*

@ -1,4 +1,3 @@
use lazy_static::lazy_static;
use sha3::{digest::Update, Digest, Keccak256}; use sha3::{digest::Update, Digest, Keccak256};
use crate::H256; use crate::H256;
@ -11,8 +10,9 @@ pub mod merkle;
/// Utilities for manipulating proofs to reflect sparse merkle trees. /// Utilities for manipulating proofs to reflect sparse merkle trees.
pub mod sparse; pub mod sparse;
/// Tree depth mod zero_hashes;
pub const TREE_DEPTH: usize = 32; pub use zero_hashes::{TREE_DEPTH, ZERO_HASHES};
const EMPTY_SLICE: &[H256] = &[]; const EMPTY_SLICE: &[H256] = &[];
pub(super) fn hash_concat(left: impl AsRef<[u8]>, right: impl AsRef<[u8]>) -> H256 { pub(super) fn hash_concat(left: impl AsRef<[u8]>, right: impl AsRef<[u8]>) -> H256 {
@ -25,31 +25,45 @@ pub(super) fn hash_concat(left: impl AsRef<[u8]>, right: impl AsRef<[u8]>) -> H2
) )
} }
lazy_static! { /// The root of an empty tree
/// A cache of the zero hashes for each layer of the tree. pub const INITIAL_ROOT: H256 = H256([
pub static ref ZERO_HASHES: [H256; TREE_DEPTH + 1] = { 39, 174, 91, 160, 141, 114, 145, 201, 108, 140, 189, 220, 193, 72, 191, 72, 166, 214, 140, 121,
116, 185, 67, 86, 245, 55, 84, 239, 97, 113, 215, 87,
]);
#[cfg(test)]
mod test {
use super::*;
fn compute_zero_hashes() -> [H256; TREE_DEPTH + 1] {
// Implementation previously used in the `lazy_static!` macro for `ZERO_HASHES`
let mut hashes = [H256::zero(); TREE_DEPTH + 1]; let mut hashes = [H256::zero(); TREE_DEPTH + 1];
for i in 0..TREE_DEPTH { for i in 0..TREE_DEPTH {
hashes[i + 1] = hash_concat(hashes[i], hashes[i]); hashes[i + 1] = hash_concat(hashes[i], hashes[i]);
} }
hashes hashes
};
/// The root of an empty tree
pub static ref INITIAL_ROOT: H256 = incremental::IncrementalMerkle::default().root();
} }
#[cfg(test)]
mod test {
use super::*;
#[test] #[test]
fn it_calculates_the_initial_root() { fn it_calculates_the_initial_root() {
assert_eq!( assert_eq!(
*INITIAL_ROOT, INITIAL_ROOT,
"0x27ae5ba08d7291c96c8cbddcc148bf48a6d68c7974b94356f53754ef6171d757" "0x27ae5ba08d7291c96c8cbddcc148bf48a6d68c7974b94356f53754ef6171d757"
.parse() .parse()
.unwrap() .unwrap()
); );
} }
#[test]
fn it_prints_zero_hashes_items() {
assert_eq!(zero_hashes::ZERO_HASHES, compute_zero_hashes());
}
#[test]
fn it_computes_initial_root() {
assert_eq!(
incremental::IncrementalMerkle::default().root(),
INITIAL_ROOT
);
}
} }

@ -140,10 +140,7 @@ mod tests {
fn tree_and_roots() -> (MerkleTree, Vec<H256>) { fn tree_and_roots() -> (MerkleTree, Vec<H256>) {
const LEAF_COUNT: usize = 47; const LEAF_COUNT: usize = 47;
let all_leaves: Vec<H256> = (0..LEAF_COUNT) let all_leaves: Vec<H256> = (0..LEAF_COUNT).map(|_| H256::from([0xAA; 32])).collect();
.into_iter()
.map(|_| H256::from([0xAA; 32]))
.collect();
let mut roots = [H256::zero(); LEAF_COUNT]; let mut roots = [H256::zero(); LEAF_COUNT];
let mut tree = MerkleTree::create(&[], TREE_DEPTH); let mut tree = MerkleTree::create(&[], TREE_DEPTH);
for i in 0..LEAF_COUNT { for i in 0..LEAF_COUNT {
@ -157,7 +154,7 @@ mod tests {
fn as_latest() { fn as_latest() {
let (tree, roots) = tree_and_roots(); let (tree, roots) = tree_and_roots();
for i in 0..roots.len() { for (i, root) in roots.iter().enumerate() {
let current_proof_i = tree.prove_against_current(i); let current_proof_i = tree.prove_against_current(i);
let latest_proof_i = current_proof_i.as_latest(); let latest_proof_i = current_proof_i.as_latest();
assert!(verify_merkle_proof( assert!(verify_merkle_proof(
@ -165,7 +162,7 @@ mod tests {
&latest_proof_i.path, &latest_proof_i.path,
TREE_DEPTH, TREE_DEPTH,
i, i,
roots[i] *root,
)); ));
} }
} }
@ -174,7 +171,7 @@ mod tests {
fn prove_against_previous() { fn prove_against_previous() {
let (tree, roots) = tree_and_roots(); let (tree, roots) = tree_and_roots();
for i in 0..roots.len() { for i in 0..roots.len() {
for j in i..roots.len() { for (j, root) in roots.iter().enumerate().skip(i) {
let proof = tree.prove_against_previous(i, j); let proof = tree.prove_against_previous(i, j);
assert_eq!(proof.root(), roots[j]); assert_eq!(proof.root(), roots[j]);
assert!(verify_merkle_proof( assert!(verify_merkle_proof(
@ -182,7 +179,7 @@ mod tests {
&proof.path, &proof.path,
TREE_DEPTH, TREE_DEPTH,
i, i,
roots[j] *root,
)); ));
} }
} }

@ -0,0 +1,143 @@
use crate::H256;
/// Tree depth
pub const TREE_DEPTH: usize = 32;
// keccak256 zero hashes
const Z_0: H256 = H256([
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
]);
const Z_1: H256 = H256([
173, 50, 40, 182, 118, 247, 211, 205, 66, 132, 165, 68, 63, 23, 241, 150, 43, 54, 228, 145,
179, 10, 64, 178, 64, 88, 73, 229, 151, 186, 95, 181,
]);
const Z_2: H256 = H256([
180, 193, 25, 81, 149, 124, 111, 143, 100, 44, 74, 246, 28, 214, 178, 70, 64, 254, 198, 220,
127, 198, 7, 238, 130, 6, 169, 158, 146, 65, 13, 48,
]);
const Z_3: H256 = H256([
33, 221, 185, 163, 86, 129, 92, 63, 172, 16, 38, 182, 222, 197, 223, 49, 36, 175, 186, 219, 72,
92, 155, 165, 163, 227, 57, 138, 4, 183, 186, 133,
]);
const Z_4: H256 = H256([
229, 135, 105, 179, 42, 27, 234, 241, 234, 39, 55, 90, 68, 9, 90, 13, 31, 182, 100, 206, 45,
211, 88, 231, 252, 191, 183, 140, 38, 161, 147, 68,
]);
const Z_5: H256 = H256([
14, 176, 30, 191, 201, 237, 39, 80, 12, 212, 223, 201, 121, 39, 45, 31, 9, 19, 204, 159, 102,
84, 13, 126, 128, 5, 129, 17, 9, 225, 207, 45,
]);
const Z_6: H256 = H256([
136, 124, 34, 189, 135, 80, 211, 64, 22, 172, 60, 102, 181, 255, 16, 45, 172, 221, 115, 246,
176, 20, 231, 16, 181, 30, 128, 34, 175, 154, 25, 104,
]);
const Z_7: H256 = H256([
255, 215, 1, 87, 228, 128, 99, 252, 51, 201, 122, 5, 15, 127, 100, 2, 51, 191, 100, 108, 201,
141, 149, 36, 198, 185, 43, 207, 58, 181, 111, 131,
]);
const Z_8: H256 = H256([
152, 103, 204, 95, 127, 25, 107, 147, 186, 225, 226, 126, 99, 32, 116, 36, 69, 210, 144, 242,
38, 56, 39, 73, 139, 84, 254, 197, 57, 247, 86, 175,
]);
const Z_9: H256 = H256([
206, 250, 212, 229, 8, 192, 152, 185, 167, 225, 216, 254, 177, 153, 85, 251, 2, 186, 150, 117,
88, 80, 120, 113, 9, 105, 211, 68, 15, 80, 84, 224,
]);
const Z_10: H256 = H256([
249, 220, 62, 127, 224, 22, 224, 80, 239, 242, 96, 51, 79, 24, 165, 212, 254, 57, 29, 130, 9,
35, 25, 245, 150, 79, 46, 46, 183, 193, 195, 165,
]);
const Z_11: H256 = H256([
248, 177, 58, 73, 226, 130, 246, 9, 195, 23, 168, 51, 251, 141, 151, 109, 17, 81, 124, 87, 29,
18, 33, 162, 101, 210, 90, 247, 120, 236, 248, 146,
]);
const Z_12: H256 = H256([
52, 144, 198, 206, 235, 69, 10, 236, 220, 130, 226, 130, 147, 3, 29, 16, 199, 215, 59, 248, 94,
87, 191, 4, 26, 151, 54, 10, 162, 197, 217, 156,
]);
const Z_13: H256 = H256([
193, 223, 130, 217, 196, 184, 116, 19, 234, 226, 239, 4, 143, 148, 180, 211, 85, 76, 234, 115,
217, 43, 15, 122, 249, 110, 2, 113, 198, 145, 226, 187,
]);
const Z_14: H256 = H256([
92, 103, 173, 215, 198, 202, 243, 2, 37, 106, 222, 223, 122, 177, 20, 218, 10, 207, 232, 112,
212, 73, 163, 164, 137, 247, 129, 214, 89, 232, 190, 204,
]);
const Z_15: H256 = H256([
218, 123, 206, 159, 78, 134, 24, 182, 189, 47, 65, 50, 206, 121, 140, 220, 122, 96, 231, 225,
70, 10, 114, 153, 227, 198, 52, 42, 87, 150, 38, 210,
]);
const Z_16: H256 = H256([
39, 51, 229, 15, 82, 110, 194, 250, 25, 162, 43, 49, 232, 237, 80, 242, 60, 209, 253, 249, 76,
145, 84, 237, 58, 118, 9, 162, 241, 255, 152, 31,
]);
const Z_17: H256 = H256([
225, 211, 181, 200, 7, 178, 129, 228, 104, 60, 198, 214, 49, 92, 249, 91, 154, 222, 134, 65,
222, 252, 179, 35, 114, 241, 193, 38, 227, 152, 239, 122,
]);
const Z_18: H256 = H256([
90, 45, 206, 10, 138, 127, 104, 187, 116, 86, 15, 143, 113, 131, 124, 44, 46, 187, 203, 247,
255, 251, 66, 174, 24, 150, 241, 63, 124, 116, 121, 160,
]);
const Z_19: H256 = H256([
180, 106, 40, 182, 245, 85, 64, 248, 148, 68, 246, 61, 224, 55, 142, 61, 18, 27, 224, 158, 6,
204, 157, 237, 28, 32, 230, 88, 118, 211, 106, 160,
]);
const Z_20: H256 = H256([
198, 94, 150, 69, 100, 71, 134, 182, 32, 226, 221, 42, 214, 72, 221, 252, 191, 74, 126, 91, 26,
58, 78, 207, 231, 246, 70, 103, 163, 240, 183, 226,
]);
const Z_21: H256 = H256([
244, 65, 133, 136, 237, 53, 162, 69, 140, 255, 235, 57, 185, 61, 38, 241, 141, 42, 177, 59,
220, 230, 174, 229, 142, 123, 153, 53, 158, 194, 223, 217,
]);
const Z_22: H256 = H256([
90, 156, 22, 220, 0, 214, 239, 24, 183, 147, 58, 111, 141, 198, 92, 203, 85, 102, 113, 56, 119,
111, 125, 234, 16, 16, 112, 220, 135, 150, 227, 119,
]);
const Z_23: H256 = H256([
77, 248, 79, 64, 174, 12, 130, 41, 208, 214, 6, 158, 92, 143, 57, 167, 194, 153, 103, 122, 9,
211, 103, 252, 123, 5, 227, 188, 56, 14, 230, 82,
]);
const Z_24: H256 = H256([
205, 199, 37, 149, 247, 76, 123, 16, 67, 208, 225, 255, 186, 183, 52, 100, 140, 131, 141, 251,
5, 39, 217, 113, 182, 2, 188, 33, 108, 150, 25, 239,
]);
const Z_25: H256 = H256([
10, 191, 90, 201, 116, 161, 237, 87, 244, 5, 10, 165, 16, 221, 156, 116, 245, 8, 39, 123, 57,
215, 151, 59, 178, 223, 204, 197, 238, 176, 97, 141,
]);
const Z_26: H256 = H256([
184, 205, 116, 4, 111, 243, 55, 240, 167, 191, 44, 142, 3, 225, 15, 100, 44, 24, 134, 121, 141,
113, 128, 106, 177, 232, 136, 217, 229, 238, 135, 208,
]);
const Z_27: H256 = H256([
131, 140, 86, 85, 203, 33, 198, 203, 131, 49, 59, 90, 99, 17, 117, 223, 244, 150, 55, 114, 204,
233, 16, 129, 136, 179, 74, 200, 124, 129, 196, 30,
]);
const Z_28: H256 = H256([
102, 46, 228, 221, 45, 215, 178, 188, 112, 121, 97, 177, 230, 70, 196, 4, 118, 105, 220, 182,
88, 79, 13, 141, 119, 13, 175, 93, 126, 125, 235, 46,
]);
const Z_29: H256 = H256([
56, 138, 178, 14, 37, 115, 209, 113, 168, 129, 8, 231, 157, 130, 14, 152, 242, 108, 11, 132,
170, 139, 47, 74, 164, 150, 141, 187, 129, 142, 163, 34,
]);
const Z_30: H256 = H256([
147, 35, 124, 80, 186, 117, 238, 72, 95, 76, 34, 173, 242, 247, 65, 64, 11, 223, 141, 106, 156,
199, 223, 126, 202, 229, 118, 34, 22, 101, 215, 53,
]);
const Z_31: H256 = H256([
132, 72, 129, 139, 180, 174, 69, 98, 132, 158, 148, 158, 23, 172, 22, 224, 190, 22, 104, 142,
21, 107, 92, 241, 94, 9, 140, 98, 124, 0, 86, 169,
]);
const Z_32: H256 = H256([
39, 174, 91, 160, 141, 114, 145, 201, 108, 140, 189, 220, 193, 72, 191, 72, 166, 214, 140, 121,
116, 185, 67, 86, 245, 55, 84, 239, 97, 113, 215, 87,
]);
/// Precomputed zero hashes for building the merkle tree
/// A cache of the zero hashes for each layer of the tree.
pub const ZERO_HASHES: [H256; TREE_DEPTH + 1] = [
Z_0, Z_1, Z_2, Z_3, Z_4, Z_5, Z_6, Z_7, Z_8, Z_9, Z_10, Z_11, Z_12, Z_13, Z_14, Z_15, Z_16,
Z_17, Z_18, Z_19, Z_20, Z_21, Z_22, Z_23, Z_24, Z_25, Z_26, Z_27, Z_28, Z_29, Z_30, Z_31, Z_32,
];

@ -1,10 +1,11 @@
#![allow(missing_docs)] #![allow(missing_docs)]
use std::fmt::{Debug, Display, Formatter}; use std::fmt::{Debug, Formatter};
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
use num_derive::FromPrimitive; use num_derive::FromPrimitive;
use num_traits::FromPrimitive; use num_traits::FromPrimitive;
#[cfg(feature = "strum")]
use strum::{EnumIter, EnumString, IntoStaticStr}; use strum::{EnumIter, EnumString, IntoStaticStr};
use crate::utils::many_to_one; use crate::utils::many_to_one;
@ -21,7 +22,9 @@ pub struct ContractLocator<'a> {
pub domain: &'a HyperlaneDomain, pub domain: &'a HyperlaneDomain,
pub address: H256, pub address: H256,
} }
impl<'a> Display for ContractLocator<'a> {
#[cfg(feature = "strum")]
impl<'a> std::fmt::Display for ContractLocator<'a> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!( write!(
f, f,
@ -58,20 +61,15 @@ impl From<&'_ Address> for H160 {
} }
/// All domains supported by Hyperlane. /// All domains supported by Hyperlane.
#[derive( #[derive(FromPrimitive, PartialEq, Eq, Debug, Clone, Copy, Hash)]
FromPrimitive, #[cfg_attr(
EnumString, feature = "strum",
IntoStaticStr, derive(strum::Display, EnumString, IntoStaticStr, EnumIter)
strum::Display, )]
EnumIter, #[cfg_attr(
PartialEq, feature = "strum",
Eq, strum(serialize_all = "lowercase", ascii_case_insensitive)
Debug,
Clone,
Copy,
Hash,
)] )]
#[strum(serialize_all = "lowercase", ascii_case_insensitive)]
pub enum KnownHyperlaneDomain { pub enum KnownHyperlaneDomain {
Ethereum = 1, Ethereum = 1,
Goerli = 5, Goerli = 5,
@ -89,9 +87,9 @@ pub enum KnownHyperlaneDomain {
Optimism = 10, Optimism = 10,
OptimismGoerli = 420, OptimismGoerli = 420,
#[strum(serialize = "bsc")] #[cfg_attr(feature = "strum", strum(serialize = "bsc"))]
BinanceSmartChain = 56, BinanceSmartChain = 56,
#[strum(serialize = "bsctestnet")] #[cfg_attr(feature = "strum", strum(serialize = "bsctestnet"))]
BinanceSmartChainTestnet = 97, BinanceSmartChainTestnet = 97,
Celo = 42220, Celo = 42220,
@ -114,6 +112,11 @@ pub enum KnownHyperlaneDomain {
/// Fuel1 local chain /// Fuel1 local chain
FuelTest1 = 13374, FuelTest1 = 13374,
/// Sealevel local chain 1
SealevelTest1 = 13375,
/// Sealevel local chain 1
SealevelTest2 = 13376,
} }
#[derive(Clone)] #[derive(Clone)]
@ -151,10 +154,15 @@ impl HyperlaneDomain {
} }
/// Types of Hyperlane domains. /// Types of Hyperlane domains.
#[derive( #[derive(FromPrimitive, Copy, Clone, Eq, PartialEq, Debug)]
FromPrimitive, EnumString, IntoStaticStr, strum::Display, Copy, Clone, Eq, PartialEq, Debug, #[cfg_attr(
feature = "strum",
derive(strum::Display, EnumString, IntoStaticStr, EnumIter)
)]
#[cfg_attr(
feature = "strum",
strum(serialize_all = "lowercase", ascii_case_insensitive)
)] )]
#[strum(serialize_all = "lowercase", ascii_case_insensitive)]
pub enum HyperlaneDomainType { pub enum HyperlaneDomainType {
/// A mainnet. /// A mainnet.
Mainnet, Mainnet,
@ -167,15 +175,22 @@ pub enum HyperlaneDomainType {
} }
/// A selector for which base library should handle this domain. /// A selector for which base library should handle this domain.
#[derive( #[derive(FromPrimitive, Copy, Clone, Eq, PartialEq, Debug)]
FromPrimitive, EnumString, IntoStaticStr, strum::Display, Copy, Clone, Eq, PartialEq, Debug, #[cfg_attr(
feature = "strum",
derive(strum::Display, EnumString, IntoStaticStr, EnumIter)
)]
#[cfg_attr(
feature = "strum",
strum(serialize_all = "lowercase", ascii_case_insensitive)
)] )]
#[strum(serialize_all = "lowercase", ascii_case_insensitive)]
pub enum HyperlaneDomainProtocol { pub enum HyperlaneDomainProtocol {
/// An EVM-based chain type which uses hyperlane-ethereum. /// An EVM-based chain type which uses hyperlane-ethereum.
Ethereum, Ethereum,
/// A Fuel-based chain type which uses hyperlane-fuel. /// A Fuel-based chain type which uses hyperlane-fuel.
Fuel, Fuel,
/// A Sealevel-based chain type which uses hyperlane-sealevel.
Sealevel,
} }
impl HyperlaneDomainProtocol { impl HyperlaneDomainProtocol {
@ -184,11 +199,13 @@ impl HyperlaneDomainProtocol {
match self { match self {
Ethereum => format!("{:?}", H160::from(addr)), Ethereum => format!("{:?}", H160::from(addr)),
Fuel => format!("{:?}", addr), Fuel => format!("{:?}", addr),
Sealevel => format!("{:?}", addr),
} }
} }
} }
impl KnownHyperlaneDomain { impl KnownHyperlaneDomain {
#[cfg(feature = "strum")]
pub fn as_str(self) -> &'static str { pub fn as_str(self) -> &'static str {
self.into() self.into()
} }
@ -206,7 +223,7 @@ impl KnownHyperlaneDomain {
Goerli, Mumbai, Fuji, ArbitrumGoerli, OptimismGoerli, BinanceSmartChainTestnet, Goerli, Mumbai, Fuji, ArbitrumGoerli, OptimismGoerli, BinanceSmartChainTestnet,
Alfajores, MoonbaseAlpha, Zksync2Testnet, Sepolia Alfajores, MoonbaseAlpha, Zksync2Testnet, Sepolia
], ],
LocalTestChain: [Test1, Test2, Test3, FuelTest1], LocalTestChain: [Test1, Test2, Test3, FuelTest1, SealevelTest1, SealevelTest2],
}) })
} }
@ -220,6 +237,7 @@ impl KnownHyperlaneDomain {
Alfajores, Moonbeam, MoonbaseAlpha, Zksync2Testnet, Test1, Test2, Test3 Alfajores, Moonbeam, MoonbaseAlpha, Zksync2Testnet, Test1, Test2, Test3
], ],
HyperlaneDomainProtocol::Fuel: [FuelTest1], HyperlaneDomainProtocol::Fuel: [FuelTest1],
HyperlaneDomainProtocol::Sealevel: [SealevelTest1, SealevelTest2],
}) })
} }
} }
@ -238,6 +256,7 @@ impl Hash for HyperlaneDomain {
} }
} }
#[cfg(feature = "strum")]
impl AsRef<str> for HyperlaneDomain { impl AsRef<str> for HyperlaneDomain {
fn as_ref(&self) -> &str { fn as_ref(&self) -> &str {
self.name() self.name()
@ -270,7 +289,8 @@ impl From<&HyperlaneDomain> for HyperlaneDomainProtocol {
} }
} }
impl Display for HyperlaneDomain { #[cfg(feature = "strum")]
impl std::fmt::Display for HyperlaneDomain {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.name()) write!(f, "{}", self.name())
} }
@ -278,8 +298,15 @@ impl Display for HyperlaneDomain {
impl Debug for HyperlaneDomain { impl Debug for HyperlaneDomain {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
#[cfg(feature = "strum")]
{
write!(f, "HyperlaneDomain({} ({}))", self.name(), self.id()) write!(f, "HyperlaneDomain({} ({}))", self.name(), self.id())
} }
#[cfg(not(feature = "strum"))]
{
write!(f, "HyperlaneDomain({})", self.id())
}
}
} }
#[derive(thiserror::Error, Debug)] #[derive(thiserror::Error, Debug)]
@ -291,6 +318,7 @@ pub enum HyperlaneDomainConfigError {
} }
impl HyperlaneDomain { impl HyperlaneDomain {
#[cfg(feature = "strum")]
pub fn from_config( pub fn from_config(
domain_id: u32, domain_id: u32,
name: &str, name: &str,
@ -319,6 +347,7 @@ impl HyperlaneDomain {
} }
/// The chain name /// The chain name
#[cfg(feature = "strum")]
pub fn name(&self) -> &str { pub fn name(&self) -> &str {
match self { match self {
HyperlaneDomain::Known(domain) => domain.as_str(), HyperlaneDomain::Known(domain) => domain.as_str(),
@ -357,6 +386,7 @@ impl HyperlaneDomain {
} }
#[cfg(test)] #[cfg(test)]
#[cfg(feature = "strum")]
mod tests { mod tests {
use crate::KnownHyperlaneDomain; use crate::KnownHyperlaneDomain;
use std::str::FromStr; use std::str::FromStr;

@ -1,4 +1,4 @@
use primitive_types::U256; use crate::U256;
use serde::Deserialize; use serde::Deserialize;
use std::fmt::{Debug, Formatter}; use std::fmt::{Debug, Formatter};
use std::num::{ParseIntError, TryFromIntError}; use std::num::{ParseIntError, TryFromIntError};

@ -3,10 +3,6 @@ use std::error::Error as StdError;
use std::fmt::{Debug, Display, Formatter}; use std::fmt::{Debug, Display, Formatter};
use std::ops::Deref; use std::ops::Deref;
use ethers_contract::ContractError;
use ethers_core::types::SignatureError;
use ethers_providers::{Middleware, ProviderError};
use crate::HyperlaneProviderError; use crate::HyperlaneProviderError;
use crate::H256; use crate::H256;
@ -65,9 +61,6 @@ pub enum ChainCommunicationError {
/// An error with a contract call /// An error with a contract call
#[error(transparent)] #[error(transparent)]
ContractError(HyperlaneCustomErrorWrapper), ContractError(HyperlaneCustomErrorWrapper),
/// Provider Error
#[error(transparent)]
ProviderError(#[from] ProviderError),
/// A transaction was dropped from the mempool /// A transaction was dropped from the mempool
#[error("Transaction dropped from mempool {0:?}")] #[error("Transaction dropped from mempool {0:?}")]
TransactionDropped(H256), TransactionDropped(H256),
@ -78,6 +71,9 @@ pub enum ChainCommunicationError {
/// A transaction submission timed out /// A transaction submission timed out
#[error("Transaction submission timed out")] #[error("Transaction submission timed out")]
TransactionTimeout(), TransactionTimeout(),
/// No signer is available and was required for the operation
#[error("Signer unavailable")]
SignerUnavailable,
} }
impl ChainCommunicationError { impl ChainCommunicationError {
@ -90,14 +86,53 @@ impl ChainCommunicationError {
pub fn from_other_boxed<E: HyperlaneCustomError>(err: Box<E>) -> Self { pub fn from_other_boxed<E: HyperlaneCustomError>(err: Box<E>) -> Self {
Self::Other(HyperlaneCustomErrorWrapper(err)) Self::Other(HyperlaneCustomErrorWrapper(err))
} }
/// Creates a chain communication error of the other error variant from a static string
pub fn from_other_str(err: &'static str) -> Self {
#[derive(Debug)]
#[repr(transparent)]
struct StringError(&'static str);
impl Display for StringError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_str(self.0)
}
} }
impl StdError for StringError {}
impl<M> From<ContractError<M>> for ChainCommunicationError Self::from_contract_error(StringError(err))
}
/// Creates a chain communication error of the contract error variant from any other existing
/// error
pub fn from_contract_error<E>(err: E) -> Self
where
E: HyperlaneCustomError,
{
Self::ContractError(HyperlaneCustomErrorWrapper(Box::new(err)))
}
/// Creates a chain communication error of the contract error variant from any other existing
/// error
pub fn from_contract_error_boxed<E>(err: Box<E>) -> Self
where where
M: Middleware + 'static, E: HyperlaneCustomError,
{ {
fn from(e: ContractError<M>) -> Self { Self::ContractError(HyperlaneCustomErrorWrapper(err))
Self::ContractError(HyperlaneCustomErrorWrapper(Box::new(e))) }
/// Creates a chain communication error of the contract error variant from a static string
pub fn from_contract_error_str(err: &'static str) -> Self {
#[derive(Debug)]
#[repr(transparent)]
struct StringError(&'static str);
impl Display for StringError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_str(self.0)
}
}
impl StdError for StringError {}
Self::from_contract_error(StringError(err))
} }
} }
@ -107,12 +142,29 @@ impl From<HyperlaneProviderError> for ChainCommunicationError {
} }
} }
#[cfg(feature = "ethers")]
impl<T: ethers_providers::Middleware + 'static> From<ethers_contract::ContractError<T>>
for ChainCommunicationError
{
fn from(err: ethers_contract::ContractError<T>) -> Self {
Self::ContractError(HyperlaneCustomErrorWrapper(Box::new(err)))
}
}
#[cfg(feature = "ethers")]
impl From<ethers::providers::ProviderError> for ChainCommunicationError {
fn from(err: ethers::providers::ProviderError) -> Self {
Self::ContractError(HyperlaneCustomErrorWrapper(Box::new(err)))
}
}
/// Error types for the Hyperlane protocol /// Error types for the Hyperlane protocol
#[derive(Debug, thiserror::Error)] #[derive(Debug, thiserror::Error)]
pub enum HyperlaneProtocolError { pub enum HyperlaneProtocolError {
/// Signature Error pasthrough /// Signature Error pasthrough
#[cfg(feature = "ethers")]
#[error(transparent)] #[error(transparent)]
SignatureError(#[from] SignatureError), SignatureError(#[from] ethers_core::types::SignatureError),
/// IO error from Read/Write usage /// IO error from Read/Write usage
#[error(transparent)] #[error(transparent)]
IoError(#[from] std::io::Error), IoError(#[from] std::io::Error),

@ -2,7 +2,7 @@
//! implementations. //! implementations.
#![warn(missing_docs)] #![warn(missing_docs)]
#![forbid(unsafe_code)] #![deny(unsafe_code)]
#![forbid(where_clauses_object_safety)] #![forbid(where_clauses_object_safety)]
extern crate core; extern crate core;

@ -2,9 +2,8 @@ use std::fs::File;
use std::io::Read; use std::io::Read;
use std::path::PathBuf; use std::path::PathBuf;
use primitive_types::H256;
use crate::accumulator::merkle::Proof; use crate::accumulator::merkle::Proof;
use crate::H256;
/// Struct representing a single merkle test case /// Struct representing a single merkle test case
#[derive(serde::Deserialize, serde::Serialize)] #[derive(serde::Deserialize, serde::Serialize)]

@ -3,15 +3,7 @@ use std::time::Duration;
use async_trait::async_trait; use async_trait::async_trait;
use auto_impl::auto_impl; use auto_impl::auto_impl;
use crate::{ChainResult, LogMeta}; use crate::{ChainResult, IndexRange, LogMeta};
/// The action that should be taken by the contract sync loop
pub enum CursorAction {
/// Direct the contract_sync task to query a block range
Query((u32, u32)),
/// Direct the contract_sync task to sleep for a duration
Sleep(Duration),
}
/// A cursor governs event indexing for a contract. /// A cursor governs event indexing for a contract.
#[async_trait] #[async_trait]
@ -27,3 +19,11 @@ pub trait ContractSyncCursor<T>: Send + Sync + 'static {
/// accordingly. /// accordingly.
async fn update(&mut self, logs: Vec<(T, LogMeta)>) -> eyre::Result<()>; async fn update(&mut self, logs: Vec<(T, LogMeta)>) -> eyre::Result<()>;
} }
/// The action that should be taken by the contract sync loop
pub enum CursorAction {
/// Direct the contract_sync task to query a block range (inclusive)
Query(IndexRange),
/// Direct the contract_sync task to sleep for a duration
Sleep(Duration),
}

@ -42,19 +42,37 @@ pub trait HyperlaneAbi {
impl fmt::Debug for dyn HyperlaneChain { impl fmt::Debug for dyn HyperlaneChain {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let domain = self.domain(); let domain = self.domain();
write!(f, "HyperlaneChain({} ({}))", domain, domain.id()) #[cfg(feature = "strum")]
{
write!(f, "HyperlaneChain({domain} ({}))", domain.id())
}
#[cfg(not(feature = "strum"))]
{
write!(f, "HyperlaneChain({})", domain.id())
}
} }
} }
impl fmt::Debug for dyn HyperlaneContract { impl fmt::Debug for dyn HyperlaneContract {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let domain = self.domain(); let domain = self.domain();
#[cfg(feature = "strum")]
{
write!( write!(
f, f,
"HyperlaneContract({:?} @ {} ({}))", "HyperlaneContract({:?} @ {domain} ({}))",
self.address(), self.address(),
domain,
domain.id(), domain.id(),
) )
} }
#[cfg(not(feature = "strum"))]
{
write!(
f,
"HyperlaneContract({:?} @ {})",
self.address(),
domain.id(),
)
}
}
} }

@ -1,5 +1,3 @@
use ethers_core::types::{Signature, SignatureError};
use std::convert::TryFrom;
use std::io::{Error, ErrorKind}; use std::io::{Error, ErrorKind};
use crate::{HyperlaneProtocolError, H256, U256}; use crate::{HyperlaneProtocolError, H256, U256};
@ -28,7 +26,8 @@ pub trait Decode {
Self: Sized; Self: Sized;
} }
impl Encode for Signature { #[cfg(feature = "ethers")]
impl Encode for ethers_core::types::Signature {
fn write_to<W>(&self, writer: &mut W) -> std::io::Result<usize> fn write_to<W>(&self, writer: &mut W) -> std::io::Result<usize>
where where
W: std::io::Write, W: std::io::Write,
@ -38,7 +37,8 @@ impl Encode for Signature {
} }
} }
impl Decode for Signature { #[cfg(feature = "ethers")]
impl Decode for ethers_core::types::Signature {
fn read_from<R>(reader: &mut R) -> Result<Self, HyperlaneProtocolError> fn read_from<R>(reader: &mut R) -> Result<Self, HyperlaneProtocolError>
where where
R: std::io::Read, R: std::io::Read,
@ -46,7 +46,7 @@ impl Decode for Signature {
let mut buf = [0u8; 65]; let mut buf = [0u8; 65];
let len = reader.read(&mut buf)?; let len = reader.read(&mut buf)?;
if len != 65 { if len != 65 {
Err(SignatureError::InvalidLength(len).into()) Err(ethers_core::types::SignatureError::InvalidLength(len).into())
} else { } else {
Ok(Self::try_from(buf.as_ref())?) Ok(Self::try_from(buf.as_ref())?)
} }

@ -5,18 +5,42 @@
//! a chain-specific library and provider (e.g. ethers::provider). //! a chain-specific library and provider (e.g. ethers::provider).
use std::fmt::Debug; use std::fmt::Debug;
use std::ops::RangeInclusive;
use async_trait::async_trait; use async_trait::async_trait;
use auto_impl::auto_impl; use auto_impl::auto_impl;
use serde::Deserialize;
use crate::{ChainResult, HyperlaneMessage, LogMeta}; use crate::{ChainResult, HyperlaneMessage, LogMeta};
/// Indexing mode.
#[derive(Copy, Debug, Default, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub enum IndexMode {
/// Block based indexing.
#[default]
Block,
/// Sequence based indexing.
Sequence,
}
/// An indexing range.
#[derive(Debug, Clone)]
pub enum IndexRange {
/// For block-based indexers
BlockRange(RangeInclusive<u32>),
/// For indexers that look for specific sequences, e.g. message nonces.
SequenceRange(RangeInclusive<u32>),
}
pub use IndexRange::*;
/// Interface for an indexer. /// Interface for an indexer.
#[async_trait] #[async_trait]
#[auto_impl(&, Box, Arc,)] #[auto_impl(&, Box, Arc,)]
pub trait Indexer<T: Sized>: Send + Sync + Debug { pub trait Indexer<T: Sized>: Send + Sync + Debug {
/// Fetch list of logs between blocks `from` and `to`, inclusive. /// Fetch list of logs between blocks `from` and `to`, inclusive.
async fn fetch_logs(&self, from: u32, to: u32) -> ChainResult<Vec<(T, LogMeta)>>; async fn fetch_logs(&self, range: IndexRange) -> ChainResult<Vec<(T, LogMeta)>>;
/// Get the chain's latest block number that has reached finality /// Get the chain's latest block number that has reached finality
async fn get_finalized_block_number(&self) -> ChainResult<u32>; async fn get_finalized_block_number(&self) -> ChainResult<u32>;

@ -2,14 +2,16 @@ use std::fmt::Debug;
use async_trait::async_trait; use async_trait::async_trait;
use auto_impl::auto_impl; use auto_impl::auto_impl;
use borsh::{BorshDeserialize, BorshSerialize};
use num_derive::FromPrimitive; use num_derive::FromPrimitive;
use primitive_types::U256;
use strum::Display;
use crate::{ChainResult, HyperlaneContract, HyperlaneMessage}; use crate::{ChainResult, HyperlaneContract, HyperlaneMessage, U256};
/// Enumeration of all known module types /// Enumeration of all known module types
#[derive(FromPrimitive, Clone, Debug, Default, Display, Copy, PartialEq, Eq)] #[derive(
FromPrimitive, Clone, Debug, Default, Copy, PartialEq, Eq, BorshDeserialize, BorshSerialize,
)]
#[cfg_attr(feature = "strum", derive(strum::Display))]
pub enum ModuleType { pub enum ModuleType {
/// INVALID ISM /// INVALID ISM
#[default] #[default]

@ -44,13 +44,17 @@ pub struct TxOutcome {
// TODO: more? What can be abstracted across all chains? // TODO: more? What can be abstracted across all chains?
} }
#[cfg(feature = "ethers")]
impl From<ethers_core::types::TransactionReceipt> for TxOutcome { impl From<ethers_core::types::TransactionReceipt> for TxOutcome {
fn from(t: ethers_core::types::TransactionReceipt) -> Self { fn from(t: ethers_core::types::TransactionReceipt) -> Self {
Self { Self {
txid: t.transaction_hash, txid: t.transaction_hash.into(),
executed: t.status.unwrap().low_u32() == 1, executed: t.status.unwrap().low_u32() == 1,
gas_used: t.gas_used.unwrap_or(crate::U256::zero()), gas_used: t.gas_used.map(Into::into).unwrap_or(crate::U256::zero()),
gas_price: t.effective_gas_price.unwrap_or(crate::U256::zero()), gas_price: t
.effective_gas_price
.map(Into::into)
.unwrap_or(crate::U256::zero()),
} }
} }
} }

@ -2,18 +2,13 @@ use std::fmt::{Debug, Formatter};
use async_trait::async_trait; use async_trait::async_trait;
use auto_impl::auto_impl; use auto_impl::auto_impl;
use ethers_core::{
types::{Address, Signature},
utils::hash_message,
};
use serde::{ use serde::{
ser::{SerializeStruct, Serializer}, ser::{SerializeStruct, Serializer},
Deserialize, Serialize, Deserialize, Serialize,
}; };
use crate::utils::fmt_bytes; use crate::utils::fmt_bytes;
use crate::{HyperlaneProtocolError, H160, H256}; use crate::{Signature, H160, H256};
/// An error incurred by a signer /// An error incurred by a signer
#[derive(thiserror::Error, Debug)] #[derive(thiserror::Error, Debug)]
@ -43,7 +38,11 @@ pub trait HyperlaneSignerExt {
) -> Result<SignedType<T>, HyperlaneSignerError>; ) -> Result<SignedType<T>, HyperlaneSignerError>;
/// Check whether a message was signed by a specific address. /// Check whether a message was signed by a specific address.
fn verify<T: Signable>(&self, signed: &SignedType<T>) -> Result<(), HyperlaneProtocolError>; #[cfg(feature = "ethers")]
fn verify<T: Signable>(
&self,
signed: &SignedType<T>,
) -> Result<(), crate::HyperlaneProtocolError>;
} }
#[async_trait] #[async_trait]
@ -57,7 +56,11 @@ impl<S: HyperlaneSigner> HyperlaneSignerExt for S {
Ok(SignedType { value, signature }) Ok(SignedType { value, signature })
} }
fn verify<T: Signable>(&self, signed: &SignedType<T>) -> Result<(), HyperlaneProtocolError> { #[cfg(feature = "ethers")]
fn verify<T: Signable>(
&self,
signed: &SignedType<T>,
) -> Result<(), crate::HyperlaneProtocolError> {
signed.verify(self.eth_address()) signed.verify(self.eth_address())
} }
} }
@ -72,7 +75,7 @@ pub trait Signable: Sized {
/// EIP-191 compliant hash of the signing hash. /// EIP-191 compliant hash of the signing hash.
fn eth_signed_message_hash(&self) -> H256 { fn eth_signed_message_hash(&self) -> H256 {
hash_message(self.signing_hash()) hashes::hash_message(self.signing_hash())
} }
} }
@ -103,17 +106,20 @@ impl<T: Signable + Serialize> Serialize for SignedType<T> {
impl<T: Signable> SignedType<T> { impl<T: Signable> SignedType<T> {
/// Recover the Ethereum address of the signer /// Recover the Ethereum address of the signer
pub fn recover(&self) -> Result<Address, HyperlaneProtocolError> { #[cfg(feature = "ethers")]
Ok(self pub fn recover(&self) -> Result<H160, crate::HyperlaneProtocolError> {
.signature let hash = ethers_core::types::H256::from(self.value.eth_signed_message_hash());
.recover(self.value.eth_signed_message_hash())?) let sig = ethers_core::types::Signature::from(self.signature);
Ok(sig.recover(hash)?.into())
} }
/// Check whether a message was signed by a specific address /// Check whether a message was signed by a specific address
pub fn verify(&self, signer: Address) -> Result<(), HyperlaneProtocolError> { #[cfg(feature = "ethers")]
Ok(self pub fn verify(&self, signer: H160) -> Result<(), crate::HyperlaneProtocolError> {
.signature let hash = ethers_core::types::H256::from(self.value.eth_signed_message_hash());
.verify(self.value.eth_signed_message_hash(), signer)?) let sig = ethers_core::types::Signature::from(self.signature);
let signer = ethers_core::types::H160::from(signer);
Ok(sig.verify(hash, signer)?)
} }
} }
@ -126,3 +132,54 @@ impl<T: Signable + Debug> Debug for SignedType<T> {
) )
} }
} }
// Copied from https://github.com/hyperlane-xyz/ethers-rs/blob/hyperlane/ethers-core/src/utils/hash.rs
// so that we can get EIP-191 hashing without the `ethers` feature
mod hashes {
    const PREFIX: &str = "\x19Ethereum Signed Message:\n";
    use crate::H256;
    use tiny_keccak::{Hasher, Keccak};

    /// Hash a message according to EIP-191.
    ///
    /// The payload is enveloped as
    /// `"\x19Ethereum Signed Message:\n" + message.length + message`
    /// and then hashed with keccak256.
    pub fn hash_message<S>(message: S) -> H256
    where
        S: AsRef<[u8]>,
    {
        let body = message.as_ref();
        // Build the EIP-191 envelope: prefix, decimal length, then the raw message.
        let mut enveloped = Vec::with_capacity(PREFIX.len() + 20 + body.len());
        enveloped.extend_from_slice(PREFIX.as_bytes());
        enveloped.extend_from_slice(body.len().to_string().as_bytes());
        enveloped.extend_from_slice(body);
        H256::from(keccak256(&enveloped))
    }

    /// Compute the Keccak-256 hash of input bytes.
    // TODO: Add Solidity Keccak256 packing support
    pub fn keccak256<S>(bytes: S) -> [u8; 32]
    where
        S: AsRef<[u8]>,
    {
        let mut digest = [0u8; 32];
        let mut state = Keccak::v256();
        state.update(bytes.as_ref());
        state.finalize(&mut digest);
        digest
    }

    #[test]
    #[cfg(feature = "ethers")]
    fn ensure_signed_hashes_match() {
        // Our local EIP-191 hash must agree with the ethers implementation.
        for msg in [&b"gm crypto!"[..], &b"hyperlane"[..]] {
            assert_eq!(
                ethers_core::utils::hash_message(msg),
                hash_message(msg).into()
            );
        }
    }
}

@ -8,7 +8,7 @@ use crate::{Announcement, ChainResult, HyperlaneContract, SignedType, TxOutcome,
/// Interface for the ValidatorAnnounce chain contract. Allows abstraction over /// Interface for the ValidatorAnnounce chain contract. Allows abstraction over
/// different chains /// different chains
#[async_trait] #[async_trait]
#[auto_impl(Box, Arc)] #[auto_impl(&, Box, Arc)]
pub trait ValidatorAnnounce: HyperlaneContract + Send + Sync + Debug { pub trait ValidatorAnnounce: HyperlaneContract + Send + Sync + Debug {
/// Returns the announced storage locations for the provided validators. /// Returns the announced storage locations for the provided validators.
async fn get_announced_storage_locations( async fn get_announced_storage_locations(

@ -1,10 +1,10 @@
use std::fmt::Debug;
use derive_more::Deref; use derive_more::Deref;
use ethers_core::types::{Address, Signature};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sha3::{digest::Update, Digest, Keccak256}; use sha3::{digest::Update, Digest, Keccak256};
use std::fmt::Debug;
use crate::{utils::domain_hash, Signable, SignedType, H256}; use crate::{utils::domain_hash, Signable, Signature, SignedType, H160, H256};
/// An Hyperlane checkpoint /// An Hyperlane checkpoint
#[derive(Copy, Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] #[derive(Copy, Clone, Eq, PartialEq, Serialize, Deserialize, Debug)]
@ -73,7 +73,7 @@ pub type SignedCheckpointWithMessageId = SignedType<CheckpointWithMessageId>;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct SignedCheckpointWithSigner<T: Signable> { pub struct SignedCheckpointWithSigner<T: Signable> {
/// The recovered signer /// The recovered signer
pub signer: Address, pub signer: H160,
/// The signed checkpoint /// The signed checkpoint
pub signed_checkpoint: SignedType<T>, pub signed_checkpoint: SignedType<T>,
} }
@ -84,7 +84,7 @@ pub struct SignatureWithSigner {
/// The signature /// The signature
pub signature: Signature, pub signature: Signature,
/// The signer of the signature /// The signer of the signature
pub signer: Address, pub signer: H160,
} }
/// A checkpoint and multiple signatures /// A checkpoint and multiple signatures

@ -1,8 +1,10 @@
use std::cmp::Ordering; use std::cmp::Ordering;
use ethers_contract::LogMeta as EthersLogMeta;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[cfg(feature = "ethers")]
use ethers_contract::LogMeta as EthersLogMeta;
use crate::{H256, U256}; use crate::{H256, U256};
/// A close clone of the Ethereum `LogMeta`, this is designed to be a more /// A close clone of the Ethereum `LogMeta`, this is designed to be a more
@ -29,28 +31,23 @@ pub struct LogMeta {
pub log_index: U256, pub log_index: U256,
} }
#[cfg(feature = "ethers")]
impl From<EthersLogMeta> for LogMeta { impl From<EthersLogMeta> for LogMeta {
fn from(v: EthersLogMeta) -> Self { fn from(v: EthersLogMeta) -> Self {
Self { Self::from(&v)
address: v.address.into(),
block_number: v.block_number.as_u64(),
block_hash: v.block_hash,
transaction_hash: v.transaction_hash,
transaction_index: v.transaction_index.as_u64(),
log_index: v.log_index,
}
} }
} }
#[cfg(feature = "ethers")]
impl From<&EthersLogMeta> for LogMeta { impl From<&EthersLogMeta> for LogMeta {
fn from(v: &EthersLogMeta) -> Self { fn from(v: &EthersLogMeta) -> Self {
Self { Self {
address: v.address.into(), address: crate::H160::from(v.address).into(),
block_number: v.block_number.as_u64(), block_number: v.block_number.as_u64(),
block_hash: v.block_hash, block_hash: v.block_hash.into(),
transaction_hash: v.transaction_hash, transaction_hash: v.transaction_hash.into(),
transaction_index: v.transaction_index.as_u64(), transaction_index: v.transaction_index.as_u64(),
log_index: v.log_index, log_index: v.log_index.into(),
} }
} }
} }

@ -1,7 +1,11 @@
pub use primitive_types::{H128, H160, H256, H512, U128, U256, U512}; use serde::{Deserialize, Serialize};
use std::fmt;
use std::io::{Read, Write}; use std::io::{Read, Write};
use std::ops::Add; use std::ops::Add;
pub use self::primitive_types::*;
#[cfg(feature = "ethers")]
pub use ::primitive_types as ethers_core_types;
pub use announcement::*; pub use announcement::*;
pub use chain_data::*; pub use chain_data::*;
pub use checkpoint::*; pub use checkpoint::*;
@ -15,10 +19,89 @@ mod chain_data;
mod checkpoint; mod checkpoint;
mod log_metadata; mod log_metadata;
mod message; mod message;
mod serialize;
/// Unified 32-byte identifier with convenience tooling for handling /// Unified 32-byte identifier with convenience tooling for handling
/// 20-byte ids (e.g ethereum addresses) /// 20-byte ids (e.g ethereum addresses)
pub mod identifiers; pub mod identifiers;
mod primitive_types;
// Copied from https://github.com/hyperlane-xyz/ethers-rs/blob/hyperlane/ethers-core/src/types/signature.rs#L54
// To avoid depending on the `ethers` type
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Copy, Hash)]
/// An ECDSA signature
pub struct Signature {
    /// R value
    pub r: U256,
    /// S Value
    pub s: U256,
    /// V value
    // NOTE(review): stored as u64, but only the low byte survives serialization
    // to the 65-byte form (see `From<&Signature> for [u8; 65]`) — confirm callers
    // never carry v > 255.
    pub v: u64,
}
impl Signature {
    /// Copies and serializes `self` into a new `Vec` with the recovery id included
    pub fn to_vec(&self) -> Vec<u8> {
        Vec::<u8>::from(self)
    }
}

impl fmt::Display for Signature {
    /// Render as lowercase hex of the 65-byte `r || s || v` form (no 0x prefix).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let raw: [u8; 65] = self.into();
        write!(f, "{}", hex::encode(&raw[..]))
    }
}

impl From<&Signature> for [u8; 65] {
    fn from(src: &Signature) -> [u8; 65] {
        let mut out = [0u8; 65];
        // Layout: r in bytes 0..32, s in bytes 32..64, recovery byte last.
        src.r.to_big_endian(&mut out[0..32]);
        src.s.to_big_endian(&mut out[32..64]);
        // NOTE(review): v is truncated to one byte, matching the upstream
        // ethers serialization this was copied from.
        out[64] = src.v as u8;
        out
    }
}

impl From<Signature> for [u8; 65] {
    fn from(src: Signature) -> [u8; 65] {
        (&src).into()
    }
}

impl From<&Signature> for Vec<u8> {
    fn from(src: &Signature) -> Vec<u8> {
        <[u8; 65]>::from(src).to_vec()
    }
}

impl From<Signature> for Vec<u8> {
    fn from(src: Signature) -> Vec<u8> {
        <[u8; 65]>::from(&src).to_vec()
    }
}
// Conversions to/from the ethers signature type. r and s convert through the
// inner-representation `From` impls between our U256 and the ethers U256;
// the recovery value v is copied verbatim, so the round trip is lossless.
#[cfg(feature = "ethers")]
impl From<ethers_core::types::Signature> for Signature {
    fn from(value: ethers_core::types::Signature) -> Self {
        Self {
            r: value.r.into(),
            s: value.s.into(),
            v: value.v,
        }
    }
}

#[cfg(feature = "ethers")]
impl From<Signature> for ethers_core::types::Signature {
    fn from(value: Signature) -> Self {
        Self {
            r: value.r.into(),
            s: value.s.into(),
            v: value.v,
        }
    }
}
/// A payment of a message's gas costs. /// A payment of a message's gas costs.
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]

@ -0,0 +1,310 @@
// Based on https://github.com/paritytech/parity-common/blob/a5ef7308d6986e62431e35d3156fed0a7a585d39/primitive-types/src/lib.rs
#![allow(clippy::assign_op_pattern)]
#![allow(clippy::reversed_empty_ranges)]
use std::fmt::Formatter;
use crate::types::serialize;
use borsh::{BorshDeserialize, BorshSerialize};
use fixed_hash::{construct_fixed_hash, impl_fixed_hash_conversions};
use serde::de::Visitor;
use uint::construct_uint;
/// Error type for conversion.
// Returned by the `TryFrom` narrowing conversions generated by
// `impl_fixed_uint_conversions!` below when the value does not fit.
#[derive(Debug, PartialEq, Eq)]
pub enum Error {
    /// Overflow encountered.
    Overflow,
}
// Crate-owned uint and fixed-hash types mirroring `primitive-types`, rebuilt
// here so that Borsh (de)serialization can be derived on them. The argument to
// `construct_uint!` is the number of 64-bit words; the argument to
// `construct_fixed_hash!` is the number of bytes.
construct_uint! {
    /// 128-bit unsigned integer.
    #[derive(BorshSerialize, BorshDeserialize)]
    pub struct U128(2);
}
construct_uint! {
    /// 256-bit unsigned integer.
    #[derive(BorshSerialize, BorshDeserialize)]
    pub struct U256(4);
}
construct_uint! {
    /// 512-bit unsigned integer.
    #[derive(BorshSerialize, BorshDeserialize)]
    pub struct U512(8);
}

construct_fixed_hash! {
    /// 128-bit hash type.
    #[derive(BorshSerialize, BorshDeserialize)]
    pub struct H128(16);
}

construct_fixed_hash! {
    /// 160-bit hash type.
    #[derive(BorshSerialize, BorshDeserialize)]
    pub struct H160(20);
}

construct_fixed_hash! {
    /// 256-bit hash type.
    #[derive(BorshSerialize, BorshDeserialize)]
    pub struct H256(32);
}

construct_fixed_hash! {
    /// 512-bit hash type.
    #[derive(BorshSerialize, BorshDeserialize)]
    pub struct H512(64);
}
/// Serde visitor that accepts exactly 64 raw bytes for an `H512`.
struct H512Visitor;

impl<'de> Visitor<'de> for H512Visitor {
    type Value = H512;

    fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
        formatter.write_str("a 512-bit hash")
    }

    fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        // `try_into` enforces the 64-byte length; any other length is
        // reported as an `invalid_length` error.
        v.try_into()
            .map_err(|_| E::invalid_length(v.len(), &self))
            .map(H512)
    }
}
#[cfg(feature = "ethers")]
type EthersH160 = ethers_core::types::H160;
#[cfg(feature = "ethers")]
type EthersH256 = ethers_core::types::H256;

// Conversions between differently-sized hash types via `fixed_hash`'s
// conversion macro, including cross-crate conversions with the ethers
// types when the `ethers` feature is enabled.
#[cfg(feature = "ethers")]
impl_fixed_hash_conversions!(H256, EthersH160);
#[cfg(feature = "ethers")]
impl_fixed_hash_conversions!(EthersH256, H160);
impl_fixed_hash_conversions!(H256, H160);
impl_fixed_hash_conversions!(H512, H256);
impl_fixed_hash_conversions!(H512, H160);
/// Generate conversions between two uint types built by `construct_uint!`
/// (little-endian arrays of u64 limbs): widening is infallible (copy limbs),
/// narrowing is checked and fails with `Error::Overflow` if any high limb is
/// non-zero.
macro_rules! impl_fixed_uint_conversions {
    ($larger:ty, $smaller:ty) => {
        impl From<$smaller> for $larger {
            impl_fixed_uint_conversions!(@from_smaller $larger, $smaller);
        }

        impl<'a> From<&'a $smaller> for $larger {
            impl_fixed_uint_conversions!(@from_smaller $larger, &'a $smaller);
        }

        impl TryFrom<$larger> for $smaller {
            type Error = Error;

            impl_fixed_uint_conversions!(@try_from_larger $larger, $smaller);
        }

        impl<'a> TryFrom<&'a $larger> for $smaller {
            type Error = Error;

            impl_fixed_uint_conversions!(@try_from_larger &'a $larger, $smaller);
        }
    };
    (@from_smaller $larger:ty, $smaller:ty) => {
        // Copy the smaller value's limbs into the low limbs; high limbs stay zero.
        fn from(val: $smaller) -> $larger {
            let mut ret = <$larger>::zero();
            for i in 0..val.0.len() {
                ret.0[i] = val.0[i];
            }
            ret
        }
    };
    (@try_from_larger $larger:ty, $smaller:ty) => {
        fn try_from(val: $larger) -> Result<$smaller, Error> {
            let mut ret = <$smaller>::zero();
            for i in 0..ret.0.len() {
                ret.0[i] = val.0[i];
            }

            // Any non-zero limb beyond the smaller type's width means the
            // value cannot be represented.
            let mut ov = 0;
            for i in ret.0.len()..val.0.len() {
                ov |= val.0[i];
            }
            if ov == 0 {
                Ok(ret)
            } else {
                Err(Error::Overflow)
            }
        }
    };
}

#[cfg(feature = "ethers")]
impl_fixed_uint_conversions!(U256, ethers_core::types::U128);
impl_fixed_uint_conversions!(U256, U128);
impl_fixed_uint_conversions!(U512, U128);
impl_fixed_uint_conversions!(U512, U256);
/// Generate lossy `f64` conversions for a uint type built by `construct_uint!`.
macro_rules! impl_f64_conversions {
    ($ty:ty) => {
        impl $ty {
            /// Lossy saturating conversion from a `f64` to a `$ty`. Like for floating point to
            /// primitive integer type conversions, this truncates fractional parts.
            ///
            /// The conversion follows the same rules as converting `f64` to other
            /// primitive integer types. Namely, the conversion of `value: f64` behaves as
            /// follows:
            /// - `NaN` => `0`
            /// - `(-∞, 0]` => `0`
            /// - `(0, $ty::MAX]` => `value as $ty`
            /// - `($ty::MAX, +∞)` => `$ty::MAX`
            pub fn from_f64_lossy(val: f64) -> $ty {
                // Total bit width of the target type (limb count * bits per limb).
                const TY_BITS: u64 = <$ty>::zero().0.len() as u64 * <$ty>::WORD_BITS as u64;
                if val >= 1.0 {
                    let bits = val.to_bits();
                    // NOTE: Don't consider the sign or check that the subtraction will
                    // underflow since we already checked that the value is greater
                    // than 1.0.
                    // IEEE-754 double: 11-bit biased exponent (bias 1023) in bits 52..62.
                    let exponent = ((bits >> 52) & 0x7ff) - 1023;
                    // 52 stored mantissa bits with the implicit leading 1 restored.
                    let mantissa = (bits & 0x0f_ffff_ffff_ffff) | 0x10_0000_0000_0000;
                    if exponent <= 52 {
                        // Shift away the fractional bits (truncation).
                        <$ty>::from(mantissa >> (52 - exponent))
                    } else if exponent < TY_BITS {
                        // Scale the mantissa up; still fits in the target width.
                        <$ty>::from(mantissa) << <$ty>::from(exponent - 52)
                    } else {
                        // Too large to represent: saturate.
                        <$ty>::MAX
                    }
                } else {
                    // NaN, negatives, and values in [0, 1) all map to zero.
                    <$ty>::zero()
                }
            }

            /// Lossy conversion of `$ty` to `f64`.
            pub fn to_f64_lossy(self) -> f64 {
                // Accumulate limb_i * 2^(WORD_BITS * i), most significant limb first.
                let mut acc = 0.0;
                for i in (0..self.0.len()).rev() {
                    acc += self.0[i] as f64 * 2.0f64.powi((i * <$ty>::WORD_BITS) as i32);
                }
                acc
            }
        }
    };
}

impl_f64_conversions!(U128);
impl_f64_conversions!(U256);
impl_f64_conversions!(U512);
// Conversions between our types and the same-width ethers types. Both sides
// are tuple structs over the same inner representation, so converting is a
// plain copy of the inner field in either direction.
#[cfg(feature = "ethers")]
macro_rules! impl_inner_conversion {
    ($a:ty, $b:ty) => {
        impl From<$a> for $b {
            fn from(val: $a) -> Self {
                Self(val.0)
            }
        }

        impl<'a> From<&'a $a> for $b {
            fn from(val: &'a $a) -> Self {
                Self(val.0)
            }
        }

        impl From<$b> for $a {
            fn from(val: $b) -> Self {
                Self(val.0)
            }
        }

        impl<'a> From<&'a $b> for $a {
            fn from(val: &'a $b) -> Self {
                Self(val.0)
            }
        }
    };
}

#[cfg(feature = "ethers")]
impl_inner_conversion!(H128, ethers_core::types::H128);
#[cfg(feature = "ethers")]
impl_inner_conversion!(H160, ethers_core::types::H160);
#[cfg(feature = "ethers")]
impl_inner_conversion!(H256, ethers_core::types::H256);
#[cfg(feature = "ethers")]
impl_inner_conversion!(H512, ethers_core::types::H512);
#[cfg(feature = "ethers")]
impl_inner_conversion!(U128, ethers_core::types::U128);
#[cfg(feature = "ethers")]
impl_inner_conversion!(U256, ethers_core::types::U256);
#[cfg(feature = "ethers")]
impl_inner_conversion!(U512, ethers_core::types::U512);
/// Add Serde serialization support to an integer created by `construct_uint!`.
/// `$len` is the number of 64-bit words.
macro_rules! impl_uint_serde {
    ($name: ident, $len: expr) => {
        impl serde::Serialize for $name {
            fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
            where
                S: serde::Serializer,
            {
                // Scratch buffer sized for "0x" plus two hex chars per byte.
                let mut slice = [0u8; 2 + 2 * $len * 8];
                let mut bytes = [0u8; $len * 8];
                self.to_big_endian(&mut bytes);
                // Uints serialize as minimal (leading-zero-trimmed) 0x-prefixed hex.
                serialize::serialize_uint(&mut slice, &bytes, serializer)
            }
        }

        impl<'de> serde::Deserialize<'de> for $name {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                let mut bytes = [0u8; $len * 8];
                // Accept any byte length up to the full width of the type.
                let wrote = serialize::deserialize_check_len(
                    deserializer,
                    serialize::ExpectedLen::Between(0, &mut bytes),
                )?;
                // Interpret the decoded bytes as a big-endian integer.
                Ok(bytes[0..wrote].into())
            }
        }
    };
}

/// Add Serde serialization support to a fixed-sized hash type created by `construct_fixed_hash!`.
/// `$len` is the number of bytes.
macro_rules! impl_fixed_hash_serde {
    ($name: ident, $len: expr) => {
        impl serde::Serialize for $name {
            fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
            where
                S: serde::Serializer,
            {
                let mut slice = [0u8; 2 + 2 * $len];
                // Hashes serialize as fixed-width 0x-prefixed hex (leading zeros kept).
                serialize::serialize_raw(&mut slice, &self.0, serializer)
            }
        }

        impl<'de> serde::Deserialize<'de> for $name {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                let mut bytes = [0u8; $len];
                // Hashes require the exact byte length; anything else is rejected.
                serialize::deserialize_check_len(
                    deserializer,
                    serialize::ExpectedLen::Exact(&mut bytes),
                )?;
                Ok($name(bytes))
            }
        }
    };
}

impl_uint_serde!(U128, 2);
impl_uint_serde!(U256, 4);
impl_uint_serde!(U512, 8);

impl_fixed_hash_serde!(H128, 16);
impl_fixed_hash_serde!(H160, 20);
impl_fixed_hash_serde!(H256, 32);
impl_fixed_hash_serde!(H512, 64);

@ -0,0 +1,349 @@
#![allow(unused)]
// Based on https://github.com/paritytech/parity-common/blob/7194def73feb7d97644303f1a6ddbab29bbb799f/primitive-types/impls/serde/src/serialize.rs
// Copyright 2020 Parity Technologies
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::{fmt, result::Result};
use serde::{de, Deserializer, Serializer};
static CHARS: &[u8] = b"0123456789abcdef";

/// Serialize given bytes to a 0x-prefixed hex string.
///
/// If `skip_leading_zero` initial 0s will not be printed out,
/// unless the byte string is empty, in which case `0x0` will be returned.
/// The results are consistent with `serialize_uint` output if the flag is
/// on and `serialize_raw` if the flag is off.
pub fn to_hex(bytes: &[u8], skip_leading_zero: bool) -> String {
    let payload = if skip_leading_zero {
        let zeros = bytes.iter().take_while(|b| **b == 0).count();
        if zeros == bytes.len() {
            // All-zero (or empty) input collapses to the canonical "0x0".
            return "0x0".into();
        }
        &bytes[zeros..]
    } else {
        if bytes.is_empty() {
            return "0x".into();
        }
        bytes
    };

    let mut scratch = vec![0u8; (payload.len() + 1) * 2];
    String::from(to_hex_raw(&mut scratch, payload, skip_leading_zero))
}

/// Hex-encode `bytes` into the caller-provided buffer `v`, returning the
/// encoded prefix of `v` as a `&str`. `bytes` must be non-empty.
fn to_hex_raw<'a>(v: &'a mut [u8], bytes: &[u8], skip_leading_zero: bool) -> &'a str {
    // Need room for "0x" plus two hex chars per byte.
    assert!(v.len() > 1 + bytes.len() * 2);

    v[0] = b'0';
    v[1] = b'x';

    let mut idx = 2;
    // The leading nibble may be suppressed when trimming leading zeros.
    let high = bytes[0] >> 4;
    if !(skip_leading_zero && high == 0) {
        v[idx] = CHARS[high as usize];
        idx += 1;
    }
    v[idx] = CHARS[(bytes[0] & 0xf) as usize];
    idx += 1;

    for &byte in &bytes[1..] {
        v[idx] = CHARS[(byte >> 4) as usize];
        v[idx + 1] = CHARS[(byte & 0xf) as usize];
        idx += 2;
    }

    // SAFETY: all characters come either from CHARS or "0x", therefore valid UTF8
    #[allow(unsafe_code)]
    unsafe {
        core::str::from_utf8_unchecked(&v[0..idx])
    }
}
/// Decoding bytes from hex string error.
#[derive(Debug, PartialEq, Eq)]
pub enum FromHexError {
    /// The `0x` prefix is missing.
    #[deprecated(since = "0.3.2", note = "We support non 0x-prefixed hex strings")]
    MissingPrefix,
    /// Invalid (non-hex) character encountered.
    InvalidHex {
        /// The unexpected character.
        character: char,
        /// Index of that occurrence.
        index: usize,
    },
}

#[cfg(feature = "std")]
impl std::error::Error for FromHexError {}

impl fmt::Display for FromHexError {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        match self {
            #[allow(deprecated)]
            Self::MissingPrefix => write!(fmt, "0x prefix is missing"),
            Self::InvalidHex { character, index } => {
                write!(fmt, "invalid hex character: {}, at {}", character, index)
            }
        }
    }
}

/// Decode given (both 0x-prefixed or not) hex string into a vector of bytes.
///
/// Returns an error if non-hex characters are present.
pub fn from_hex(v: &str) -> Result<Vec<u8>, FromHexError> {
    let (digits, stripped) = match v.strip_prefix("0x") {
        Some(rest) => (rest, true),
        None => (v, false),
    };

    // Round up so an odd leading nibble still gets a byte.
    let mut out = vec![0u8; (digits.len() + 1) / 2];
    from_hex_raw(digits, &mut out, stripped)?;
    Ok(out)
}

/// Decode given 0x-prefix-stripped hex string into provided slice.
/// Used internally by `from_hex` and `deserialize_check_len`.
///
/// The method will panic if `bytes` have incorrect length (make sure to allocate enough beforehand).
fn from_hex_raw(v: &str, bytes: &mut [u8], stripped: bool) -> Result<usize, FromHexError> {
    // Start at 1 for odd-length input so the first nibble completes a byte alone.
    let mut modulus = v.len() % 2;
    let mut acc = 0u8;
    let mut written = 0;

    for (index, digit) in v.bytes().enumerate() {
        acc <<= 4;
        match digit {
            b'A'..=b'F' => acc |= digit - b'A' + 10,
            b'a'..=b'f' => acc |= digit - b'a' + 10,
            b'0'..=b'9' => acc |= digit - b'0',
            b' ' | b'\r' | b'\n' | b'\t' => {
                // Whitespace is tolerated: undo the shift and skip it.
                acc >>= 4;
                continue;
            }
            other => {
                return Err(FromHexError::InvalidHex {
                    character: char::from(other),
                    // Offset by the stripped "0x" so the index matches the input string.
                    index: index + if stripped { 2 } else { 0 },
                });
            }
        }
        modulus += 1;
        if modulus == 2 {
            modulus = 0;
            bytes[written] = acc;
            written += 1;
        }
    }

    Ok(written)
}
/// Serializes a slice of bytes.
///
/// `slice` is caller-provided scratch space for the hex encoding; it must be
/// longer than `1 + bytes.len() * 2` (see `to_hex_raw`).
pub fn serialize_raw<S>(slice: &mut [u8], bytes: &[u8], serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    if bytes.is_empty() {
        serializer.serialize_str("0x")
    } else {
        serializer.serialize_str(to_hex_raw(slice, bytes, false))
    }
}

/// Serializes a slice of bytes.
///
/// Convenience wrapper over `serialize_raw` that allocates its own scratch buffer.
pub fn serialize<S>(bytes: &[u8], serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    let mut slice = vec![0u8; (bytes.len() + 1) * 2];
    serialize_raw(&mut slice, bytes, serializer)
}

/// Serialize a slice of bytes as uint.
///
/// The representation will have all leading zeros trimmed.
pub fn serialize_uint<S>(slice: &mut [u8], bytes: &[u8], serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    // Drop leading zero bytes so the hex form is minimal; all-zero becomes "0x0".
    let non_zero = bytes.iter().take_while(|b| **b == 0).count();
    let bytes = &bytes[non_zero..];
    if bytes.is_empty() {
        serializer.serialize_str("0x0")
    } else {
        serializer.serialize_str(to_hex_raw(slice, bytes, true))
    }
}
/// Expected length of bytes vector.
///
/// The embedded slices double as the destination buffers that the
/// deserializer writes decoded bytes into.
#[derive(Debug, PartialEq, Eq)]
pub enum ExpectedLen<'a> {
    /// Exact length in bytes.
    Exact(&'a mut [u8]),
    /// A bytes length between (min; slice.len()].
    Between(usize, &'a mut [u8]),
}

impl<'a> fmt::Display for ExpectedLen<'a> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ExpectedLen::Exact(v) => write!(fmt, "{} bytes", v.len()),
            ExpectedLen::Between(min, v) => write!(fmt, "between ({}; {}] bytes", min, v.len()),
        }
    }
}
/// Deserialize into vector of bytes. This will allocate an O(n) intermediate
/// string.
pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error>
where
    D: Deserializer<'de>,
{
    struct Visitor;

    impl<'b> de::Visitor<'b> for Visitor {
        type Value = Vec<u8>;

        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            write!(
                formatter,
                "a (both 0x-prefixed or not) hex string or byte array"
            )
        }

        // Hex string input: decoded through `from_hex`.
        fn visit_str<E: de::Error>(self, v: &str) -> Result<Self::Value, E> {
            from_hex(v).map_err(E::custom)
        }

        fn visit_string<E: de::Error>(self, v: String) -> Result<Self::Value, E> {
            self.visit_str(&v)
        }

        // Raw byte input: taken verbatim, no hex decoding.
        fn visit_bytes<E: de::Error>(self, v: &[u8]) -> Result<Self::Value, E> {
            Ok(v.to_vec())
        }

        fn visit_byte_buf<E: de::Error>(self, v: Vec<u8>) -> Result<Self::Value, E> {
            Ok(v)
        }

        // Sequence-of-u8 input (formats without a native bytes type).
        fn visit_seq<A: de::SeqAccess<'b>>(self, mut seq: A) -> Result<Self::Value, A::Error> {
            let mut bytes = vec![];
            while let Some(n) = seq.next_element::<u8>()? {
                bytes.push(n);
            }
            Ok(bytes)
        }

        fn visit_newtype_struct<D: Deserializer<'b>>(
            self,
            deserializer: D,
        ) -> Result<Self::Value, D::Error> {
            deserializer.deserialize_bytes(self)
        }
    }

    // Hint `str` as the expected form; self-describing formats may still
    // dispatch to the bytes/seq visitor methods above.
    deserializer.deserialize_str(Visitor)
}
/// Deserialize into vector of bytes with additional size check.
/// Returns number of bytes written.
pub fn deserialize_check_len<'a, 'de, D>(
    deserializer: D,
    len: ExpectedLen<'a>,
) -> Result<usize, D::Error>
where
    D: Deserializer<'de>,
{
    struct Visitor<'a> {
        // Carries both the length constraint and the destination buffer.
        len: ExpectedLen<'a>,
    }

    impl<'a, 'b> de::Visitor<'b> for Visitor<'a> {
        type Value = usize;

        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            write!(
                formatter,
                "a (both 0x-prefixed or not) hex string or byte array containing {}",
                self.len
            )
        }

        fn visit_str<E: de::Error>(self, v: &str) -> Result<Self::Value, E> {
            let (v, stripped) = v.strip_prefix("0x").map_or((v, false), |v| (v, true));

            let len = v.len();
            // Length check in hex characters: two per byte, hence the factor 2.
            let is_len_valid = match self.len {
                ExpectedLen::Exact(ref slice) => len == 2 * slice.len(),
                ExpectedLen::Between(min, ref slice) => len <= 2 * slice.len() && len > 2 * min,
            };

            if !is_len_valid {
                return Err(E::invalid_length(v.len(), &self));
            }

            let bytes = match self.len {
                ExpectedLen::Exact(slice) => slice,
                ExpectedLen::Between(_, slice) => slice,
            };

            // Decode directly into the caller's buffer; Ok carries bytes written.
            from_hex_raw(v, bytes, stripped).map_err(E::custom)
        }

        fn visit_string<E: de::Error>(self, v: String) -> Result<Self::Value, E> {
            self.visit_str(&v)
        }

        fn visit_bytes<E: de::Error>(self, v: &[u8]) -> Result<Self::Value, E> {
            let len = v.len();
            // Raw-byte input: lengths are counted in bytes, no factor of 2.
            let is_len_valid = match self.len {
                ExpectedLen::Exact(ref slice) => len == slice.len(),
                ExpectedLen::Between(min, ref slice) => len <= slice.len() && len > min,
            };

            if !is_len_valid {
                return Err(E::invalid_length(v.len(), &self));
            }

            let bytes = match self.len {
                ExpectedLen::Exact(slice) => slice,
                ExpectedLen::Between(_, slice) => slice,
            };

            bytes[..len].copy_from_slice(v);
            Ok(len)
        }

        fn visit_byte_buf<E: de::Error>(self, v: Vec<u8>) -> Result<Self::Value, E> {
            self.visit_bytes(&v)
        }

        fn visit_seq<A: de::SeqAccess<'b>>(self, mut seq: A) -> Result<Self::Value, A::Error> {
            let mut v = vec![];
            while let Some(n) = seq.next_element::<u8>()? {
                v.push(n);
            }
            self.visit_byte_buf(v)
        }

        fn visit_newtype_struct<D: Deserializer<'b>>(
            self,
            deserializer: D,
        ) -> Result<Self::Value, D::Error> {
            deserializer.deserialize_bytes(self)
        }
    }

    deserializer.deserialize_str(Visitor { len })
}

@ -1,8 +1,9 @@
use std::time::Duration; use std::{str::FromStr, time::Duration};
use eyre::Result;
use sha3::{digest::Update, Digest, Keccak256}; use sha3::{digest::Update, Digest, Keccak256};
use crate::{KnownHyperlaneDomain, H256}; use crate::{KnownHyperlaneDomain, H160, H256};
/// Strips the '0x' prefix off of hex string so it can be deserialized. /// Strips the '0x' prefix off of hex string so it can be deserialized.
/// ///
@ -17,6 +18,25 @@ pub fn strip_0x_prefix(s: &str) -> &str {
} }
} }
/// Converts a hex or base58 string to an H256.
pub fn hex_or_base58_to_h256(string: &str) -> Result<H256> {
    if string.starts_with("0x") {
        // 0x-prefixed hex: either a full 32-byte value (66 chars) or a
        // 20-byte H160 (42 chars) widened into an H256.
        match string.len() {
            66 => Ok(H256::from_str(string)?),
            42 => Ok(H160::from_str(string)?.into()),
            _ => eyre::bail!("Invalid hex string"),
        }
    } else {
        // Otherwise treat the input as base58 and require exactly 32 bytes.
        let decoded = bs58::decode(string).into_vec()?;
        if decoded.len() != 32 {
            eyre::bail!("Invalid length of base58 string")
        }
        Ok(H256::from_slice(decoded.as_slice()))
    }
}
/// Computes hash of domain concatenated with "HYPERLANE" /// Computes hash of domain concatenated with "HYPERLANE"
pub fn domain_hash(address: H256, domain: impl Into<u32>) -> H256 { pub fn domain_hash(address: H256, domain: impl Into<u32>) -> H256 {
H256::from_slice( H256::from_slice(
@ -55,10 +75,17 @@ pub fn fmt_bytes(bytes: &[u8]) -> String {
/// Format a domain id as a name if it is known or just the number if not. /// Format a domain id as a name if it is known or just the number if not.
pub fn fmt_domain(domain: u32) -> String { pub fn fmt_domain(domain: u32) -> String {
#[cfg(feature = "strum")]
{
KnownHyperlaneDomain::try_from(domain) KnownHyperlaneDomain::try_from(domain)
.map(|d| d.to_string()) .map(|d| d.to_string())
.unwrap_or_else(|_| domain.to_string()) .unwrap_or_else(|_| domain.to_string())
} }
#[cfg(not(feature = "strum"))]
{
domain.to_string()
}
}
/// Formats the duration in the most appropriate time units. /// Formats the duration in the most appropriate time units.
pub fn fmt_duration(dur: Duration) -> String { pub fn fmt_duration(dur: Duration) -> String {

@ -1,3 +1,5 @@
cargo-features = ["workspace-inheritance"]
[package] [package]
name = "hyperlane-test" name = "hyperlane-test"
documentation.workspace = true documentation.workspace = true

@ -0,0 +1,2 @@
/target
environments/**/deploy-logs.txt

@ -0,0 +1,127 @@
# Hyperlane Sealevel (Solana VM) Integration
# Running local end to end test
A local end to end test has been written that will:
1. Run a local Solana network
2. Deploy two sets of core contracts (i.e. Mailbox / Multisig ISM / ValidatorAnnounce) onto this chain, one with domain 13375 and the other 13376.
3. Deploy a "native" warp route on domain 13375 and a "synthetic" warp route on domain 13376
4. Send native lamports from domain 13375 to 13376
5. A validator & relayer can then be spun up to deliver the message
### Build and run solana-test-validator
This only needs to be done once when initially setting things up.
1. Clone the `solar-eclipse` repo, which is the Eclipse fork of the Solana repo. This is needed to run the local Solana network. Check out the `steven/hyperlane-fix-deps` branch:
```
git clone git@github.com:Eclipse-Laboratories-Inc/solar-eclipse --branch steven/hyperlane-fix-deps
```
2. `cd` into the repo and build the `solana-test-validator` using the local `cargo` script (which ensures the correct version is used):
```
./cargo build -p solana-test-validator
```
### Check out `eclipse-program-library`
This is a fork (with some dependency fixes) of the eclipse fork of the `solana-program-library`. This contains "SPL" programs that are commonly used programs - stuff like the token program, etc.
Note these instructions previously required a different remote and branch - make sure to move to this remote & branch if you haven't already!
1. Check out the branch `trevor/steven/eclipse-1.14.13/with-tlv-lib`:
```
git clone git@github.com:tkporter/eclipse-program-library.git --branch trevor/steven/eclipse-1.14.13/with-tlv-lib
```
### Build the required SPL programs and Hyperlane programs
This command will build all the required SPL programs (e.g. the token program, token 2022 program, SPL noop, etc...) found in the local repo of `eclipse-program-library`,
and will build all the required Hyperlane programs (e.g. the Mailbox program, Validator Announce, etc...).
You need to run this if any changes are made to programs that you want to be used in future runs of the end to end test.
Change the paths to your local `solar-eclipse` repo and `eclipse-program-library` as necessary, and run this from the `rust` directory of hyperlane-monorepo.
```
SOLAR_ECLIPSE_DIR=~/solar-eclipse ECLIPSE_PROGRAM_LIBRARY_DIR=~/eclipse-program-library ./utils/sealevel-test.bash build-only
```
### Run the local Solana network
This will run the `solana-test-validator` with a funded test account `E9VrvAdGRvCguN2XgXsgu9PNmMM3vZsU8LSUrM68j8ty` that will later be used for deploying contracts. It will also create some of the required SPL programs at the specified program IDs - these program IDs are consistent across Solana networks and are required by our Hyperlane programs. Change paths as necessary - the \*.so files should have been created by the prior command. The `--ledger` directory is arbitrary and is just the data dir for the Solana validator.
```
mkdir -p /tmp/eclipse/ledger-dir && target/debug/solana-test-validator --reset --ledger /tmp/eclipse/ledger-dir --account E9VrvAdGRvCguN2XgXsgu9PNmMM3vZsU8LSUrM68j8ty ~/abacus-monorepo/rust/config/sealevel/test-keys/test_deployer-account.json --bpf-program TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA ~/eclipse-program-library/target/deploy/spl_token.so --bpf-program TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb ~/eclipse-program-library/target/deploy/spl_token_2022.so --bpf-program ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL ~/eclipse-program-library/target/deploy/spl_associated_token_account.so --bpf-program noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV ~/eclipse-program-library/account-compression/target/deploy/spl_noop.so
```
By now you should have an output like this - keep it running and move to another terminal:
```
Ledger location: /tmp/eclipse/ledger-dir
Log: /tmp/eclipse/ledger-dir/validator.log
⠒ Initializing...
⠄ Initializing...
Identity: 4P5rtWdphhehU32myNQcTSMgrCRz7kdvZEnasX6fahJQ
Genesis Hash: G7CY7wEzbdjh8RwqTszxrpYTqiHKvqwpaw3JbmKJjJhU
Version: 1.14.13
Shred Version: 419
Gossip Address: 127.0.0.1:1024
TPU Address: 127.0.0.1:1027
JSON RPC URL: http://127.0.0.1:8899
⠒ 00:05:35 | Processed Slot: 668 | Confirmed Slot: 668 | Finalized Slot: 6
```
### Run the local end to end script
Run the script found at `rust/utils/sealevel-test.bash`. This will build all required programs, deploy contracts, and test sending a warp route message. You need to supply the paths to your local `solar-eclipse` and `eclipse-program-library` repos:
```
SOLAR_ECLIPSE_DIR=~/solar-eclipse ECLIPSE_PROGRAM_LIBRARY_DIR=~/eclipse-program-library ./utils/sealevel-test.bash
```
Note: this won't rebuild any of the programs. If you want to rebuild them, you can either cd into them individually and run `cargo build-sbf --arch sbf`, or you can run the above bash script with `force-build-programs` as the first argument.
You'll see a bunch of output here showing programs being built and deployed. Eventually you should see some logs saying `grep -q 'Message not delivered'`. At this point, the contracts have all been deployed and a native warp route transfer has been made. You can move on to running the validator and relayer.
### Running the validator
In a separate terminal, cd to `hyperlane-monorepo/rust`.
1. Source the env vars:
```
source ./config/sealevel/validator.env
```
2. Run the validator (this clears the DB / checkpoints if present):
```
mkdir /tmp/SEALEVEL_DB ; rm -rf /tmp/SEALEVEL_DB/validator /tmp/test_sealevel_checkpoints_0x70997970c51812dc3a010c7d01b50e0d17dc79c8/* ; CONFIG_FILES=./config/sealevel/sealevel.json cargo run --bin validator
```
You should see some INFO logs about checkpoint at index 0.
You can confirm things are working correctly by looking at `/tmp/CHECKPOINTS_DIR`, where the validator posts its signatures.
### Running the relayer
In a separate terminal, again in `hyperlane-monorepo/rust`:
1. Source the env vars:
```
source ./config/sealevel/relayer.env
```
2. Run the relayer (the rm is to make sure the relayer's DB is cleared):
```
rm -rf /tmp/SEALEVEL_DB/relayer ; RUST_BACKTRACE=full CONFIG_FILES=./config/sealevel/sealevel.json cargo run --bin relayer
```
When the original `sealevel-test.bash` exits with a 0 exit code and some logs about Hyperlane Token Storage, the message has been successfully delivered!

@ -0,0 +1,30 @@
cargo-features = ["workspace-inheritance"]
[package]
name = "hyperlane-sealevel-client"
version = "0.1.0"
edition = "2021"
[dependencies]
borsh.workspace = true
clap = { workspace = true, features = ["derive"] }
hex.workspace = true
pretty_env_logger.workspace = true
serde.workspace = true
serde_json.workspace = true
solana-clap-utils.workspace = true
solana-cli-config.workspace = true
solana-client.workspace = true
solana-program.workspace = true
solana-sdk.workspace = true
account-utils = { path = "../libraries/account-utils" }
hyperlane-core = { path = "../../hyperlane-core" }
hyperlane-sealevel-connection-client = { path = "../libraries/hyperlane-sealevel-connection-client" }
hyperlane-sealevel-mailbox = { path = "../programs/mailbox" }
hyperlane-sealevel-multisig-ism-message-id = { path = "../programs/ism/multisig-ism-message-id" }
hyperlane-sealevel-token = { path = "../programs/hyperlane-sealevel-token" }
hyperlane-sealevel-token-collateral = { path = "../programs/hyperlane-sealevel-token-collateral" }
hyperlane-sealevel-token-lib = { path = "../libraries/hyperlane-sealevel-token" }
hyperlane-sealevel-token-native = { path = "../programs/hyperlane-sealevel-token-native" }
hyperlane-sealevel-validator-announce = { path = "../programs/validator-announce" }

@ -0,0 +1,160 @@
use std::{
collections::HashMap,
fs::File,
io::Write,
path::{Path, PathBuf},
process::{Command, Stdio},
};
use solana_client::{client_error::ClientError, rpc_client::RpcClient};
use solana_sdk::{
commitment_config::CommitmentConfig,
pubkey::Pubkey,
signature::{Keypair, Signer},
};
/// Open `p` in append mode, creating the file first if it does not yet exist.
///
/// Panics when the file cannot be opened.
fn append_to(p: impl AsRef<Path>) -> File {
    let opened = File::options().create(true).append(true).open(p);
    opened.expect("Failed to open file")
}
/// Runs `cmd` to completion, sending its stdout either to our own stdout
/// (`log_all == true`) or appended to the `log` file. Optionally sets the
/// working directory and extra environment variables, and optionally asserts
/// that the command exited successfully.
pub fn build_cmd(
    cmd: &[&str],
    log: impl AsRef<Path>,
    log_all: bool,
    wd: Option<&str>,
    env: Option<&HashMap<&str, &str>>,
    assert_success: bool,
) {
    assert!(!cmd.is_empty(), "Must specify a command!");
    let (program, args) = (cmd[0], &cmd[1..]);
    let mut command = Command::new(program);
    command.args(args);
    // Mirror output to our stdout, or append it to the log file.
    if log_all {
        command.stdout(Stdio::inherit());
    } else {
        command.stdout(append_to(log));
    }
    if let Some(dir) = wd {
        command.current_dir(dir);
    }
    if let Some(vars) = env {
        command.envs(vars);
    }
    let status = command.status().expect("Failed to run command");
    if assert_success {
        assert!(
            status.success(),
            "Command returned non-zero exit code: {}",
            cmd.join(" ")
        );
    }
}
/// Returns whether an account exists at `account` on the cluster `client`
/// points at, checked at `processed` commitment.
pub(crate) fn account_exists(client: &RpcClient, account: &Pubkey) -> Result<bool, ClientError> {
    // `get_account_with_commitment` yields Ok(None) for a missing account,
    // whereas `get_account` would surface that case as an error.
    let response = client.get_account_with_commitment(account, CommitmentConfig::processed())?;
    Ok(response.value.is_some())
}
/// Deploys `program_path` under `program_keypair` unless an account already
/// exists at that address, in which case the deploy is skipped.
pub(crate) fn deploy_program_idempotent(
    payer_path: &str,
    program_keypair: &Keypair,
    program_keypair_path: &str,
    program_path: &str,
    url: &str,
    log_file: impl AsRef<Path>,
) -> Result<(), ClientError> {
    let client = RpcClient::new(url.to_string());
    if account_exists(&client, &program_keypair.pubkey())? {
        println!("Program {} already deployed", program_keypair.pubkey());
    } else {
        deploy_program(payer_path, program_keypair_path, program_path, url, log_file);
    }
    Ok(())
}
/// Deploys `program_path` via the `solana program deploy` CLI, paying with
/// `payer_path` and using the payer as the upgrade authority. Command output
/// is inherited (not logged to `log_file`), and a failed deploy panics.
pub(crate) fn deploy_program(
    payer_path: &str,
    program_keypair_path: &str,
    program_path: &str,
    url: &str,
    log_file: impl AsRef<Path>,
) {
    let args = [
        "solana",
        "--url",
        url,
        "-k",
        payer_path,
        "program",
        "deploy",
        program_path,
        "--upgrade-authority",
        payer_path,
        "--program-id",
        program_keypair_path,
    ];
    build_cmd(&args, log_file, true, None, None, true);
}
/// Creates (truncating if present) the file `parent_dir/name` and returns its
/// path. Panics when the file cannot be created.
pub(crate) fn create_new_file(parent_dir: &Path, name: &str) -> PathBuf {
    let path = parent_dir.join(name);
    File::create(&path)
        .unwrap_or_else(|_| panic!("Failed to create file {}", path.display()));
    path
}
/// Creates the directory `parent_dir/name` (and any missing parents),
/// returning its path. Panics when creation fails.
pub(crate) fn create_new_directory(parent_dir: &Path, name: &str) -> PathBuf {
    let path = parent_dir.join(name);
    std::fs::create_dir_all(&path)
        .unwrap_or_else(|_| panic!("Failed to create directory {}", path.display()));
    path
}
/// Returns a keypair stored at `key_dir/key_name`, together with its path.
///
/// When `use_existing_key` is set and the file already exists, the keypair is
/// read back from disk; otherwise a fresh keypair is generated and written
/// out as a JSON byte array.
pub(crate) fn create_and_write_keypair(
    key_dir: &Path,
    key_name: &str,
    use_existing_key: bool,
) -> (Keypair, PathBuf) {
    let path = key_dir.join(key_name);
    if use_existing_key {
        if let Ok(file) = File::open(&path) {
            println!("Using existing key at path {}", path.display());
            let bytes: Vec<u8> = serde_json::from_reader(file).unwrap();
            return (Keypair::from_bytes(&bytes).unwrap(), path);
        }
    }
    let keypair = Keypair::new();
    let serialized = serde_json::to_string(&keypair.to_bytes()[..]).unwrap();
    File::create(&path)
        .expect("Failed to create keypair file")
        .write_all(serialized.as_bytes())
        .expect("Failed to write keypair to file");
    println!("Wrote keypair {} to {}", keypair.pubkey(), path.display());
    (keypair, path)
}

@ -0,0 +1,264 @@
use serde::{Deserialize, Serialize};
use solana_program::pubkey::Pubkey;
use solana_sdk::signature::Signer;
use std::{fs::File, io::Write, path::Path, str::FromStr};
use crate::{
cmd_utils::{create_and_write_keypair, create_new_directory, create_new_file, deploy_program},
Context, CoreCmd, CoreSubCmd,
};
/// Handles `core` subcommands. For `deploy`: lays out the artifact
/// directories, deploys and initializes the core programs, and records their
/// program IDs to disk.
pub(crate) fn process_core_cmd(mut ctx: Context, cmd: CoreCmd) {
    match cmd.cmd {
        CoreSubCmd::Deploy(core) => {
            // Artifact layout:
            // <environments_dir>/<environment>/<chain>/core/{keys, deploy-logs.txt}
            let env_dir = create_new_directory(&core.environments_dir, &core.environment);
            let chain_dir = create_new_directory(&env_dir, &core.chain);
            let core_dir = create_new_directory(&chain_dir, "core");
            let key_dir = create_new_directory(&core_dir, "keys");
            let log_file = create_new_file(&core_dir, "deploy-logs.txt");

            // Deploy in dependency order: the ISM first, then the Mailbox
            // (which takes the ISM as its default), then ValidatorAnnounce
            // (which takes the Mailbox).
            let ism_program_id = deploy_multisig_ism_message_id(
                &mut ctx,
                core.use_existing_keys,
                &key_dir,
                &core.built_so_dir,
                &log_file,
            );
            let mailbox_program_id = deploy_mailbox(
                &mut ctx,
                core.use_existing_keys,
                &key_dir,
                &core.built_so_dir,
                &log_file,
                core.local_domain,
                ism_program_id,
            );
            let validator_announce_program_id = deploy_validator_announce(
                &mut ctx,
                core.use_existing_keys,
                &key_dir,
                &core.built_so_dir,
                &log_file,
                mailbox_program_id,
                core.local_domain,
            );

            // Persist the program IDs so later commands can read them back.
            write_program_ids(
                &core_dir,
                CoreProgramIds {
                    mailbox: mailbox_program_id,
                    validator_announce: validator_announce_program_id,
                    multisig_ism_message_id: ism_program_id,
                },
            );
        }
    }
}
/// Deploys the Multisig ISM Message ID program and sends its `init`
/// instruction, returning the program's ID.
fn deploy_multisig_ism_message_id(
    ctx: &mut Context,
    use_existing_key: bool,
    key_dir: &Path,
    built_so_dir: &Path,
    log_file: impl AsRef<Path>,
) -> Pubkey {
    let (keypair, keypair_path) = create_and_write_keypair(
        key_dir,
        "hyperlane_sealevel_multisig_ism_message_id-keypair.json",
        use_existing_key,
    );
    let program_id = keypair.pubkey();

    let so_path = built_so_dir.join("hyperlane_sealevel_multisig_ism_message_id.so");
    deploy_program(
        &ctx.payer_path,
        keypair_path.to_str().unwrap(),
        so_path.to_str().unwrap(),
        &ctx.client.url(),
        log_file,
    );
    println!(
        "Deployed Multisig ISM Message ID at program ID {}",
        program_id
    );

    // Initialize the program, with the payer signing.
    let init = hyperlane_sealevel_multisig_ism_message_id::instruction::init_instruction(
        program_id,
        ctx.payer.pubkey(),
    )
    .unwrap();
    ctx.instructions.push(init);
    ctx.send_transaction(&[&ctx.payer]);
    ctx.instructions.clear();
    println!("Initialized Multisig ISM Message ID ");

    program_id
}
/// Deploys the Mailbox program and initializes it with the local domain and
/// the default ISM, returning the program's ID.
fn deploy_mailbox(
    ctx: &mut Context,
    use_existing_key: bool,
    key_dir: &Path,
    built_so_dir: &Path,
    log_file: impl AsRef<Path>,
    local_domain: u32,
    default_ism: Pubkey,
) -> Pubkey {
    let (keypair, keypair_path) = create_and_write_keypair(
        key_dir,
        "hyperlane_sealevel_mailbox-keypair.json",
        use_existing_key,
    );
    let program_id = keypair.pubkey();

    let so_path = built_so_dir.join("hyperlane_sealevel_mailbox.so");
    deploy_program(
        &ctx.payer_path,
        keypair_path.to_str().unwrap(),
        so_path.to_str().unwrap(),
        &ctx.client.url(),
        log_file,
    );
    println!("Deployed Mailbox at program ID {}", program_id);

    // Initialize the Mailbox with its domain and default ISM.
    let init = hyperlane_sealevel_mailbox::instruction::init_instruction(
        program_id,
        local_domain,
        default_ism,
        ctx.payer.pubkey(),
    )
    .unwrap();
    ctx.instructions.push(init);
    ctx.send_transaction(&[&ctx.payer]);
    ctx.instructions.clear();
    println!("Initialized Mailbox");

    program_id
}
/// Deploys the ValidatorAnnounce program and initializes it against the given
/// Mailbox and domain, returning the program's ID.
fn deploy_validator_announce(
    ctx: &mut Context,
    use_existing_key: bool,
    key_dir: &Path,
    built_so_dir: &Path,
    log_file: impl AsRef<Path>,
    mailbox_program_id: Pubkey,
    local_domain: u32,
) -> Pubkey {
    let (keypair, keypair_path) = create_and_write_keypair(
        key_dir,
        "hyperlane_sealevel_validator_announce-keypair.json",
        use_existing_key,
    );
    let program_id = keypair.pubkey();

    let so_path = built_so_dir.join("hyperlane_sealevel_validator_announce.so");
    deploy_program(
        &ctx.payer_path,
        keypair_path.to_str().unwrap(),
        so_path.to_str().unwrap(),
        &ctx.client.url(),
        log_file,
    );
    println!("Deployed ValidatorAnnounce at program ID {}", program_id);

    // Initialize, binding this ValidatorAnnounce to the Mailbox and domain.
    let init = hyperlane_sealevel_validator_announce::instruction::init_instruction(
        program_id,
        ctx.payer.pubkey(),
        mailbox_program_id,
        local_domain,
    )
    .unwrap();
    ctx.instructions.push(init);
    ctx.send_transaction(&[&ctx.payer]);
    ctx.instructions.clear();
    println!("Initialized ValidatorAnnounce");

    program_id
}
/// Program IDs of a chain's deployed core programs.
#[derive(Debug)]
pub(crate) struct CoreProgramIds {
    pub mailbox: Pubkey,
    pub validator_announce: Pubkey,
    pub multisig_ism_message_id: Pubkey,
}
impl From<PrettyCoreProgramIds> for CoreProgramIds {
    /// Parses the base58 string form back into `Pubkey`s, panicking on
    /// malformed input.
    fn from(ids: PrettyCoreProgramIds) -> Self {
        let parse = |s: &str| Pubkey::from_str(s).unwrap();
        Self {
            mailbox: parse(ids.mailbox.as_str()),
            validator_announce: parse(ids.validator_announce.as_str()),
            multisig_ism_message_id: parse(ids.multisig_ism_message_id.as_str()),
        }
    }
}
/// Serializable (base58 string) form of `CoreProgramIds`, used for the
/// `program-ids.json` artifact.
#[derive(Debug, Serialize, Deserialize)]
struct PrettyCoreProgramIds {
    mailbox: String,
    validator_announce: String,
    multisig_ism_message_id: String,
}
impl From<CoreProgramIds> for PrettyCoreProgramIds {
fn from(program_ids: CoreProgramIds) -> Self {
Self {
mailbox: program_ids.mailbox.to_string(),
validator_announce: program_ids.validator_announce.to_string(),
multisig_ism_message_id: program_ids.multisig_ism_message_id.to_string(),
}
}
}
/// Serializes the core program IDs to `<core_dir>/program-ids.json` (pretty,
/// base58-encoded) so later commands can read them back.
///
/// Panics if serialization or the file write fails.
fn write_program_ids(core_dir: &Path, program_ids: CoreProgramIds) {
    let pretty_program_ids = PrettyCoreProgramIds::from(program_ids);
    let json = serde_json::to_string_pretty(&pretty_program_ids).unwrap();
    let path = core_dir.join("program-ids.json");
    println!("Writing program IDs to {}:\n{}", path.display(), json);
    // Fixed copy-pasted error message ("keypair file" -> "program IDs file").
    let mut file = File::create(path).expect("Failed to create program IDs file");
    file.write_all(json.as_bytes())
        .expect("Failed to write program IDs to file");
}
/// Reads the core program IDs back from
/// `<environments_dir>/<environment>/<chain>/core/program-ids.json`, panicking
/// when the file is missing or malformed.
pub(crate) fn read_core_program_ids(
    environments_dir: &Path,
    environment: &str,
    chain: &str,
) -> CoreProgramIds {
    let path = environments_dir
        .join(environment)
        .join(chain)
        .join("core")
        .join("program-ids.json");
    let file = File::open(path).expect("Failed to open program IDs file");
    let pretty: PrettyCoreProgramIds =
        serde_json::from_reader(file).expect("Failed to read program IDs file");
    pretty.into()
}

File diff suppressed because it is too large Load Diff

@ -0,0 +1,566 @@
use hyperlane_core::{utils::hex_or_base58_to_h256, H256};
use serde::{Deserialize, Serialize};
use std::{collections::HashMap, fs::File, path::Path, str::FromStr};
use solana_client::{client_error::ClientError, rpc_client::RpcClient};
use solana_program::program_error::ProgramError;
use solana_sdk::{commitment_config::CommitmentConfig, pubkey::Pubkey, signature::Signer};
use hyperlane_sealevel_connection_client::router::RemoteRouterConfig;
use hyperlane_sealevel_token::{hyperlane_token_mint_pda_seeds, spl_token, spl_token_2022};
use hyperlane_sealevel_token_lib::{
accounts::HyperlaneTokenAccount,
hyperlane_token_pda_seeds,
instruction::{enroll_remote_routers_instruction, Init},
};
use crate::{
cmd_utils::{
account_exists, create_and_write_keypair, create_new_directory, deploy_program_idempotent,
},
core::{read_core_program_ids, CoreProgramIds},
Context, WarpRouteCmd, WarpRouteSubCmd,
};
/// Decimal configuration for a token.
#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(rename_all = "camelCase")]
struct DecimalMetadata {
    /// Decimals of the token on this chain.
    decimals: u8,
    /// Remote decimals; defaults to `decimals` when unset.
    remote_decimals: Option<u8>,
}
impl DecimalMetadata {
    /// Remote decimals, falling back to the local `decimals` when unset.
    fn remote_decimals(&self) -> u8 {
        match self.remote_decimals {
            Some(remote) => remote,
            None => self.decimals,
        }
    }
}
/// The flavor of warp route being deployed, tagged by `type` in the JSON
/// config.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(tag = "type", rename_all = "camelCase")]
enum TokenType {
    /// Routes the chain's native token.
    Native,
    /// Routes a synthetic token whose mint is created by this tooling.
    Synthetic(TokenMetadata),
    /// Routes an existing SPL token used as collateral.
    Collateral(CollateralInfo),
}
impl TokenType {
fn program_name(&self) -> &str {
match self {
TokenType::Native => "hyperlane_sealevel_token_native",
TokenType::Synthetic(_) => "hyperlane_sealevel_token",
TokenType::Collateral(_) => "hyperlane_sealevel_token_collateral",
}
}
}
/// Display metadata for a synthetic token.
#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(rename_all = "camelCase")]
struct TokenMetadata {
    name: String,
    symbol: String,
    /// Total supply as a string; not read by the deploy code visible here.
    total_supply: Option<String>,
}
/// Which SPL token program owns a mint: the original token program or
/// token-2022.
#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(rename_all = "camelCase")]
enum SplTokenProgramType {
    Token,
    Token2022,
}
impl SplTokenProgramType {
    /// The program ID of the selected SPL token program.
    fn program_id(&self) -> Pubkey {
        match self {
            Self::Token => spl_token::id(),
            Self::Token2022 => spl_token_2022::id(),
        }
    }
}
/// Collateral-token specifics: which mint to use and which SPL token program
/// owns it.
#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(rename_all = "camelCase")]
struct CollateralInfo {
    /// The collateral mint address (JSON key: "token").
    #[serde(rename = "token")]
    mint: String,
    /// SPL token program of the mint; required when initializing the route.
    spl_token_program: Option<SplTokenProgramType>,
}
/// Optional overrides for the programs a warp route connects to, as base58
/// strings.
#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(rename_all = "camelCase")]
struct OptionalConnectionClientConfig {
    /// Mailbox override; falls back to the core deployment's Mailbox.
    mailbox: Option<String>,
    // NOTE(review): not read by the code visible here — confirm where it is
    // consumed before relying on it.
    interchain_gas_paymaster: Option<String>,
    /// Optional interchain security module program ID.
    interchain_security_module: Option<String>,
}
/// Optional ownership configuration. Ownership transfer is not yet supported
/// by this tooling; a configured owner only triggers a warning at deploy time.
#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(rename_all = "camelCase")]
struct OptionalOwnableConfig {
    owner: Option<String>,
}
/// Per-chain token configuration parsed from the warp route token config
/// JSON. Several sub-configs are `#[serde(flatten)]`ed into one JSON object.
#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(rename_all = "camelCase")]
struct TokenConfig {
    /// Native / synthetic / collateral, tagged by the `type` field.
    #[serde(flatten)]
    token_type: TokenType,
    /// If set, a router already exists for this chain at the given (hex or
    /// base58) address and no local deployment is performed.
    foreign_deployment: Option<String>,
    #[serde(flatten)]
    decimal_metadata: DecimalMetadata,
    #[serde(flatten)]
    ownable: OptionalOwnableConfig,
    #[serde(flatten)]
    connection_client: OptionalConnectionClientConfig,
}
/// A single RPC endpoint.
#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct RpcUrlConfig {
    /// The HTTP URL of the endpoint.
    pub http: String,
}
/// An abridged version of the Typescript ChainMetadata
#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ChainMetadata {
    /// Chain id; also used as the domain id when `domain_id` is unset.
    chain_id: u32,
    /// Hyperlane domain, only required if differs from id above
    domain_id: Option<u32>,
    /// Human-readable chain name.
    name: String,
    /// Collection of RPC endpoints
    public_rpc_urls: Vec<RpcUrlConfig>,
}
impl ChainMetadata {
    /// RPC client for the chain's first configured endpoint, using confirmed
    /// commitment.
    fn client(&self) -> RpcClient {
        let url = self.public_rpc_urls[0].http.clone();
        RpcClient::new_with_commitment(url, CommitmentConfig::confirmed())
    }

    /// The Hyperlane domain id, falling back to the chain id when unset.
    fn domain_id(&self) -> u32 {
        match self.domain_id {
            Some(domain_id) => domain_id,
            None => self.chain_id,
        }
    }
}
/// Handles `warp-route` subcommands. For `deploy`: reads the token and chain
/// configs from JSON, deploys a warp route program on every chain without a
/// `foreignDeployment`, diffs and updates each program's enrolled remote
/// routers, and writes the resulting router addresses to the artifacts dir.
pub(crate) fn process_warp_route_cmd(mut ctx: Context, cmd: WarpRouteCmd) {
    match cmd.cmd {
        WarpRouteSubCmd::Deploy(deploy) => {
            // Both config files are JSON maps keyed by chain name.
            let token_config_file = File::open(deploy.token_config_file).unwrap();
            let token_configs: HashMap<String, TokenConfig> =
                serde_json::from_reader(token_config_file).unwrap();
            let chain_config_file = File::open(deploy.chain_config_file).unwrap();
            let chain_configs: HashMap<String, ChainMetadata> =
                serde_json::from_reader(chain_config_file).unwrap();
            // Artifact layout:
            // <environments_dir>/<environment>/warp-routes/<name>/keys
            let environments_dir =
                create_new_directory(&deploy.environments_dir, &deploy.environment);
            let artifacts_dir = create_new_directory(&environments_dir, "warp-routes");
            let warp_route_dir = create_new_directory(&artifacts_dir, &deploy.warp_route_name);
            let keys_dir = create_new_directory(&warp_route_dir, "keys");
            // Chains with a `foreignDeployment` already have a router; record
            // its address instead of deploying.
            let foreign_deployments = token_configs
                .iter()
                .filter(|(_, token_config)| token_config.foreign_deployment.is_some())
                .map(|(chain_name, token_config)| {
                    let chain_config = chain_configs.get(chain_name).unwrap();
                    (
                        chain_config.domain_id(),
                        hex_or_base58_to_h256(token_config.foreign_deployment.as_ref().unwrap())
                            .unwrap(),
                    )
                })
                .collect::<HashMap<u32, H256>>();
            // Router addresses by domain id, seeded with foreign deployments.
            let mut routers: HashMap<u32, H256> = foreign_deployments;
            let token_configs_to_deploy = token_configs
                .into_iter()
                .filter(|(_, token_config)| token_config.foreign_deployment.is_none())
                .collect::<HashMap<_, _>>();
            // Deploy to chains that don't have a foreign deployment
            for (chain_name, token_config) in token_configs_to_deploy.iter() {
                let chain_config = chain_configs
                    .get(chain_name)
                    .unwrap_or_else(|| panic!("Chain config not found for chain: {}", chain_name));
                if token_config.ownable.owner.is_some() {
                    println!("WARNING: Ownership transfer is not yet supported in this deploy tooling, ownership is granted to the payer account");
                }
                let program_id = deploy_warp_route(
                    &mut ctx,
                    &keys_dir,
                    &deploy.environments_dir,
                    &deploy.environment,
                    &deploy.built_so_dir,
                    chain_config,
                    token_config,
                    deploy.ata_payer_funding_amount,
                );
                // The deployed program's ID, widened to H256, is its router address.
                routers.insert(
                    chain_config.domain_id(),
                    H256::from_slice(&program_id.to_bytes()[..]),
                );
            }
            // Now enroll routers
            for (chain_name, _) in token_configs_to_deploy {
                let chain_config = chain_configs
                    .get(&chain_name)
                    .unwrap_or_else(|| panic!("Chain config not found for chain: {}", chain_name));
                let domain_id = chain_config.domain_id();
                let program_id: Pubkey =
                    Pubkey::new_from_array(*routers.get(&domain_id).unwrap().as_fixed_bytes());
                // Diff the on-chain router set against the expected one so that
                // reruns only send the necessary changes.
                let enrolled_routers = get_routers(&chain_config.client(), &program_id).unwrap();
                // Every other domain's router is expected to be enrolled here.
                let expected_routers = routers
                    .iter()
                    .filter(|(router_domain_id, _)| *router_domain_id != &domain_id)
                    .map(|(domain, router)| {
                        (
                            *domain,
                            RemoteRouterConfig {
                                domain: *domain,
                                router: Some(*router),
                            },
                        )
                    })
                    .collect::<HashMap<u32, RemoteRouterConfig>>();
                // Routers to enroll (or update to a Some value)
                let routers_to_enroll = expected_routers
                    .iter()
                    .filter(|(domain, router_config)| {
                        enrolled_routers.get(domain) != router_config.router.as_ref()
                    })
                    .map(|(_, router_config)| router_config.clone());
                // Routers to remove
                let routers_to_unenroll = enrolled_routers
                    .iter()
                    .filter(|(domain, _)| !expected_routers.contains_key(domain))
                    .map(|(domain, _)| RemoteRouterConfig {
                        domain: *domain,
                        router: None,
                    });
                // All router config changes
                let router_configs = routers_to_enroll
                    .chain(routers_to_unenroll)
                    .collect::<Vec<RemoteRouterConfig>>();
                if !router_configs.is_empty() {
                    println!(
                        "Enrolling routers for chain: {}, program_id {}, routers: {:?}",
                        chain_name, program_id, router_configs,
                    );
                    ctx.instructions.push(
                        enroll_remote_routers_instruction(
                            program_id,
                            ctx.payer.pubkey(),
                            router_configs,
                        )
                        .unwrap(),
                    );
                    ctx.send_transaction_with_client(&chain_config.client(), &[&ctx.payer]);
                    ctx.instructions.clear();
                } else {
                    println!(
                        "No router changes for chain: {}, program_id {}",
                        chain_name, program_id
                    );
                }
            }
            // Map domain ids back to chain names for the artifact file.
            let routers_by_name: HashMap<String, H256> = routers
                .iter()
                .map(|(domain_id, router)| {
                    (
                        chain_configs
                            .iter()
                            .find(|(_, chain_config)| chain_config.domain_id() == *domain_id)
                            .unwrap()
                            .0
                            .clone(),
                        *router,
                    )
                })
                .collect::<HashMap<String, H256>>();
            write_program_ids(&warp_route_dir, &routers_by_name);
        }
    }
}
/// Deploys (idempotently) a single warp route program for `chain_config` and
/// initializes it, returning the deployed program's ID.
#[allow(clippy::too_many_arguments)]
fn deploy_warp_route(
    ctx: &mut Context,
    key_dir: &Path,
    environments_dir: &Path,
    environment: &str,
    built_so_dir: &Path,
    chain_config: &ChainMetadata,
    token_config: &TokenConfig,
    ata_payer_funding_amount: Option<u64>,
) -> Pubkey {
    println!(
        "Attempting deploy on chain: {}\nToken config: {:?}",
        chain_config.name, token_config
    );
    // Program keypair is named after the token program type and chain, and an
    // existing keypair file is reused (use_existing_key = true).
    let (keypair, keypair_path) = create_and_write_keypair(
        key_dir,
        format!(
            "{}-{}.json",
            token_config.token_type.program_name(),
            chain_config.name
        )
        .as_str(),
        true,
    );
    let program_id = keypair.pubkey();
    // Skips the deploy when an account already exists at the program address.
    deploy_program_idempotent(
        &ctx.payer_path,
        &keypair,
        keypair_path.to_str().unwrap(),
        built_so_dir
            .join(format!("{}.so", token_config.token_type.program_name()))
            .to_str()
            .unwrap(),
        &chain_config.public_rpc_urls[0].http,
        // Not used
        "/",
    )
    .unwrap();
    // Initialization is also idempotent: skipped when the token PDA exists.
    let core_program_ids = read_core_program_ids(environments_dir, environment, &chain_config.name);
    init_warp_route_idempotent(
        ctx,
        &chain_config.client(),
        &core_program_ids,
        chain_config,
        token_config,
        program_id,
        ata_payer_funding_amount,
    )
    .unwrap();
    match &token_config.token_type {
        TokenType::Native => {
            println!("Deploying native token");
        }
        TokenType::Synthetic(_token_metadata) => {
            println!("Deploying synthetic token");
        }
        TokenType::Collateral(_collateral_info) => {
            println!("Deploying collateral token");
        }
    }
    program_id
}
/// Initializes the warp route unless its token PDA already exists, first
/// topping up the ATA payer for token types that maintain one.
fn init_warp_route_idempotent(
    ctx: &mut Context,
    client: &RpcClient,
    core_program_ids: &CoreProgramIds,
    _chain_config: &ChainMetadata,
    token_config: &TokenConfig,
    program_id: Pubkey,
    ata_payer_funding_amount: Option<u64>,
) -> Result<(), ProgramError> {
    let (token_pda, _token_bump) =
        Pubkey::find_program_address(hyperlane_token_pda_seeds!(), &program_id);

    // Synthetic and collateral routes have an ATA payer that may need topping
    // up even when the route itself is already initialized.
    let has_ata_payer = matches!(
        token_config.token_type,
        TokenType::Collateral(_) | TokenType::Synthetic(_)
    );
    if let Some(amount) = ata_payer_funding_amount {
        if has_ata_payer {
            fund_ata_payer_up_to(ctx, client, program_id, amount);
        }
    }

    if account_exists(client, &token_pda).unwrap() {
        println!("Token PDA already exists, skipping init");
        return Ok(());
    }

    init_warp_route(
        ctx,
        client,
        core_program_ids,
        _chain_config,
        token_config,
        program_id,
    )
}
/// Tops up the program's ATA payer PDA so its balance reaches
/// `ata_payer_funding_amount`, transferring only the shortfall. No-op (apart
/// from a log line) when the target balance is already met.
fn fund_ata_payer_up_to(
    ctx: &mut Context,
    client: &RpcClient,
    program_id: Pubkey,
    ata_payer_funding_amount: u64,
) {
    let (ata_payer_account, _ata_payer_bump) = Pubkey::find_program_address(
        hyperlane_sealevel_token::hyperlane_token_ata_payer_pda_seeds!(),
        &program_id,
    );
    let current_balance = client.get_balance(&ata_payer_account).unwrap();
    // Only transfer the difference between the target and what's there now.
    let funding_amount = ata_payer_funding_amount.saturating_sub(current_balance);
    if funding_amount == 0 {
        println!("ATA payer fully funded with balance of {}", current_balance);
        return;
    }
    println!(
        "Funding ATA payer {} with funding_amount {} to reach total balance of {}",
        ata_payer_account, funding_amount, ata_payer_funding_amount
    );
    let transfer = solana_program::system_instruction::transfer(
        &ctx.payer.pubkey(),
        &ata_payer_account,
        funding_amount,
    );
    ctx.instructions.push(transfer);
    ctx.send_transaction_with_client(client, &[&ctx.payer]);
    ctx.instructions.clear();
}
/// Builds and sends the init instruction(s) for a warp route program.
///
/// The Mailbox defaults to the one in `core_program_ids` unless the token's
/// connection client config overrides it. Synthetic routes additionally
/// initialize their SPL mint; collateral routes require an SPL token program
/// and mint in their config.
///
/// Fix: corrected the typo "initalize" -> "initialize" in the collateral
/// expect message.
fn init_warp_route(
    ctx: &mut Context,
    client: &RpcClient,
    core_program_ids: &CoreProgramIds,
    _chain_config: &ChainMetadata,
    token_config: &TokenConfig,
    program_id: Pubkey,
) -> Result<(), ProgramError> {
    // If the Mailbox was provided as configuration, use that. Otherwise, default to
    // the Mailbox found in the core program ids.
    let mailbox = token_config
        .connection_client
        .mailbox
        .as_ref()
        .map(|s| Pubkey::from_str(s).unwrap())
        .unwrap_or(core_program_ids.mailbox);
    let init = Init {
        mailbox,
        interchain_security_module: token_config
            .connection_client
            .interchain_security_module
            .as_ref()
            .map(|s| Pubkey::from_str(s).unwrap()),
        decimals: token_config.decimal_metadata.decimals,
        remote_decimals: token_config.decimal_metadata.remote_decimals(),
    };
    let mut init_instructions = match &token_config.token_type {
        TokenType::Native => vec![
            hyperlane_sealevel_token_native::instruction::init_instruction(
                program_id,
                ctx.payer.pubkey(),
                init,
            )?,
        ],
        TokenType::Synthetic(_token_metadata) => {
            let decimals = init.decimals;
            let mut instructions = vec![hyperlane_sealevel_token::instruction::init_instruction(
                program_id,
                ctx.payer.pubkey(),
                init,
            )?];
            let (mint_account, _mint_bump) =
                Pubkey::find_program_address(hyperlane_token_mint_pda_seeds!(), &program_id);
            // TODO: Also set Metaplex metadata?
            instructions.push(
                spl_token_2022::instruction::initialize_mint2(
                    &spl_token_2022::id(),
                    &mint_account,
                    // The mint PDA is its own mint authority; no freeze authority.
                    &mint_account,
                    None,
                    decimals,
                )
                .unwrap(),
            );
            instructions
        }
        TokenType::Collateral(collateral_info) => {
            vec![
                hyperlane_sealevel_token_collateral::instruction::init_instruction(
                    program_id,
                    ctx.payer.pubkey(),
                    init,
                    collateral_info
                        .spl_token_program
                        .as_ref()
                        .expect("Cannot initialize collateral warp route without SPL token program")
                        .program_id(),
                    collateral_info.mint.parse().expect("Invalid mint address"),
                )?,
            ]
        }
    };
    ctx.instructions.append(&mut init_instructions);
    ctx.send_transaction_with_client(client, &[&ctx.payer]);
    ctx.instructions.clear();
    Ok(())
}
/// Fetches the on-chain token account for `token_program_id` and returns its
/// map of remote domain id -> router address.
fn get_routers(
    client: &RpcClient,
    token_program_id: &Pubkey,
) -> Result<HashMap<u32, H256>, ClientError> {
    let (token_pda, _bump) =
        Pubkey::find_program_address(hyperlane_token_pda_seeds!(), token_program_id);
    let account_data = client.get_account(&token_pda)?.data;
    let token = HyperlaneTokenAccount::<()>::fetch(&mut account_data.as_slice())
        .unwrap()
        .into_inner();
    Ok(token.remote_routers)
}
/// JSON-serializable representation of a deployed program id, recorded in
/// both hex and base58 encodings (see `write_program_ids`).
#[derive(Serialize, Deserialize)]
struct SerializedProgramId {
    /// `0x`-prefixed hex encoding of the 32-byte id.
    hex: String,
    /// Base58 (Solana `Pubkey`) encoding of the same id.
    base58: String,
}
fn write_program_ids(warp_route_dir: &Path, routers: &HashMap<String, H256>) {
let serialized_program_ids = routers
.iter()
.map(|(chain_name, router)| {
(
chain_name.clone(),
SerializedProgramId {
hex: format!("0x{}", hex::encode(router)),
base58: Pubkey::new_from_array(router.to_fixed_bytes()).to_string(),
},
)
})
.collect::<HashMap<String, SerializedProgramId>>();
let program_ids_file = warp_route_dir.join("program-ids.json");
let program_ids_file = File::create(program_ids_file).unwrap();
serde_json::to_writer_pretty(program_ids_file, &serialized_program_ids).unwrap();
}

@ -0,0 +1 @@
[113,244,152,170,85,122,42,51,10,74,244,18,91,8,135,77,156,19,172,122,139,50,248,3,186,184,186,140,110,165,78,161,76,88,146,213,185,127,121,92,132,2,249,73,19,192,73,170,105,85,247,241,48,175,67,28,165,29,224,252,173,165,38,140]

@ -0,0 +1 @@
[135,153,145,193,50,88,169,205,206,171,48,1,17,242,3,43,225,72,101,163,93,126,105,165,159,44,243,196,182,240,4,87,22,253,47,198,217,75,23,60,181,129,251,103,140,170,111,35,152,97,16,23,64,17,198,239,79,225,120,141,55,38,60,86]

@ -0,0 +1 @@
[252,76,67,201,250,68,86,32,216,136,163,46,192,20,249,175,209,94,101,235,24,240,204,4,246,159,180,138,253,20,48,146,182,104,250,124,231,168,239,248,95,199,219,250,126,156,57,113,83,209,232,171,10,90,153,238,72,138,186,34,77,87,172,211]

@ -0,0 +1,5 @@
{
"mailbox": "692KZJaoe2KRcD6uhCQDLLXnLNA5ZLnfvdqjE4aX9iu1",
"validator_announce": "DH43ae1LwemXAboWwSh8zc9pG8j72gKUEXNi57w8fEnn",
"multisig_ism_message_id": "2YjtZDiUoptoSsA5eVrDCcX6wxNK6YoEVW7y82x5Z2fw"
}

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save