Solana e2e tests (#2578)

### Description

Adds support for testing Solana with the e2e tests; the flow is sketched below. This involves
1) Downloading the pre-built Solana CLI tools
2) Setting up the Solana configuration
3) Downloading the pre-built Solana programs we need (see
https://github.com/hyperlane-xyz/solana-program-library/releases/)
4) Building the Solana programs in the repo
5) Starting the Solana test validator
6) Deploying the Solana programs
7) Deploying a warp route
8) Initializing the multisig ISM and validator announce
9) Initiating a transfer
10) Waiting for the message to be delivered
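
Condensed, the Solana side of the new `run-locally` flow looks roughly like this (function names as they appear in `rust/utils/run-locally/src/solana.rs` in this PR; parallelism and error handling are elided):

```
// Sketch of the Solana e2e flow (condensed from main.rs / solana.rs).
let (solana_path, _tools_dir) = install_solana_cli_tools().join();       // steps 1-2
let solana_programs = build_solana_programs(solana_path.clone()).join(); // steps 3-4
let solana_ledger_dir = tempdir().unwrap();
let (solana_config_path, solana_validator) = start_solana_test_validator(
    solana_path.clone(),
    solana_programs,
    solana_ledger_dir.as_ref().to_path_buf(),
)
.join(); // steps 5-8: validator up, programs + warp route deployed, ISM initialized
initiate_solana_hyperlane_transfer(solana_path, solana_config_path).join(); // step 9
// step 10: the CI loop polls termination_invariants_met(..) until delivery
```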

In addition, these were "woven" into the existing E2E test logic; for
instance, we only run one new validator, and the existing relayer was
extended to support the additional chains. This will make it much
easier later on to support tests between environments.

### Drive-by changes

- Fix a bug in the relayer when trying to index the mailbox
- Add support in the hyperlane sealevel CLI for custom solana config
paths
- Fix a linker error on Linux caused by not specifying `no-entrypoint`
- Cleaned up unnecessary `no-entrypoint` features in libraries
- Minor refactor to how we run commands to avoid arg passing
- Created an easy way to define tasks that run asynchronously, `as_task` (see
the sketch after this list)
- Split up main logic flow into a couple files for easier reading
- Use mold linker to speed up builds (well, the linking part)
- Removed support for `log_all` to simplify code pathways
- Added context when a child process ends unexpectedly for easier
debugging
- Fixed a bug in the validator where it would infinitely retry to send
an announcement on failure without waiting
- Cleaned up solana configs
- Fixed processes hanging on exit (very annoying when testing locally
since you have to manually go kill them)
- Added stderr logging to the hyperlane sealevel CLI subprocess calls
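
As a usage sketch of `as_task` (assuming the `TaskHandle` plumbing from `run-locally`'s utils, which this diff only shows at the call sites): annotating a function makes a call to it spawn the work on a background thread and return a handle that is joined later.

```
use macro_rules_attribute::apply;

// Hypothetical task; `as_task` (from run-locally's utils) rewrites the
// function so that calling it spawns the body and returns a joinable handle.
#[apply(as_task)]
fn expensive_build(out_dir: PathBuf) -> PathBuf {
    // ... long-running build work here ...
    out_dir
}

// Kick the task off, keep doing other setup, then block on the result:
let build = expensive_build(out_dir);
// ... other setup runs concurrently ...
let built_path = build.join();
```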

### Related issues

Fixes #2415

### Backward compatibility

Yes

### Testing

Manual
### Files changed

1. .github/workflows/e2e.yml (5)
2. rust/Cargo.lock (17)
3. rust/agents/validator/src/validator.rs (2)
4. rust/chains/hyperlane-sealevel/Cargo.toml (8)
5. rust/config/sealevel/relayer.env (15)
6. rust/config/sealevel/validator.env (10)
7. rust/config/test-sealevel-keys/test_deployer-account.json (0)
8. rust/config/test-sealevel-keys/test_deployer-keypair.json (0)
9. rust/config/test_sealevel_config.json (23)
10. rust/hyperlane-base/src/contract_sync/cursor.rs (15)
11. rust/sealevel/README.md (127)
12. rust/sealevel/client/Cargo.toml (12)
13. rust/sealevel/client/src/cmd_utils.rs (4)
14. rust/sealevel/client/src/main.rs (8)
15. rust/sealevel/environments/local-e2e/warp-routes/testwarproute/program-ids.json (8)
16. rust/sealevel/libraries/access-control/Cargo.toml (3)
17. rust/sealevel/libraries/account-utils/Cargo.toml (3)
18. rust/sealevel/libraries/ecdsa-signature/Cargo.toml (3)
19. rust/sealevel/libraries/hyperlane-sealevel-connection-client/Cargo.toml (3)
20. rust/sealevel/libraries/hyperlane-sealevel-token/Cargo.toml (3)
21. rust/sealevel/libraries/interchain-security-module-interface/Cargo.toml (3)
22. rust/sealevel/libraries/message-recipient-interface/Cargo.toml (3)
23. rust/sealevel/libraries/multisig-ism/Cargo.toml (1)
24. rust/sealevel/libraries/serializable-account-meta/Cargo.toml (3)
25. rust/sealevel/libraries/test-utils/Cargo.toml (3)
26. rust/utils/run-locally/Cargo.toml (1)
27. rust/utils/run-locally/src/config.rs (200)
28. rust/utils/run-locally/src/ethereum.rs (57)
29. rust/utils/run-locally/src/invariants.rs (117)
30. rust/utils/run-locally/src/main.rs (390)
31. rust/utils/run-locally/src/program.rs (346)
32. rust/utils/run-locally/src/solana.rs (342)
33. rust/utils/run-locally/src/utils.rs (253)
34. rust/utils/sealevel-test.bash (166)

@ -46,6 +46,11 @@ jobs:
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Install mold linker
uses: rui314/setup-mold@v1
with:
mold-version: 2.0.0
make-default: true
- name: rust cache
uses: Swatinem/rust-cache@v2
with:


@ -4563,6 +4563,22 @@ dependencies = [
"libc",
]
[[package]]
name = "macro_rules_attribute"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a82271f7bc033d84bbca59a3ce3e4159938cb08a9c3aebbe54d215131518a13"
dependencies = [
"macro_rules_attribute-proc_macro",
"paste",
]
[[package]]
name = "macro_rules_attribute-proc_macro"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8dd856d451cc0da70e2ef2ce95a18e39a93b7558bedf10201ad28503f918568"
[[package]]
name = "maplit"
version = "1.0.2"
@ -6326,6 +6342,7 @@ version = "0.1.0"
dependencies = [
"ctrlc",
"eyre",
"macro_rules_attribute",
"maplit",
"nix 0.26.2",
"tempfile",

@ -270,7 +270,6 @@ impl Validator {
validator_address=?announcement.validator,
"Please send tokens to the validator address to announce",
);
sleep(self.interval).await;
} else {
let result = self
.validator_announce
@ -278,6 +277,7 @@ impl Validator {
.await;
Self::log_on_announce_failure(result);
}
sleep(self.interval).await;
}
}
Ok(())
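
Schematically, the fix above moves the `sleep` out of the insufficient-funds branch so that a failed announcement also waits `self.interval` before the next attempt, instead of retrying in a tight loop:

```
// Schematic of the fixed announce loop (condition name is illustrative):
loop {
    if balance_too_low {
        // warn that the validator address needs funds, then fall through
    } else {
        let result = self.validator_announce.announce(/* ... */).await;
        Self::log_on_announce_failure(result);
        // a failure here now also falls through to the sleep below
    }
    sleep(self.interval).await; // every iteration waits before retrying
}
```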

@ -22,12 +22,12 @@ tracing-futures.workspace = true
tracing.workspace = true
url.workspace = true
account-utils = { path = "../../sealevel/libraries/account-utils" }
hyperlane-core = { path = "../../hyperlane-core" }
hyperlane-sealevel-mailbox = { path = "../../sealevel/programs/mailbox", features = ["no-entrypoint"] }
hyperlane-sealevel-interchain-security-module-interface = { path = "../../sealevel/libraries/interchain-security-module-interface" }
hyperlane-sealevel-mailbox = { path = "../../sealevel/programs/mailbox", features = ["no-entrypoint"] }
hyperlane-sealevel-message-recipient-interface = { path = "../../sealevel/libraries/message-recipient-interface" }
serializable-account-meta = { path = "../../sealevel/libraries/serializable-account-meta" }
account-utils = { path = "../../sealevel/libraries/account-utils" }
multisig-ism = { path = "../../sealevel/libraries/multisig-ism" }
hyperlane-sealevel-multisig-ism-message-id = { path = "../../sealevel/programs/ism/multisig-ism-message-id", features = ["no-entrypoint"] }
hyperlane-sealevel-validator-announce = { path = "../../sealevel/programs/validator-announce", features = ["no-entrypoint"] }
multisig-ism = { path = "../../sealevel/libraries/multisig-ism" }
serializable-account-meta = { path = "../../sealevel/libraries/serializable-account-meta" }

@ -1,15 +0,0 @@
export BASE_CONFIG="sealevel.json"
export RUN_ENV="sealevel"
export HYP_BASE_DB="/tmp/SEALEVEL_DB/relayer"
export HYP_RELAYER_RELAYCHAINS="sealeveltest1,sealeveltest2"
export HYP_BASE_METRICS=9091
export HYP_BASE_ALLOWLOCALCHECKPOINTSYNCERS=true
# The first 32 bytes of test-keys/test_deployer-keypair.json as hexadecimal,
# which is the secret key.
export HYP_BASE_CHAINS_SEALEVELTEST1_SIGNER_KEY=892bf6949af4233e62f854cb3618bc1a3ee3341dc71ada08c4d5deca239acf4f
export HYP_BASE_CHAINS_SEALEVELTEST1_SIGNER_TYPE="hexKey"
export HYP_BASE_CHAINS_SEALEVELTEST2_SIGNER_KEY=892bf6949af4233e62f854cb3618bc1a3ee3341dc71ada08c4d5deca239acf4f
export HYP_BASE_CHAINS_SEALEVELTEST2_SIGNER_TYPE="hexKey"
export HYP_BASE_TRACING_LEVEL="debug"

@ -1,10 +0,0 @@
export BASE_CONFIG="sealevel.json"
export RUN_ENV="sealevel"
export HYP_BASE_DB="/tmp/SEALEVEL_DB/validator"
export HYP_VALIDATOR_ORIGINCHAINNAME="sealeveltest1"
export HYP_VALIDATOR_VALIDATOR_KEY="59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
export HYP_VALIDATOR_VALIDATOR_TYPE="hexKey"
export HYP_VALIDATOR_REORGPERIOD="0"
export HYP_VALIDATOR_INTERVAL="1"
export HYP_VALIDATOR_CHECKPOINTSYNCER_TYPE="localStorage"
export HYP_VALIDATOR_CHECKPOINTSYNCER_PATH="/tmp/test_sealevel_checkpoints_0x70997970c51812dc3a010c7d01b50e0d17dc79c8"

@ -1,49 +1,42 @@
{
"environment": "sealevel",
"chains": {
"sealeveltest1": {
"name": "SealevelTest1",
"domain": "13375",
"name": "sealeveltest1",
"domain": 13375,
"addresses": {
"mailbox": "692KZJaoe2KRcD6uhCQDLLXnLNA5ZLnfvdqjE4aX9iu1",
"interchainGasPaymaster": "FixmeFixmeFixmeFixmeFixmeFixmeFixmeFixmeFixm",
"validatorAnnounce": "DH43ae1LwemXAboWwSh8zc9pG8j72gKUEXNi57w8fEnn"
},
"signer": null,
"protocol": "sealevel",
"finalityBlocks": "0",
"finalityBlocks": 0,
"connection": {
"type": "http",
"url": "http://localhost:8899"
},
"index": {
"from": "1",
"from": 1,
"mode": "sequence"
}
},
"sealeveltest2": {
"name": "SealevelTest2",
"domain": "13376",
"name": "sealeveltest2",
"domain": 13376,
"addresses": {
"mailbox": "9tCUWNjpqcf3NUSrtp7vquYVCwbEByvLjZUrhG5dgvhj",
"interchainGasPaymaster": "FixmeFixmeFixmeFixmeFixmeFixmeFixmeFixmeFixm",
"validatorAnnounce": "3Uo5j2Bti9aZtrDqJmAyuwiFaJFPFoNL5yxTpVCNcUhb"
},
"signer": null,
"protocol": "sealevel",
"finalityBlocks": "0",
"finalityBlocks": 0,
"connection": {
"type": "http",
"url": "http://localhost:8899"
},
"index": {
"from": "1",
"from": 1,
"mode": "sequence"
}
}
},
"tracing": {
"level": "info",
"fmt": "pretty"
}
}

@ -200,30 +200,27 @@ impl ForwardMessageSyncCursor {
let (mailbox_count, tip) = self.cursor.indexer.fetch_count_at_tip().await?;
let cursor_count = self.cursor.sync_state.next_sequence;
let cmp = cursor_count.cmp(&mailbox_count);
match cmp {
Ok(match cursor_count.cmp(&mailbox_count) {
Ordering::Equal => {
// We are synced up to the latest nonce so we don't need to index anything.
// We update our next block number accordingly.
self.cursor.sync_state.next_block = tip;
Ok(None)
None
}
Ordering::Less => {
// The cursor is behind the mailbox, so we need to index some blocks.
let range = self
.cursor
self.cursor
.sync_state
.get_next_range(Some(mailbox_count), Some(tip))
.await?;
Ok(range)
.await?
}
Ordering::Greater => {
// Providers may be internally inconsistent, e.g. RPC request A could hit a node
// whose tip is N and subsequent RPC request B could hit a node whose tip is < N.
debug!("Cursor count is greater than Mailbox count");
Ok(None)
None
}
}
})
}
}

@ -1,127 +0,0 @@
# Hyperlane Sealevel (Solana VM) Integration
# Running local end to end test
A local end to end test has been written that will:
1. Run a local Solana network
2. Deploy two sets of core contracts (i.e. Mailbox / Multisig ISM / ValidatorAnnounce) onto this chain, one with domain 13375 and the other 13376.
3. Deploy a "native" warp route on domain 13375 and a "synthetic" warp route on domain 13376
4. Send native lamports from domain 13375 to 13376
5. A validator & relayer can then be spun up to deliver the message
### Build and run solana-test-validator
This only needs to be done once when initially setting things up.
1. Clone the `solar-eclipse` repo, which is the Eclipse fork of the Solana repo. This is needed to run the local Solana network. Check out the `steven/hyperlane-fix-deps` branch:
```
git clone git@github.com:Eclipse-Laboratories-Inc/solar-eclipse --branch steven/hyperlane-fix-deps
```
2. `cd` into the repo and build the `solana-test-validator` using the local `cargo` script (which ensures the correct version is used):
```
./cargo build -p solana-test-validator
```
### Check out `eclipse-program-library`
This is a fork (with some dependency fixes) of the eclipse fork of the `solana-program-library`. This contains "SPL" programs that are commonly used programs - stuff like the token program, etc.
Note these instructions previously required a different remote and branch - make sure to move to this remote & branch if you haven't already!
1. Check out the branch `trevor/steven/eclipse-1.14.13/with-tlv-lib`:
```
git clone git@github.com:tkporter/eclipse-program-library.git --branch trevor/steven/eclipse-1.14.13/with-tlv-lib
```
### Build the required SPL programs and Hyperlane programs
This command will build all the required SPL programs (e.g. the token program, token 2022 program, SPL noop, etc...) found in the local repo of `eclipse-program-library`,
and will build all the required Hyperlane programs (e.g. the Mailbox program, Validator Announce, etc...).
You need to run this if any changes are made to programs that you want to be used in future runs of the end to end test.
Change the paths to your local `solar-eclipse` repo and `eclipse-program-library` as necessary, and run this from the `rust` directory of hyperlane-monorepo.
```
SOLAR_ECLIPSE_DIR=~/solar-eclipse ECLIPSE_PROGRAM_LIBRARY_DIR=~/eclipse-program-library ./utils/sealevel-test.bash build-only
```
### Run the local Solana network
This will run the `solana-test-validator` with a funded test account `E9VrvAdGRvCguN2XgXsgu9PNmMM3vZsU8LSUrM68j8ty` that will later be used for deploying contracts. It will also create some of the required SPL programs at the specified program IDs - these program IDs are consistent across Solana networks and are required by our Hyperlane programs. Change paths as necessary - the \*.so files should have been created by the prior command. The `--ledger` directory is arbitrary and is just the data dir for the Solana validator.
```
mkdir -p /tmp/eclipse/ledger-dir && target/debug/solana-test-validator --reset --ledger /tmp/eclipse/ledger-dir --account E9VrvAdGRvCguN2XgXsgu9PNmMM3vZsU8LSUrM68j8ty ~/abacus-monorepo/rust/config/sealevel/test-keys/test_deployer-account.json --bpf-program TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA ~/eclipse-program-library/target/deploy/spl_token.so --bpf-program TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb ~/eclipse-program-library/target/deploy/spl_token_2022.so --bpf-program ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL ~/eclipse-program-library/target/deploy/spl_associated_token_account.so --bpf-program noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV ~/eclipse-program-library/account-compression/target/deploy/spl_noop.so
```
By now you should have an output like this - keep it running and move to another terminal:
```
Ledger location: /tmp/eclipse/ledger-dir
Log: /tmp/eclipse/ledger-dir/validator.log
⠒ Initializing...
⠄ Initializing...
Identity: 4P5rtWdphhehU32myNQcTSMgrCRz7kdvZEnasX6fahJQ
Genesis Hash: G7CY7wEzbdjh8RwqTszxrpYTqiHKvqwpaw3JbmKJjJhU
Version: 1.14.13
Shred Version: 419
Gossip Address: 127.0.0.1:1024
TPU Address: 127.0.0.1:1027
JSON RPC URL: http://127.0.0.1:8899
⠒ 00:05:35 | Processed Slot: 668 | Confirmed Slot: 668 | Finalized Slot: 6
```
### Run the local end to end script
Run the script found at `rust/utils/sealevel-test.bash`. This will build all required programs, deploy contracts, and test sending a warp route message. You need to supply the paths to your local `solar-eclipse` and `eclipse-program-library` repos:
```
SOLAR_ECLIPSE_DIR=~/solar-eclipse ECLIPSE_PROGRAM_LIBRARY_DIR=~/eclipse-program-library ./utils/sealevel-test.bash
```
Note: this won't rebuild any of the programs. If you want to rebuild them, you can either cd into them individually and run `cargo build-sbf --arch sbf`, or you can run the above bash script with `force-build-programs` as the first argument.
You'll see a bunch of output here showing programs being built and deployed. Eventually you should see some logs saying `grep -q 'Message not delivered'`. At this point, the contracts have all been deployed and a native warp route transfer has been made. You can move on to running the validator and relayer.
### Running the validator
In a separate terminal, cd to `hyperlane-monorepo/rust`.
1. Source the env vars:
```
source ./config/sealevel/validator.env
```
2. Run the validator (this clears the DB / checkpoints if present):
```
mkdir /tmp/SEALEVEL_DB ; rm -rf /tmp/SEALEVEL_DB/validator /tmp/test_sealevel_checkpoints_0x70997970c51812dc3a010c7d01b50e0d17dc79c8/* ; CONFIG_FILES=./config/sealevel/sealevel.json cargo run --bin validator
```
You should see some INFO logs about checkpoint at index 0.
You can confirm things are working correctly by looking at `/tmp/CHECKPOINTS_DIR`, where the validator posts its signatures.
### Running the relayer
In a separate terminal, again in `hyperlane-monorepo/rust`:
1. Source the env vars:
```
source ./config/sealevel/relayer.env
```
2. Run the relayer (the rm is to make sure the relayer's DB is cleared):
```
rm -rf /tmp/SEALEVEL_DB/relayer ; RUST_BACKTRACE=full CONFIG_FILES=./config/sealevel/sealevel.json cargo run --bin relayer
```
When the original `sealevel-test.bash` exits with a 0 exit code and some logs about Hyperlane Token Storage, the message has been successfully delivered!

@ -21,10 +21,10 @@ solana-sdk.workspace = true
account-utils = { path = "../libraries/account-utils" }
hyperlane-core = { path = "../../hyperlane-core" }
hyperlane-sealevel-connection-client = { path = "../libraries/hyperlane-sealevel-connection-client" }
hyperlane-sealevel-mailbox = { path = "../programs/mailbox" }
hyperlane-sealevel-multisig-ism-message-id = { path = "../programs/ism/multisig-ism-message-id" }
hyperlane-sealevel-token = { path = "../programs/hyperlane-sealevel-token" }
hyperlane-sealevel-token-collateral = { path = "../programs/hyperlane-sealevel-token-collateral" }
hyperlane-sealevel-mailbox = { path = "../programs/mailbox", features = ["no-entrypoint"] }
hyperlane-sealevel-multisig-ism-message-id = { path = "../programs/ism/multisig-ism-message-id", features = ["no-entrypoint"] }
hyperlane-sealevel-token = { path = "../programs/hyperlane-sealevel-token", features = ["no-entrypoint"] }
hyperlane-sealevel-token-collateral = { path = "../programs/hyperlane-sealevel-token-collateral", features = ["no-entrypoint"] }
hyperlane-sealevel-token-lib = { path = "../libraries/hyperlane-sealevel-token" }
hyperlane-sealevel-token-native = { path = "../programs/hyperlane-sealevel-token-native" }
hyperlane-sealevel-validator-announce = { path = "../programs/validator-announce" }
hyperlane-sealevel-token-native = { path = "../programs/hyperlane-sealevel-token-native", features = ["no-entrypoint"] }
hyperlane-sealevel-validator-announce = { path = "../programs/validator-announce", features = ["no-entrypoint"] }

@ -35,8 +35,10 @@ pub fn build_cmd(
c.args(&cmd[1..]);
if log_all {
c.stdout(Stdio::inherit());
c.stderr(Stdio::inherit());
} else {
c.stdout(append_to(log));
c.stdout(append_to(log.as_ref()));
c.stderr(append_to(log));
}
if let Some(wd) = wd {
c.current_dir(wd);

@ -92,6 +92,8 @@ struct Cli {
compute_budget: u32,
#[arg(long, short = 'a')]
heap_size: Option<u32>,
#[arg(long, short = 'C')]
config: Option<String>,
}
#[derive(Subcommand)]
@ -472,8 +474,10 @@ fn main() {
pretty_env_logger::init();
let cli = Cli::parse();
let config = match CONFIG_FILE.as_ref() {
Some(config_file) => Config::load(config_file).unwrap(),
let config = match cli.config.as_ref().or(CONFIG_FILE.as_ref()) {
Some(config_file) => Config::load(config_file)
.map_err(|e| format!("Failed to load solana config file {}: {}", config_file, e))
.unwrap(),
None => Config::default(),
};
let url = normalize_to_url_if_moniker(cli.url.unwrap_or(config.json_rpc_url));
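
With the new `-C`/`--config` flag, callers can point the client at a non-default Solana CLI config. A hypothetical invocation using the `Program` builder added in this PR (`arg("config", ...)` renders as `--config <path>`):

```
// Hypothetical: run the sealevel client against the e2e test validator's
// solana config rather than the default CONFIG_FILE location.
Program::new(concat_path(AGENT_BIN_PATH, "hyperlane-sealevel-client"))
    .arg("config", solana_config_path.to_str().unwrap())
    // ... subcommand and its arguments ...
    .run()
    .join();
```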

@ -1,10 +1,10 @@
{
"sealeveltest1": {
"hex": "0xa77b4e2ed231894cc8cb8eee21adcc705d8489bccc6b2fcf40a358de23e60b7b",
"base58": "CGn8yNtSD3aTTqJfYhUb6s1aVTN75NzwtsFKo1e83aga"
},
"sealeveltest2": {
"hex": "0x2317f9615d4ebc2419ad4b88580e2a80a03b2c7a60bc960de7d6934dbc37a87e",
"base58": "3MzUPjP5LEkiHH82nEAe28Xtz9ztuMqWc8UmuKxrpVQH"
},
"sealeveltest1": {
"hex": "0xa77b4e2ed231894cc8cb8eee21adcc705d8489bccc6b2fcf40a358de23e60b7b",
"base58": "CGn8yNtSD3aTTqJfYhUb6s1aVTN75NzwtsFKo1e83aga"
}
}

@ -5,9 +5,6 @@ name = "access-control"
version = "0.1.0"
edition = "2021"
[features]
no-entrypoint = []
[dependencies]
solana-program.workspace = true

@ -5,9 +5,6 @@ name = "account-utils"
version = "0.1.0"
edition = "2021"
[features]
no-entrypoint = []
[dependencies]
borsh.workspace = true
solana-program.workspace = true

@ -5,9 +5,6 @@ name = "ecdsa-signature"
version = "0.1.0"
edition = "2021"
[features]
no-entrypoint = []
[dependencies]
solana-program.workspace = true
thiserror.workspace = true

@ -5,9 +5,6 @@ name = "hyperlane-sealevel-connection-client"
version = "0.1.0"
edition = "2021"
[features]
no-entrypoint = []
[dependencies]
borsh.workspace = true
solana-program.workspace = true

@ -5,9 +5,6 @@ name = "hyperlane-sealevel-token-lib"
version = "0.1.0"
edition = "2021"
[features]
no-entrypoint = []
[dependencies]
borsh.workspace = true
num-derive.workspace = true

@ -5,9 +5,6 @@ name = "hyperlane-sealevel-interchain-security-module-interface"
version = "0.1.0"
edition = "2021"
[features]
no-entrypoint = []
[dependencies]
borsh.workspace = true
solana-program.workspace = true

@ -5,9 +5,6 @@ name = "hyperlane-sealevel-message-recipient-interface"
version = "0.1.0"
edition = "2021"
[features]
no-entrypoint = []
[dependencies]
borsh.workspace = true
solana-program.workspace = true

@ -6,7 +6,6 @@ version = "0.1.0"
edition = "2021"
[features]
no-entrypoint = []
test-data = ["dep:hex"]
[dependencies]

@ -5,9 +5,6 @@ name = "serializable-account-meta"
version = "0.1.0"
edition = "2021"
[features]
no-entrypoint = []
[dependencies]
borsh.workspace = true
solana-program.workspace = true

@ -5,9 +5,6 @@ name = "hyperlane-test-utils"
version = "0.1.0"
edition = "2021"
[features]
no-entrypoint = []
[dependencies]
borsh.workspace = true
solana-program-test.workspace = true

@ -17,3 +17,4 @@ nix = { version = "0.26", default-features = false, features = ["signal"] }
tempfile = "3.3"
ureq = { version = "2.4", default-features = false }
which = "4.4"
macro_rules_attribute = "0.2"

@ -1,39 +1,20 @@
use std::collections::HashMap;
use std::env;
use std::ffi::OsStr;
use std::fmt::{Debug, Display, Formatter};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::sync::Arc;
use std::time::{SystemTime, UNIX_EPOCH};
use eyre::{Context, Result};
use crate::utils::{concat_path, LogFilter};
pub struct Config {
pub is_ci_env: bool,
pub ci_mode: bool,
pub ci_mode_timeout: u64,
pub kathy_messages: u64,
pub log_all: bool,
pub log_dir: PathBuf,
}
impl Config {
pub fn load() -> Self {
pub fn load() -> Arc<Self> {
let ci_mode = env::var("E2E_CI_MODE")
.map(|k| k.parse::<bool>().unwrap())
.unwrap_or_default();
let date_str = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs()
.to_string();
let log_dir = concat_path(env::temp_dir(), format!("logs/hyperlane-agents/{date_str}"));
Self {
Arc::new(Self {
ci_mode,
log_dir,
is_ci_env: env::var("CI").as_deref() == Ok("true"),
ci_mode_timeout: env::var("E2E_CI_TIMEOUT_SEC")
.map(|k| k.parse::<u64>().unwrap())
@ -44,183 +25,6 @@ impl Config {
.map(|r| r.parse::<u64>().unwrap());
r.unwrap_or(16)
},
log_all: env::var("E2E_LOG_ALL")
.map(|k| k.parse::<bool>().unwrap())
.unwrap_or(ci_mode),
}
}
}
#[derive(Default, Clone)]
pub struct ProgramArgs {
bin: Option<Arc<String>>,
args: Vec<Arc<String>>,
env: HashMap<Arc<String>, Arc<String>>,
working_dir: Option<Arc<PathBuf>>,
log_filter: Option<LogFilter>,
}
impl Debug for ProgramArgs {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_struct("ProgramArgs")
.field("bin", &self.bin)
.field("args", &self.args)
.field("env", &self.env)
.field("working_dir", &self.working_dir)
.field("log_filter", &self.log_filter.is_some())
.finish()
}
}
impl Display for ProgramArgs {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
if f.alternate() {
let wd = self
.working_dir
.as_ref()
.map(|wd| wd.display())
.unwrap_or_else(|| Path::new("./").display());
write!(f, "({wd})$ ")?;
for (k, v) in &self.env {
write!(f, "{k}={v} ")?;
}
if let Some(path_result) = self.get_bin_path() {
if let Ok(bp) = path_result {
write!(f, "{}", bp.display())?;
} else {
write!(f, "{}", self.bin.as_ref().unwrap())?;
}
} else {
write!(f, "???")?;
}
for a in &self.args {
write!(f, " {a}")?;
}
Ok(())
} else {
write!(
f,
"{}",
self.bin.as_deref().map(String::as_str).unwrap_or("???")
)
}
}
}
impl ProgramArgs {
pub fn new(bin: impl AsRef<OsStr>) -> Self {
Self::default().bin(bin)
}
pub fn bin(mut self, bin: impl AsRef<OsStr>) -> Self {
self.bin = Some(
bin.as_ref()
.to_str()
.expect("Invalid string encoding for binary name")
.to_owned()
.into(),
);
self
}
pub fn raw_arg(mut self, arg: impl Into<String>) -> Self {
self.args.push(arg.into().into());
self
}
pub fn cmd(self, cmd: impl Into<String>) -> Self {
let cmd = cmd.into();
debug_assert!(!cmd.starts_with('-'), "arg should not start with -");
self.raw_arg(cmd)
}
pub fn flag(self, arg: impl AsRef<str>) -> Self {
debug_assert!(
!arg.as_ref().starts_with('-'),
"arg should not start with -"
);
self.raw_arg(format!("--{}", arg.as_ref()))
}
/// Assumes an arg in the format of `--$ARG1 $ARG2`, arg1 and arg2 should exclude quoting, equal sign, and the leading hyphens.
pub fn arg(self, arg1: impl AsRef<str>, arg2: impl Into<String>) -> Self {
self.flag(arg1).cmd(arg2)
}
/// add an env that will be prefixed with the default hyperlane env prefix
pub fn hyp_env(self, key: impl AsRef<str>, value: impl Into<String>) -> Self {
const PREFIX: &str = "HYP_BASE_";
let key = key.as_ref();
debug_assert!(
!key.starts_with(PREFIX),
"env key should not start with prefix that is being added"
);
self.env(format!("{PREFIX}{key}"), value)
}
/// add a system env that makes no prefix assumptions
pub fn env(mut self, key: impl Into<String>, value: impl Into<String>) -> Self {
self.env.insert(key.into().into(), value.into().into());
self
}
pub fn working_dir(mut self, path: impl Into<PathBuf>) -> Self {
self.working_dir = Some(path.into().into());
self
}
/// Filter logs being printed to stdout/stderr. If the LogFilter returns true,
/// then it will keep that log line, if it returns false it will discard it.
/// This is ignored when logging to files.
pub fn filter_logs(mut self, filter: LogFilter) -> Self {
self.log_filter = Some(filter);
self
}
pub fn create_command(&self) -> Command {
let mut cmd = Command::new(
self.get_bin_path()
.expect("bin path must be specified")
.unwrap(),
);
if let Some(wd) = &self.working_dir {
cmd.current_dir(wd.as_path());
}
for (k, v) in self.env.iter() {
cmd.env(k.as_str(), v.as_str());
}
cmd.args(self.args.iter().map(AsRef::as_ref));
cmd
}
pub fn get_filter(&self) -> Option<LogFilter> {
self.log_filter
}
/// Try to get the path to the binary
pub fn get_bin_path(&self) -> Option<Result<PathBuf>> {
self.bin.as_ref().map(|raw_bin_name| {
which::which(raw_bin_name.as_ref())
.with_context(|| format!("Cannot find binary: {raw_bin_name}"))
})
}
/// Get just the name component of the binary
pub fn get_bin_name(&self) -> String {
Path::new(
self.bin
.as_ref()
.expect("bin path must be specified")
.as_str(),
)
.file_name()
.expect("bin must have a file name")
.to_str()
.unwrap()
.to_owned()
}
}

@ -0,0 +1,57 @@
use std::sync::Arc;
use std::thread::sleep;
use std::time::Duration;
use macro_rules_attribute::apply;
use crate::config::Config;
use crate::logging::log;
use crate::program::Program;
use crate::utils::{as_task, AgentHandles, TaskHandle};
use crate::{INFRA_PATH, MONOREPO_ROOT_PATH, TS_SDK_PATH};
#[apply(as_task)]
pub fn start_anvil(config: Arc<Config>) -> AgentHandles {
log!("Installing typescript dependencies...");
let yarn_monorepo = Program::new("yarn").working_dir(MONOREPO_ROOT_PATH);
yarn_monorepo.clone().cmd("install").run().join();
if !config.is_ci_env {
// don't need to clean in the CI
yarn_monorepo.clone().cmd("clean").run().join();
}
yarn_monorepo.clone().cmd("build").run().join();
log!("Launching anvil...");
let anvil_args = Program::new("anvil").flag("silent").filter_logs(|_| false); // for now do not keep any of the anvil logs
let anvil = anvil_args.spawn("ETH");
sleep(Duration::from_secs(10));
let yarn_infra = Program::new("yarn")
.working_dir(INFRA_PATH)
.env("ALLOW_LEGACY_MULTISIG_ISM", "true");
log!("Deploying hyperlane ism contracts...");
yarn_infra.clone().cmd("deploy-ism").run().join();
log!("Rebuilding sdk...");
let yarn_sdk = Program::new("yarn").working_dir(TS_SDK_PATH);
yarn_sdk.clone().cmd("build").run().join();
log!("Deploying hyperlane core contracts...");
yarn_infra.clone().cmd("deploy-core").run().join();
log!("Deploying hyperlane igp contracts...");
yarn_infra.cmd("deploy-igp").run().join();
if !config.is_ci_env {
// Follow-up 'yarn hardhat node' invocation with 'yarn prettier' to fixup
// formatting on any autogenerated json config files to avoid any diff creation.
yarn_monorepo.cmd("prettier").run().join();
}
// Rebuild the SDK to pick up the deployed contracts
log!("Rebuilding sdk...");
yarn_sdk.cmd("build").run().join();
anvil
}

@ -0,0 +1,117 @@
use std::path::Path;
use crate::config::Config;
use maplit::hashmap;
use crate::fetch_metric;
use crate::logging::log;
use crate::solana::solana_termination_invariants_met;
/// Use the metrics to check if the relayer queues are empty and the expected
/// number of messages have been sent.
pub fn termination_invariants_met(
config: &Config,
solana_cli_tools_path: &Path,
solana_config_path: &Path,
) -> eyre::Result<bool> {
let eth_messages_expected = (config.kathy_messages / 2) as u32 * 2;
let sol_messages_expected = 1;
let total_messages_expected = eth_messages_expected + sol_messages_expected;
let lengths = fetch_metric("9092", "hyperlane_submitter_queue_length", &hashmap! {})?;
assert!(!lengths.is_empty(), "Could not find queue length metric");
if lengths.into_iter().any(|n| n != 0) {
log!("Relayer queues not empty");
return Ok(false);
};
// Also ensure the counter is as expected (total number of messages), summed
// across all mailboxes.
let msg_processed_count =
fetch_metric("9092", "hyperlane_messages_processed_count", &hashmap! {})?
.iter()
.sum::<u32>();
if msg_processed_count != total_messages_expected {
log!(
"Relayer has {} processed messages, expected {}",
msg_processed_count,
total_messages_expected
);
return Ok(false);
}
let gas_payment_events_count = fetch_metric(
"9092",
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "gas_payments"},
)?
.iter()
.sum::<u32>();
// TestSendReceiver randomly breaks gas payments up into
// two. So we expect at least as many gas payments as messages.
if gas_payment_events_count < total_messages_expected {
log!(
"Relayer has {} gas payment events, expected at least {}",
gas_payment_events_count,
total_messages_expected
);
return Ok(false);
}
if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) {
log!("Solana termination invariants not met");
return Ok(false);
}
let dispatched_messages_scraped = fetch_metric(
"9093",
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "message_dispatch"},
)?
.iter()
.sum::<u32>();
if dispatched_messages_scraped != eth_messages_expected {
log!(
"Scraper has scraped {} dispatched messages, expected {}",
dispatched_messages_scraped,
eth_messages_expected
);
return Ok(false);
}
let gas_payments_scraped = fetch_metric(
"9093",
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "gas_payment"},
)?
.iter()
.sum::<u32>();
// The relayer and scraper should have the same number of gas payments.
if gas_payments_scraped != gas_payment_events_count {
log!(
"Scraper has scraped {} gas payments, expected {}",
gas_payments_scraped,
gas_payment_events_count
);
return Ok(false);
}
let delivered_messages_scraped = fetch_metric(
"9093",
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "message_delivery"},
)?
.iter()
.sum::<u32>();
if delivered_messages_scraped != eth_messages_expected {
log!(
"Scraper has scraped {} delivered messages, expected {}",
delivered_messages_scraped,
eth_messages_expected
);
return Ok(false);
}
log!("Termination invariants have been meet");
Ok(true)
}

@ -10,56 +10,71 @@
//! does not include the initial setup time. If this timeout is reached before
//! the end conditions are met, the test is a failure. Defaults to 10 min.
//! - `E2E_KATHY_MESSAGES`: Number of kathy messages to dispatch. Defaults to 16 if CI mode is enabled.
//! - `E2E_LOG_ALL`: Log all output instead of writing to log files. Defaults to
//! true if CI mode,
//! else false.
use std::path::Path;
use std::{
fs::{self},
path::PathBuf,
fs,
process::{Child, ExitCode},
sync::atomic::{AtomicBool, Ordering},
thread::sleep,
time::{Duration, Instant},
};
use eyre::Result;
use maplit::hashmap;
use tempfile::tempdir;
use logging::log;
pub use metrics::fetch_metric;
use program::Program;
use crate::config::ProgramArgs;
use crate::utils::{
build_cmd, concat_path, make_static, run_agent, stop_child, AgentHandles, TaskHandle,
};
use crate::config::Config;
use crate::ethereum::start_anvil;
use crate::invariants::termination_invariants_met;
use crate::solana::*;
use crate::utils::{concat_path, make_static, stop_child, AgentHandles, ArbitraryData, TaskHandle};
mod config;
mod ethereum;
mod invariants;
mod logging;
mod metrics;
mod program;
mod solana;
mod utils;
pub use metrics::fetch_metric;
/// These private keys are from hardhat/anvil's testing accounts.
const RELAYER_KEYS: &[&str] = &[
// test1
"0x2a871d0798f97d79848a013d4936a73bf4cc922c825d33c1cf7073dff6d409c6",
// test2
"0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97",
// test3
"0x4bbbf85ce3377467afe5d46f804f221813b2bb87f24d81f60f1fcdbf7cbf4356",
// sealeveltest1
"0x892bf6949af4233e62f854cb3618bc1a3ee3341dc71ada08c4d5deca239acf4f",
// sealeveltest2
"0x892bf6949af4233e62f854cb3618bc1a3ee3341dc71ada08c4d5deca239acf4f",
];
/// These private keys are from hardhat/anvil's testing accounts.
/// These must be consistent with the ISM config for the test.
const VALIDATOR_KEYS: &[&str] = &[
// eth
"0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a",
"0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba",
"0x92db14e403b83dfe3df233f83dfa3a0d7096f21ca9b0d6d6b8d88b2b4ec1564e",
// sealevel
"0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d",
];
const VALIDATOR_ORIGIN_CHAINS: &[&str] = &["test1", "test2", "test3", "sealeveltest1"];
const AGENT_BIN_PATH: &str = "target/debug";
const INFRA_PATH: &str = "../typescript/infra";
const TS_SDK_PATH: &str = "../typescript/sdk";
const MONOREPO_ROOT_PATH: &str = "../";
type DynPath = Box<dyn AsRef<Path>>;
static RUN_LOG_WATCHERS: AtomicBool = AtomicBool::new(true);
static SHUTDOWN: AtomicBool = AtomicBool::new(false);
@ -67,17 +82,16 @@ static SHUTDOWN: AtomicBool = AtomicBool::new(false);
/// cleanup purposes at this time.
#[derive(Default)]
struct State {
build_log: PathBuf,
log_all: bool,
scraper_postgres_initialized: bool,
agents: Vec<Child>,
watchers: Vec<TaskHandle<()>>,
agents: Vec<(String, Child)>,
watchers: Vec<Box<dyn TaskHandle<Output = ()>>>,
data: Vec<Box<dyn ArbitraryData>>,
}
impl State {
fn push_agent(&mut self, handles: AgentHandles) {
self.agents.push(handles.0);
self.watchers.push(handles.1);
self.agents.push((handles.0, handles.1));
self.watchers.push(handles.2);
self.watchers.push(handles.3);
self.data.push(handles.4);
}
}
impl Drop for State {
@ -86,31 +100,25 @@ impl Drop for State {
log!("Signaling children to stop...");
// stop children in reverse order
self.agents.reverse();
for mut agent in self.agents.drain(..) {
for (name, mut agent) in self.agents.drain(..) {
log!("Stopping child {}", name);
stop_child(&mut agent);
}
if self.scraper_postgres_initialized {
log!("Stopping scraper postgres...");
kill_scraper_postgres(&self.build_log, self.log_all);
}
log!("Joining watchers...");
RUN_LOG_WATCHERS.store(false, Ordering::Relaxed);
for w in self.watchers.drain(..) {
w.join();
w.join_box();
}
// drop any held data
self.data.reverse();
for data in self.data.drain(..) {
drop(data)
}
fs::remove_dir_all(SOLANA_CHECKPOINT_LOCATION).unwrap_or_default();
}
}
fn main() -> ExitCode {
macro_rules! shutdown_if_needed {
() => {
if SHUTDOWN.load(Ordering::Relaxed) {
log!("Early termination, shutting down");
return ExitCode::FAILURE;
}
};
}
// on sigint we want to trigger things to stop running
ctrlc::set_handler(|| {
log!("Terminating...");
@ -118,23 +126,26 @@ fn main() -> ExitCode {
})
.unwrap();
let config = config::Config::load();
assert_eq!(VALIDATOR_ORIGIN_CHAINS.len(), VALIDATOR_KEYS.len());
const VALIDATOR_COUNT: usize = VALIDATOR_KEYS.len();
if !config.log_all {
fs::create_dir_all(&config.log_dir).expect("Failed to make log dir");
}
let build_log = concat_path(&config.log_dir, "build.log");
let config = Config::load();
let checkpoints_dirs = (0..3).map(|_| tempdir().unwrap()).collect::<Vec<_>>();
let solana_checkpoint_path = Path::new(SOLANA_CHECKPOINT_LOCATION);
fs::remove_dir_all(solana_checkpoint_path).unwrap_or_default();
let checkpoints_dirs: Vec<DynPath> = (0..VALIDATOR_COUNT - 1)
.map(|_| Box::new(tempdir().unwrap()) as DynPath)
.chain([Box::new(solana_checkpoint_path) as DynPath])
.collect();
let rocks_db_dir = tempdir().unwrap();
let relayer_db = concat_path(&rocks_db_dir, "relayer");
let validator_dbs = (0..3)
let validator_dbs = (0..VALIDATOR_COUNT)
.map(|i| concat_path(&rocks_db_dir, format!("validator{i}")))
.collect::<Vec<_>>();
let common_agent_env = ProgramArgs::default()
let common_agent_env = Program::default()
.env("RUST_BACKTRACE", "full")
.hyp_env("TRACING_FMT", "pretty")
.hyp_env("TRACING_FMT", "compact")
.hyp_env("TRACING_LEVEL", "debug")
.hyp_env("CHAINS_TEST1_INDEX_CHUNK", "1")
.hyp_env("CHAINS_TEST2_INDEX_CHUNK", "1")
@ -156,6 +167,8 @@ fn main() -> ExitCode {
.hyp_env("DB", relayer_db.to_str().unwrap())
.hyp_env("CHAINS_TEST1_SIGNER_KEY", RELAYER_KEYS[0])
.hyp_env("CHAINS_TEST2_SIGNER_KEY", RELAYER_KEYS[1])
.hyp_env("CHAINS_SEALEVELTEST1_SIGNER_KEY", RELAYER_KEYS[3])
.hyp_env("CHAINS_SEALEVELTEST2_SIGNER_KEY", RELAYER_KEYS[4])
.hyp_env("RELAYCHAINS", "invalidchain,otherinvalid")
.hyp_env("ALLOWLOCALCHECKPOINTSYNCERS", "true")
.arg(
@ -164,7 +177,10 @@ fn main() -> ExitCode {
)
// default is used for TEST3
.arg("defaultSigner.key", RELAYER_KEYS[2])
.arg("relayChains", "test1,test2,test3");
.arg(
"relayChains",
"test1,test2,test3,sealeveltest1,sealeveltest2",
);
let base_validator_env = common_agent_env
.clone()
@ -184,17 +200,17 @@ fn main() -> ExitCode {
.hyp_env("INTERVAL", "5")
.hyp_env("CHECKPOINTSYNCER_TYPE", "localStorage");
let validator_envs = (0..3)
let validator_envs = (0..VALIDATOR_COUNT)
.map(|i| {
base_validator_env
.clone()
.hyp_env("METRICS", (9094 + i).to_string())
.hyp_env("DB", validator_dbs[i].to_str().unwrap())
.hyp_env("ORIGINCHAINNAME", format!("test{}", 1 + i))
.hyp_env("ORIGINCHAINNAME", VALIDATOR_ORIGIN_CHAINS[i])
.hyp_env("VALIDATOR_KEY", VALIDATOR_KEYS[i])
.hyp_env(
"CHECKPOINTSYNCER_PATH",
checkpoints_dirs[i].path().to_str().unwrap(),
(*checkpoints_dirs[i]).as_ref().to_str().unwrap(),
)
})
.collect::<Vec<_>>();
@ -215,17 +231,12 @@ fn main() -> ExitCode {
);
let mut state = State::default();
state.build_log = build_log;
state.log_all = config.log_all;
if !config.log_all {
log!("Logs in {}", config.log_dir.display());
}
log!(
"Signed checkpoints in {}",
checkpoints_dirs
.iter()
.map(|d| d.path().display().to_string())
.map(|d| (**d).as_ref().display().to_string())
.collect::<Vec<_>>()
.join(", ")
);
@ -234,131 +245,89 @@ fn main() -> ExitCode {
log!("Validator {} DB in {}", i + 1, validator_dbs[i].display());
});
let build_log_ref = make_static(state.build_log.to_str().unwrap().to_owned());
let build_cmd = move |cmd| build_cmd(cmd, build_log_ref, config.log_all, true);
let run_agent = |args, prefix| run_agent(args, prefix, &config);
//
// Ready to run...
//
let (solana_path, solana_path_tempdir) = install_solana_cli_tools().join();
state.data.push(Box::new(solana_path_tempdir));
let solana_program_builder = build_solana_programs(solana_path.clone());
shutdown_if_needed!();
// this task takes a long time in the CI so run it in parallel
log!("Building rust...");
let build_rust = build_cmd(
ProgramArgs::new("cargo")
.cmd("build")
.arg("features", "test-utils")
.arg("bin", "relayer")
.arg("bin", "validator")
.arg("bin", "scraper")
.arg("bin", "init-db"),
);
log!("Running postgres db...");
kill_scraper_postgres(&state.build_log, config.log_all);
build_cmd(
ProgramArgs::new("docker")
.cmd("run")
.flag("rm")
.arg("name", "scraper-testnet-postgres")
.arg("env", "POSTGRES_PASSWORD=47221c18c610")
.arg("publish", "5432:5432")
.flag("detach")
.cmd("postgres:14"),
)
.join();
state.scraper_postgres_initialized = true;
shutdown_if_needed!();
log!("Installing typescript dependencies...");
let yarn_monorepo = ProgramArgs::new("yarn").working_dir(MONOREPO_ROOT_PATH);
build_cmd(yarn_monorepo.clone().cmd("install")).join();
if !config.is_ci_env {
// don't need to clean in the CI
build_cmd(yarn_monorepo.clone().cmd("clean")).join();
}
shutdown_if_needed!();
build_cmd(yarn_monorepo.clone().cmd("build")).join();
shutdown_if_needed!();
log!("Launching anvil...");
let anvil_args = ProgramArgs::new("anvil")
.flag("silent")
.filter_logs(filter_anvil_logs);
let anvil = run_agent(anvil_args, "ETH");
state.push_agent(anvil);
sleep(Duration::from_secs(10));
let build_rust = Program::new("cargo")
.cmd("build")
.arg("features", "test-utils")
.arg("bin", "relayer")
.arg("bin", "validator")
.arg("bin", "scraper")
.arg("bin", "init-db")
.arg("bin", "hyperlane-sealevel-client")
.filter_logs(|l| !l.contains("workspace-inheritance"))
.run();
let yarn_infra = ProgramArgs::new("yarn")
.working_dir(INFRA_PATH)
.env("ALLOW_LEGACY_MULTISIG_ISM", "true");
log!("Deploying hyperlane ism contracts...");
build_cmd(yarn_infra.clone().cmd("deploy-ism")).join();
shutdown_if_needed!();
log!("Rebuilding sdk...");
let yarn_sdk = ProgramArgs::new("yarn").working_dir(TS_SDK_PATH);
build_cmd(yarn_sdk.clone().cmd("build")).join();
let start_anvil = start_anvil(config.clone());
log!("Deploying hyperlane core contracts...");
build_cmd(yarn_infra.clone().cmd("deploy-core")).join();
let solana_program_path = solana_program_builder.join();
log!("Deploying hyperlane igp contracts...");
build_cmd(yarn_infra.clone().cmd("deploy-igp")).join();
if !config.is_ci_env {
// Follow-up 'yarn hardhat node' invocation with 'yarn prettier' to fixup
// formatting on any autogenerated json config files to avoid any diff creation.
build_cmd(yarn_monorepo.cmd("prettier")).join();
}
shutdown_if_needed!();
// Rebuild the SDK to pick up the deployed contracts
log!("Rebuilding sdk...");
build_cmd(yarn_sdk.cmd("build")).join();
log!("Running postgres db...");
let postgres = Program::new("docker")
.cmd("run")
.flag("rm")
.arg("name", "scraper-testnet-postgres")
.arg("env", "POSTGRES_PASSWORD=47221c18c610")
.arg("publish", "5432:5432")
.cmd("postgres:14")
.spawn("SQL");
state.push_agent(postgres);
build_rust.join();
log!("Init postgres db...");
build_cmd(ProgramArgs::new(concat_path(AGENT_BIN_PATH, "init-db"))).join();
shutdown_if_needed!();
let solana_ledger_dir = tempdir().unwrap();
let start_solana_validator = start_solana_test_validator(
solana_path.clone(),
solana_program_path,
solana_ledger_dir.as_ref().to_path_buf(),
);
let scraper = run_agent(scraper_env, "SCR");
state.push_agent(scraper);
let (solana_config_path, solana_validator) = start_solana_validator.join();
state.push_agent(solana_validator);
state.push_agent(start_anvil.join());
// spawn 1st validator before any messages have been sent to test empty mailbox
let validator1_env = validator_envs.first().unwrap().clone();
let validator1 = run_agent(validator1_env, "VAL1");
state.push_agent(validator1);
state.push_agent(validator_envs.first().unwrap().clone().spawn("VL1"));
sleep(Duration::from_secs(5));
log!("Init postgres db...");
Program::new(concat_path(AGENT_BIN_PATH, "init-db"))
.run()
.join();
state.push_agent(scraper_env.spawn("SCR"));
// Send half the kathy messages before starting the rest of the agents
let kathy_env = yarn_infra
let kathy_env = Program::new("yarn")
.working_dir(INFRA_PATH)
.cmd("kathy")
.arg("messages", (config.kathy_messages / 2).to_string())
.arg("timeout", "1000");
let (mut kathy, kathy_stdout, kathy_stderr) = run_agent(kathy_env.clone(), "KTY");
state.watchers.push(kathy_stdout);
state.watchers.push(kathy_stderr);
kathy.wait().unwrap();
kathy_env.clone().run().join();
// spawn the rest of the validators
for (i, validator_env) in validator_envs.into_iter().enumerate().skip(1) {
let validator = run_agent(validator_env, make_static(format!("VAL{}", 1 + i)));
let validator = validator_env.spawn(make_static(format!("VL{}", 1 + i)));
state.push_agent(validator);
}
let relayer = run_agent(relayer_env, "RLY");
state.push_agent(relayer);
state.push_agent(relayer_env.spawn("RLY"));
initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join();
log!("Setup complete! Agents running in background...");
log!("Ctrl+C to end execution...");
// Send half the kathy messages after the relayer comes up
let kathy_env = kathy_env.flag("mineforever");
let kathy = run_agent(kathy_env, "KTY");
state.push_agent(kathy);
state.push_agent(kathy_env.flag("mineforever").spawn("KTY"));
let loop_start = Instant::now();
// give things a chance to fully start.
@ -367,10 +336,10 @@ fn main() -> ExitCode {
while !SHUTDOWN.load(Ordering::Relaxed) {
if config.ci_mode {
// for CI we have to look for the end condition.
let num_messages_expected = (config.kathy_messages / 2) as u32 * 2;
if termination_invariants_met(num_messages_expected).unwrap_or(false) {
if termination_invariants_met(&config, &solana_path, &solana_config_path)
.unwrap_or(false)
{
// end condition reached successfully
log!("Agent metrics look healthy");
break;
} else if (Instant::now() - loop_start).as_secs() > config.ci_mode_timeout {
// we ran out of time
@ -381,10 +350,11 @@ fn main() -> ExitCode {
}
// verify long-running tasks are still running
for child in state.agents.iter_mut() {
for (name, child) in state.agents.iter_mut() {
if child.try_wait().unwrap().is_some() {
log!("Child process exited unexpectedly, shutting down");
log!("Child process {} exited unexpectedly, shutting down", name);
failure_occurred = true;
SHUTDOWN.store(true, Ordering::Relaxed);
break;
}
}
@ -393,122 +363,10 @@ fn main() -> ExitCode {
}
if failure_occurred {
log!("E2E tests failed");
ExitCode::FAILURE
} else {
log!("E2E tests passed");
ExitCode::SUCCESS
}
}
/// Use the metrics to check if the relayer queues are empty and the expected
/// number of messages have been sent.
fn termination_invariants_met(num_expected_messages: u32) -> Result<bool> {
let lengths = fetch_metric("9092", "hyperlane_submitter_queue_length", &hashmap! {})?;
assert!(!lengths.is_empty(), "Could not find queue length metric");
if lengths.into_iter().any(|n| n != 0) {
log!("Relayer queues not empty");
return Ok(false);
};
// Also ensure the counter is as expected (total number of messages), summed
// across all mailboxes.
let msg_processed_count =
fetch_metric("9092", "hyperlane_messages_processed_count", &hashmap! {})?
.iter()
.sum::<u32>();
if msg_processed_count != num_expected_messages {
log!(
"Relayer has {} processed messages, expected {}",
msg_processed_count,
num_expected_messages
);
return Ok(false);
}
let gas_payment_events_count = fetch_metric(
"9092",
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "gas_payments"},
)?
.iter()
.sum::<u32>();
// TestSendReceiver randomly breaks gas payments up into
// two. So we expect at least as many gas payments as messages.
if gas_payment_events_count < num_expected_messages {
log!(
"Relayer has {} gas payment events, expected at least {}",
gas_payment_events_count,
num_expected_messages
);
return Ok(false);
}
let dispatched_messages_scraped = fetch_metric(
"9093",
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "message_dispatch"},
)?
.iter()
.sum::<u32>();
if dispatched_messages_scraped != num_expected_messages {
log!(
"Scraper has scraped {} dispatched messages, expected {}",
dispatched_messages_scraped,
num_expected_messages
);
return Ok(false);
}
let gas_payments_scraped = fetch_metric(
"9093",
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "gas_payment"},
)?
.iter()
.sum::<u32>();
// The relayer and scraper should have the same number of gas payments.
if gas_payments_scraped != gas_payment_events_count {
log!(
"Scraper has scraped {} gas payments, expected {}",
gas_payments_scraped,
num_expected_messages
);
return Ok(false);
}
let delivered_messages_scraped = fetch_metric(
"9093",
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "message_delivery"},
)?
.iter()
.sum::<u32>();
if delivered_messages_scraped != num_expected_messages {
log!(
"Scraper has scraped {} delivered messages, expected {}",
delivered_messages_scraped,
num_expected_messages
);
Ok(false)
} else {
log!("Termination invariants have been meet");
Ok(true)
}
}
fn kill_scraper_postgres(build_log: impl AsRef<Path>, log_all: bool) {
build_cmd(
ProgramArgs::new("docker")
.cmd("stop")
.cmd("scraper-testnet-postgres"),
&build_log,
log_all,
false,
)
.join();
}
/// Return true if a given log line should be kept.
fn filter_anvil_logs(_log: &str) -> bool {
// for now discard all anvil logs
false
}

@ -0,0 +1,346 @@
use std::collections::BTreeMap;
use std::ffi::OsStr;
use std::fmt::{Debug, Display, Formatter};
use std::io::{BufRead, BufReader, Read};
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::Sender;
use std::sync::{mpsc, Arc};
use std::thread::{sleep, spawn};
use std::time::Duration;
use eyre::Context;
use macro_rules_attribute::apply;
use crate::logging::log;
use crate::utils::{
as_task, stop_child, AgentHandles, ArbitraryData, LogFilter, MappingTaskHandle,
SimpleTaskHandle, TaskHandle,
};
use crate::{RUN_LOG_WATCHERS, SHUTDOWN};
#[derive(Default, Clone)]
#[must_use]
pub struct Program {
bin: Option<Arc<String>>,
args: Vec<Arc<String>>,
env: BTreeMap<Arc<String>, Arc<String>>,
working_dir: Option<Arc<PathBuf>>,
log_filter: Option<LogFilter>,
arbitrary_data: Vec<Arc<dyn ArbitraryData>>,
}
impl Debug for Program {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Program")
.field("bin", &self.bin)
.field("args", &self.args)
.field("env", &self.env)
.field("working_dir", &self.working_dir)
.field("log_filter", &self.log_filter.is_some())
.finish()
}
}
impl Display for Program {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
if f.alternate() {
let wd = self
.working_dir
.as_ref()
.map(|wd| wd.display())
.unwrap_or_else(|| Path::new("./").display());
write!(f, "({wd})$ ")?;
for (k, v) in &self.env {
write!(f, "{k}={v} ")?;
}
if let Some(path_result) = self.get_bin_path() {
if let Ok(bp) = path_result {
write!(f, "{}", bp.display())?;
} else {
write!(f, "{}", self.bin.as_ref().unwrap())?;
}
} else {
write!(f, "???")?;
}
for a in &self.args {
write!(f, " {a}")?;
}
Ok(())
} else {
write!(
f,
"{}",
self.bin.as_deref().map(String::as_str).unwrap_or("???")
)
}
}
}
impl Program {
pub fn new(bin: impl AsRef<OsStr>) -> Self {
Self::default().bin(bin)
}
pub fn bin(mut self, bin: impl AsRef<OsStr>) -> Self {
self.bin = Some(
bin.as_ref()
.to_str()
.expect("Invalid string encoding for binary name")
.to_owned()
.into(),
);
self
}
pub fn raw_arg(mut self, arg: impl Into<String>) -> Self {
self.args.push(arg.into().into());
self
}
pub fn cmd(self, cmd: impl Into<String>) -> Self {
let cmd = cmd.into();
debug_assert!(!cmd.starts_with('-'), "arg should not start with -");
self.raw_arg(cmd)
}
pub fn flag(self, arg: impl AsRef<str>) -> Self {
debug_assert!(
!arg.as_ref().starts_with('-'),
"arg should not start with -"
);
self.raw_arg(format!("--{}", arg.as_ref()))
}
/// Assumes an arg in the format of `--$ARG1 $ARG2`, arg1 and arg2 should exclude quoting, equal sign, and the leading hyphens.
pub fn arg(self, arg1: impl AsRef<str>, arg2: impl Into<String>) -> Self {
self.flag(arg1).cmd(arg2)
}
/// Assumes an arg in the format of `--$ARG1 $ARG2 $ARG3`, args should exclude quoting, equal sign, and the leading hyphens.
pub fn arg3(
self,
arg1: impl AsRef<str>,
arg2: impl Into<String>,
arg3: impl Into<String>,
) -> Self {
self.flag(arg1).cmd(arg2).cmd(arg3)
}
/// add an env that will be prefixed with the default hyperlane env prefix
pub fn hyp_env(self, key: impl AsRef<str>, value: impl Into<String>) -> Self {
const PREFIX: &str = "HYP_BASE_";
let key = key.as_ref();
debug_assert!(
!key.starts_with(PREFIX),
"env key should not start with prefix that is being added"
);
self.env(format!("{PREFIX}{key}"), value)
}
/// add a system env that makes no prefix assumptions
pub fn env(mut self, key: impl Into<String>, value: impl Into<String>) -> Self {
self.env.insert(key.into().into(), value.into().into());
self
}
pub fn working_dir(mut self, path: impl Into<PathBuf>) -> Self {
self.working_dir = Some(path.into().into());
self
}
/// Filter logs being printed to stdout/stderr. If the LogFilter returns true,
/// then it will keep that log line, if it returns false it will discard it.
/// This is ignored when logging to files.
pub fn filter_logs(mut self, filter: LogFilter) -> Self {
self.log_filter = Some(filter);
self
}
/// Remember some arbitrary data until either this program args goes out of scope or until the
/// agent/child process exits. This is useful for preventing something from dropping.
pub fn remember(mut self, data: impl ArbitraryData) -> Self {
self.arbitrary_data.push(Arc::new(data));
self
}
pub fn create_command(&self) -> Command {
let mut cmd = Command::new(
self.get_bin_path()
.expect("bin path must be specified")
.unwrap(),
);
if let Some(wd) = &self.working_dir {
cmd.current_dir(wd.as_path());
}
for (k, v) in self.env.iter() {
cmd.env(k.as_str(), v.as_str());
}
cmd.args(self.args.iter().map(AsRef::as_ref));
cmd
}
pub fn get_filter(&self) -> Option<LogFilter> {
self.log_filter
}
/// Try to get the path to the binary
pub fn get_bin_path(&self) -> Option<eyre::Result<PathBuf>> {
self.bin.as_ref().map(|raw_bin_name| {
which::which(raw_bin_name.as_ref())
.with_context(|| format!("Cannot find binary: {raw_bin_name}"))
})
}
/// Get just the name component of the binary
pub fn get_bin_name(&self) -> String {
Path::new(
self.bin
.as_ref()
.expect("bin path must be specified")
.as_str(),
)
.file_name()
.expect("bin must have a file name")
.to_str()
.unwrap()
.to_owned()
}
pub fn get_memory(&self) -> Box<dyn ArbitraryData> {
Box::new(self.arbitrary_data.clone())
}
#[allow(dead_code)]
pub fn run(self) -> impl TaskHandle<Output = ()> {
MappingTaskHandle(self.run_full(true, false), |_| ())
}
#[allow(dead_code)]
pub fn run_ignore_code(self) -> impl TaskHandle<Output = ()> {
MappingTaskHandle(self.run_full(false, false), |_| ())
}
#[allow(dead_code)]
pub fn run_with_output(self) -> impl TaskHandle<Output = Vec<String>> {
MappingTaskHandle(self.run_full(false, true), |o| {
o.expect("Command did not return output")
})
}
pub fn spawn(self, log_prefix: &'static str) -> AgentHandles {
let mut command = self.create_command();
command.stdout(Stdio::piped()).stderr(Stdio::piped());
log!("Spawning {}...", &self);
let mut child = command
.spawn()
.unwrap_or_else(|e| panic!("Failed to start {:?} with error: {e}", &self));
let child_stdout = child.stdout.take().unwrap();
let filter = self.get_filter();
let stdout =
spawn(move || prefix_log(child_stdout, log_prefix, &RUN_LOG_WATCHERS, filter, None));
let child_stderr = child.stderr.take().unwrap();
let stderr =
spawn(move || prefix_log(child_stderr, log_prefix, &RUN_LOG_WATCHERS, filter, None));
(
log_prefix.to_owned(),
child,
Box::new(SimpleTaskHandle(stdout)),
Box::new(SimpleTaskHandle(stderr)),
self.get_memory(),
)
}
#[apply(as_task)]
fn run_full(self, assert_success: bool, capture_output: bool) -> Option<Vec<String>> {
let mut command = self.create_command();
command.stdout(Stdio::piped());
command.stderr(Stdio::piped());
log!("{:#}", &self);
let mut child = command
.spawn()
.unwrap_or_else(|e| panic!("Failed to start command `{}` with Error: {e}", &self));
let filter = self.get_filter();
let running = Arc::new(AtomicBool::new(true));
let (stdout_ch_tx, stdout_ch_rx) = capture_output.then(mpsc::channel).unzip();
let stdout = {
let stdout = child.stdout.take().unwrap();
let name = self.get_bin_name();
let running = running.clone();
spawn(move || prefix_log(stdout, &name, &running, filter, stdout_ch_tx))
};
let stderr = {
let stderr = child.stderr.take().unwrap();
let name = self.get_bin_name();
let running = running.clone();
spawn(move || prefix_log(stderr, &name, &running, filter, None))
};
let status = loop {
sleep(Duration::from_millis(500));
if let Some(exit_status) = child.try_wait().expect("Failed to run command") {
break exit_status;
} else if SHUTDOWN.load(Ordering::Relaxed) {
log!("Forcing termination of command `{}`", &self);
stop_child(&mut child);
break child.wait().expect("Failed to run command");
}
};
running.store(false, Ordering::Relaxed);
stdout.join().unwrap();
stderr.join().unwrap();
assert!(
!assert_success || !RUN_LOG_WATCHERS.load(Ordering::Relaxed) || status.success(),
"Command returned non-zero exit code: {:?}",
&self
);
stdout_ch_rx.map(|rx| rx.into_iter().collect())
}
}
/// Read from a process output and add a string to the front before writing it to stdout.
fn prefix_log(
output: impl Read,
prefix: &str,
run_log_watcher: &AtomicBool,
filter: Option<LogFilter>,
channel: Option<Sender<String>>,
) {
let mut reader = BufReader::new(output).lines();
loop {
if let Some(line) = reader.next() {
let line = match line {
Ok(l) => l,
Err(e) => {
// end of stream, probably
log!("Error reading from output for {}: {}", prefix, e);
break;
}
};
if let Some(filter) = filter.as_ref() {
if !(filter)(&line) {
continue;
}
}
println!("<{prefix}> {line}");
if let Some(channel) = &channel {
// ignore send errors
channel.send(line).unwrap_or(());
}
} else if run_log_watcher.load(Ordering::Relaxed) {
sleep(Duration::from_millis(10));
} else {
break;
}
}
}

@@ -0,0 +1,342 @@
use std::fs;
use std::path::{Path, PathBuf};
use std::thread::sleep;
use std::time::Duration;
use macro_rules_attribute::apply;
use tempfile::{tempdir, NamedTempFile};
use crate::logging::log;
use crate::program::Program;
use crate::utils::{as_task, concat_path, AgentHandles, ArbitraryData, TaskHandle};
use crate::AGENT_BIN_PATH;
// Solana program tuples of:
// 0: Solana address or keypair for the bpf program
// 1: Name of the program's shared object file
const SOLANA_PROGRAMS: &[(&str, &str)] = &[
(
"TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA",
"spl_token.so",
),
(
"TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb",
"spl_token_2022.so",
),
(
"ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL",
"spl_associated_token_account.so",
),
("noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV", "spl_noop.so"),
];
const SOLANA_KEYPAIR: &str = "config/test-sealevel-keys/test_deployer-keypair.json";
const SOLANA_DEPLOYER_ACCOUNT: &str = "config/test-sealevel-keys/test_deployer-account.json";
const SBF_OUT_PATH: &str = "target/dist";
// Relative paths to solana program source code within rust/sealevel/programs in this repo.
const SOLANA_HYPERLANE_PROGRAMS: &[&str] = &[
"mailbox",
"validator-announce",
"ism/multisig-ism-message-id",
"hyperlane-sealevel-token",
"hyperlane-sealevel-token-native",
"hyperlane-sealevel-token-collateral",
];
const SOLANA_PROGRAM_LIBRARY_ARCHIVE: &str =
"https://github.com/hyperlane-xyz/solana-program-library/releases/download/2023-07-27-01/spl.tar.gz";
const SOLANA_LOCAL_CHAIN_ID: &str = "13375";
const SOLANA_REMOTE_CHAIN_ID: &str = "13376";
/// The Solana CLI tool version to download and use.
const SOLANA_CLI_VERSION: &str = "1.14.20";
// TODO: use a temp dir instead!
pub const SOLANA_CHECKPOINT_LOCATION: &str =
"/tmp/test_sealevel_checkpoints_0x70997970c51812dc3a010c7d01b50e0d17dc79c8";
/// Install the CLI tools and return the path to the bin dir.
#[apply(as_task)]
pub fn install_solana_cli_tools() -> (PathBuf, impl ArbitraryData) {
let solana_download_dir = tempdir().unwrap();
let solana_tools_dir = tempdir().unwrap();
log!("Downloading solana cli release v{}", SOLANA_CLI_VERSION);
let solana_release_name = {
// best effort to pick one of the supported targets
let target = if cfg!(target_os = "linux") {
"x86_64-unknown-linux-gnu"
} else if cfg!(target_os = "macos") {
if cfg!(target_arch = "aarch64") {
"aarch64-apple-darwin"
} else {
"x86_64-apple-darwin"
}
} else if cfg!(target_os = "windows") {
"pc-windows-msvc"
} else {
panic!("Current os is not supported by solana")
};
format!("solana-release-{target}")
};
let solana_archive_name = format!("{solana_release_name}.tar.bz2");
Program::new("curl")
.arg("output", &solana_archive_name)
.flag("location")
.cmd(format!("https://github.com/solana-labs/solana/releases/download/v{SOLANA_CLI_VERSION}/{solana_archive_name}"))
.flag("silent")
.working_dir(solana_download_dir.as_ref().to_str().unwrap())
.run()
.join();
log!("Uncompressing solana release");
Program::new("tar")
.flag("extract")
.arg("file", &solana_archive_name)
.working_dir(solana_download_dir.as_ref().to_str().unwrap())
.run()
.join();
fs::rename(
concat_path(&solana_download_dir, "solana-release"),
&solana_tools_dir,
)
.expect("Failed to move solana-release dir");
(concat_path(&solana_tools_dir, "bin"), solana_tools_dir)
}
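// Illustrative only: the second tuple element is the tempdir guard, which
// callers are expected to hold so the extracted tools are not deleted while
// still in use, e.g.
//     let (solana_bin_path, _solana_tools_guard) = install_solana_cli_tools().join();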
#[apply(as_task)]
pub fn build_solana_programs(solana_cli_tools_path: PathBuf) -> PathBuf {
let out_path = Path::new(SBF_OUT_PATH);
if out_path.exists() {
fs::remove_dir_all(out_path).expect("Failed to remove solana program deploy dir");
}
fs::create_dir_all(out_path).expect("Failed to create solana program deploy dir");
let out_path = out_path.canonicalize().unwrap();
Program::new("curl")
.arg("output", "spl.tar.gz")
.flag("location")
.cmd(SOLANA_PROGRAM_LIBRARY_ARCHIVE)
.flag("silent")
.working_dir(&out_path)
.run()
.join();
log!("Uncompressing solana programs");
Program::new("tar")
.flag("extract")
.arg("file", "spl.tar.gz")
.working_dir(&out_path)
.run()
.join();
log!("Remove temporary solana files");
fs::remove_file(concat_path(&out_path, "spl.tar.gz"))
.expect("Failed to remove solana program archive");
let build_sbf = Program::new(
concat_path(&solana_cli_tools_path, "cargo-build-sbf")
.to_str()
.unwrap(),
)
.env("PATH", updated_path(&solana_cli_tools_path))
.env("SBF_OUT_PATH", out_path.to_str().unwrap());
// build our programs
for &path in SOLANA_HYPERLANE_PROGRAMS {
build_sbf
.clone()
.working_dir(concat_path("sealevel/programs", path))
.run()
.join();
}
log!("All hyperlane solana programs built successfully");
out_path
}
#[apply(as_task)]
pub fn start_solana_test_validator(
solana_cli_tools_path: PathBuf,
solana_programs_path: PathBuf,
ledger_dir: PathBuf,
) -> (PathBuf, AgentHandles) {
// init solana config
let solana_config = NamedTempFile::new().unwrap().into_temp_path();
let solana_config_path = solana_config.to_path_buf();
Program::new(concat_path(&solana_cli_tools_path, "solana"))
.arg("config", solana_config.to_str().unwrap())
.cmd("config")
.cmd("set")
.arg("url", "localhost")
.run()
.join();
log!("Starting solana validator");
let mut args = Program::new(concat_path(&solana_cli_tools_path, "solana-test-validator"))
.flag("quiet")
.flag("reset")
.arg("ledger", ledger_dir.to_str().unwrap())
.arg3(
"account",
"E9VrvAdGRvCguN2XgXsgu9PNmMM3vZsU8LSUrM68j8ty",
SOLANA_DEPLOYER_ACCOUNT,
)
.remember(solana_config);
for &(address, lib) in SOLANA_PROGRAMS {
args = args.arg3(
"bpf-program",
address,
concat_path(&solana_programs_path, lib).to_str().unwrap(),
);
}
let validator = args.spawn("SOL");
sleep(Duration::from_secs(5));
log!("Deploying the hyperlane programs to solana");
let sealevel_client = sealevel_client(&solana_cli_tools_path, &solana_config_path);
let sealevel_client_deploy_core = sealevel_client
.clone()
.arg("compute-budget", "200000")
.cmd("core")
.cmd("deploy")
.arg("environment", "local-e2e")
.arg("environments-dir", "sealevel/environments")
.arg("built-so-dir", SBF_OUT_PATH)
.flag("use-existing-keys");
sealevel_client_deploy_core
.clone()
.arg("local-domain", SOLANA_LOCAL_CHAIN_ID)
.arg("chain", "sealeveltest1")
.run()
.join();
sealevel_client_deploy_core
.arg("local-domain", SOLANA_REMOTE_CHAIN_ID)
.arg("chain", "sealeveltest2")
.run()
.join();
sealevel_client
.clone()
.arg("compute-budget", "200000")
.cmd("warp-route")
.cmd("deploy")
.arg("environment", "local-e2e")
.arg("environments-dir", "sealevel/environments")
.arg("built-so-dir", SBF_OUT_PATH)
.arg("warp-route-name", "testwarproute")
.arg(
"token-config-file",
"sealevel/environments/local-e2e/warp-routes/testwarproute/token-config.json",
)
.arg(
"chain-config-file",
"sealevel/environments/local-e2e/warp-routes/chain-config.json",
)
.arg("ata-payer-funding-amount", "1000000000")
.run()
.join();
log!("Initializing solana programs");
sealevel_client
.clone()
.cmd("multisig-ism-message-id")
.cmd("set-validators-and-threshold")
.arg("domain", SOLANA_LOCAL_CHAIN_ID)
.arg("validators", "0x70997970c51812dc3a010c7d01b50e0d17dc79c8")
.arg("threshold", "1")
.arg("program-id", "4RSV6iyqW9X66Xq3RDCVsKJ7hMba5uv6XP8ttgxjVUB1")
.run()
.join();
sealevel_client
.cmd("validator-announce")
.cmd("announce")
.arg("validator", "0x70997970c51812dc3a010c7d01b50e0d17dc79c8")
.arg(
"storage-location",
format!("file://{SOLANA_CHECKPOINT_LOCATION}")
)
.arg("signature", "0xcd87b715cd4c2e3448be9e34204cf16376a6ba6106e147a4965e26ea946dd2ab19598140bf26f1e9e599c23f6b661553c7d89e8db22b3609068c91eb7f0fa2f01b")
.run()
.join();
log!("Local Solana chain started and hyperlane programs deployed and initialized successfully");
(solana_config_path, validator)
}
#[apply(as_task)]
pub fn initiate_solana_hyperlane_transfer(
solana_cli_tools_path: PathBuf,
solana_config_path: PathBuf,
) {
let sender = Program::new(concat_path(&solana_cli_tools_path, "solana"))
.arg("config", solana_config_path.to_str().unwrap())
.arg("keypair", SOLANA_KEYPAIR)
.cmd("address")
.run_with_output()
.join()
.get(0)
.expect("failed to get sender address")
.trim()
.to_owned();
sealevel_client(&solana_cli_tools_path, &solana_config_path)
.cmd("token")
.cmd("transfer-remote")
.cmd(SOLANA_KEYPAIR)
.cmd("10000000000")
.cmd(SOLANA_REMOTE_CHAIN_ID)
.cmd(sender) // send to self
.cmd("native")
.arg("program-id", "CGn8yNtSD3aTTqJfYhUb6s1aVTN75NzwtsFKo1e83aga")
.run()
.join();
}
pub fn solana_termination_invariants_met(
solana_cli_tools_path: &Path,
solana_config_path: &Path,
) -> bool {
sealevel_client(solana_cli_tools_path, solana_config_path)
.cmd("mailbox")
.cmd("delivered")
.arg(
// This will break if any part of `transfer-remote` changes.
// This value was obtained by observing the relayer logs.
// TODO: get the actual message-id so we don't have to hardcode it
"message-id",
"0x7b8ba684e5ce44f898c5fa81785c83a00e32b5bef3412e648eb7a17bec497685",
)
.arg("program-id", "9tCUWNjpqcf3NUSrtp7vquYVCwbEByvLjZUrhG5dgvhj")
.run_with_output()
.join()
.join("\n")
.contains("Message delivered")
}
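// Illustrative caller-side loop (assumed; the actual call site lives in the
// main e2e flow): poll until the transfer message shows up as delivered.
//     while !solana_termination_invariants_met(&solana_cli_tools_path, &solana_config_path) {
//         sleep(Duration::from_secs(3));
//     }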
fn sealevel_client(solana_cli_tools_path: &Path, solana_config_path: &Path) -> Program {
Program::new(concat_path(AGENT_BIN_PATH, "hyperlane-sealevel-client"))
.env("PATH", updated_path(solana_cli_tools_path))
.env("RUST_BACKTRACE", "1")
.arg("config", solana_config_path.to_str().unwrap())
.arg("keypair", SOLANA_KEYPAIR)
}
fn updated_path(solana_cli_tools_path: &Path) -> String {
format!(
"{}:{}",
solana_cli_tools_path
.canonicalize()
.expect("Failed to canonicalize solana cli tools path")
.to_str()
.unwrap(),
std::env::var("PATH").unwrap_or_default(),
)
}
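// Minimal sketch of how the tasks above compose (assumed orchestration; the
// real flow lives in the main run-locally logic, and `ledger_dir` stands in
// for whatever temp dir the caller provides):
//     let (cli_path, _cli_tools_guard) = install_solana_cli_tools().join();
//     let programs_path = build_solana_programs(cli_path.clone()).join();
//     let (config_path, solana_validator) =
//         start_solana_test_validator(cli_path.clone(), programs_path, ledger_dir).join();
//     initiate_solana_hyperlane_transfer(cli_path, config_path).join();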

@@ -1,20 +1,33 @@
use std::fs::File;
use std::io::{BufRead, BufReader, BufWriter, Read, Write};
use std::path::{Path, PathBuf};
use std::process::{Child, Stdio};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread::{sleep, spawn, JoinHandle};
use std::time::Duration;
use std::process::Child;
use std::thread::JoinHandle;
use nix::libc::pid_t;
use nix::sys::signal;
use nix::sys::signal::Signal;
use nix::unistd::Pid;
use crate::config::{Config, ProgramArgs};
use crate::logging::log;
use crate::{RUN_LOG_WATCHERS, SHUTDOWN};
/// Make a function run as a task by writing `#[apply(as_task)]`. This will spawn a new thread
/// and then return the result through a TaskHandle.
macro_rules! as_task {
(
$(#[$fn_meta:meta])*
$fn_vis:vis fn $fn_name:ident(
$($arg_name:ident$(: $arg_type:ty)?),*$(,)?
) $(-> $ret_type:ty)? $body:block
) => {
$(#[$fn_meta])*
$fn_vis fn $fn_name($($arg_name$(: $arg_type)*),*) -> impl $crate::utils::TaskHandle<Output=as_task!(@handle $($ret_type)?)> {
$crate::utils::SimpleTaskHandle(::std::thread::spawn(move || $body))
}
};
(@handle $ret_type:ty) => {$ret_type};
(@handle) => {()};
}
pub(crate) use as_task;
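// Minimal illustration (hypothetical, not part of the e2e flow): `as_task`
// turns a plain function into one returning a TaskHandle backed by a thread.
#[cfg(test)]
mod as_task_example {
use macro_rules_attribute::apply;
use super::{as_task, TaskHandle};
#[apply(as_task)]
fn expensive_sum(a: u64, b: u64) -> u64 {
a + b
}
#[test]
fn joins_to_result() {
// `expensive_sum` now runs on its own thread; `join` blocks for the result.
assert_eq!(expensive_sum(1, 2).join(), 3);
}
}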
pub fn make_static(s: String) -> &'static str {
Box::leak(s.into_boxed_str())
@@ -27,62 +40,62 @@ pub fn concat_path(p1: impl AsRef<Path>, p2: impl AsRef<Path>) -> PathBuf {
p
}
pub type AgentHandles = (Child, TaskHandle<()>, TaskHandle<()>);
pub trait ArbitraryData: Send + Sync + 'static {}
impl<T: Send + Sync + 'static> ArbitraryData for T {}
pub type AgentHandles = (
// name
String,
// child process
Child,
// stdout
Box<dyn TaskHandle<Output = ()>>,
// stderr
Box<dyn TaskHandle<Output = ()>>,
// data to drop once program exits
Box<dyn ArbitraryData>,
);
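// Illustrative teardown (assumed caller-side code): the tuple is typically
// destructured when winding an agent down, e.g.
//     let (name, mut child, stdout_task, stderr_task, _resources) = agent;
//     stop_child(&mut child);
//     stdout_task.join_box();
//     stderr_task.join_box();
//     // `_resources` (e.g. temp dirs) is dropped only after the process exits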
pub type LogFilter = fn(&str) -> bool;
pub fn run_agent(args: ProgramArgs, log_prefix: &'static str, config: &Config) -> AgentHandles {
let mut command = args.create_command();
command.stdout(Stdio::piped()).stderr(Stdio::piped());
log!("Spawning {}...", &args);
let mut child = command
.spawn()
.unwrap_or_else(|e| panic!("Failed to start {:?} with error: {e}", &args));
let stdout_path = concat_path(&config.log_dir, format!("{log_prefix}.stdout.log"));
let child_stdout = child.stdout.take().unwrap();
let filter = args.get_filter();
let log_all = config.log_all;
let stdout = spawn(move || {
if log_all {
prefix_log(child_stdout, log_prefix, &RUN_LOG_WATCHERS, filter)
} else {
inspect_and_write_to_file(
child_stdout,
stdout_path,
&["ERROR", "message successfully processed"],
)
}
});
let stderr_path = concat_path(&config.log_dir, format!("{log_prefix}.stderr.log"));
let child_stderr = child.stderr.take().unwrap();
let stderr = spawn(move || {
if log_all {
prefix_log(child_stderr, log_prefix, &RUN_LOG_WATCHERS, filter)
} else {
inspect_and_write_to_file(child_stderr, stderr_path, &[])
}
});
(child, TaskHandle(stdout), TaskHandle(stderr))
#[must_use]
pub trait TaskHandle: Send {
type Output;
fn join(self) -> Self::Output;
fn join_box(self: Box<Self>) -> Self::Output;
}
/// Wrapper around a join handle to simplify use.
#[must_use]
pub struct TaskHandle<T>(pub JoinHandle<T>);
impl<T> TaskHandle<T> {
pub fn join(self) -> T {
pub struct SimpleTaskHandle<T>(pub JoinHandle<T>);
impl<T> TaskHandle for SimpleTaskHandle<T> {
type Output = T;
fn join(self) -> Self::Output {
self.0.join().expect("Task thread panicked!")
}
fn join_box(self: Box<Self>) -> T {
self.join()
}
}
pub fn build_cmd(
args: ProgramArgs,
log: impl AsRef<Path>,
log_all: bool,
assert_success: bool,
) -> TaskHandle<()> {
let log = log.as_ref().to_owned();
let handle = spawn(move || build_cmd_task(args, log, log_all, assert_success));
TaskHandle(handle)
#[must_use]
pub struct MappingTaskHandle<T, H: TaskHandle<Output = T>, U, F: FnOnce(T) -> U>(pub H, pub F);
impl<T, H, U, F> TaskHandle for MappingTaskHandle<T, H, U, F>
where
H: TaskHandle<Output = T>,
F: Send + FnOnce(T) -> U,
{
type Output = U;
fn join(self) -> Self::Output {
(self.1)(self.0.join())
}
fn join_box(self: Box<Self>) -> U {
self.join()
}
}
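// Illustrative only: wrap an existing handle to transform its output at
// `join()` time without spawning another thread, as `Program::run` does:
//     MappingTaskHandle(self.run_full(true, false), |_| ())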
/// Attempt to kindly signal a child to stop running, and kill it if that fails.
@@ -99,133 +112,3 @@ pub fn stop_child(child: &mut Child) {
}
};
}
/// Open a file in append mode, or create it if it does not exist.
fn append_to(p: impl AsRef<Path>) -> File {
File::options()
.create(true)
.append(true)
.open(p)
.expect("Failed to open file")
}
/// Read from a process output and add a string to the front before writing it
/// to stdout.
fn prefix_log(
output: impl Read,
prefix: &str,
run_log_watcher: &AtomicBool,
filter: Option<LogFilter>,
) {
let mut reader = BufReader::new(output).lines();
loop {
if let Some(line) = reader.next() {
let line = match line {
Ok(l) => l,
Err(e) => {
// end of stream, probably
log!("Error reading from output for {}: {}", prefix, e);
break;
}
};
if let Some(filter) = filter.as_ref() {
if !(filter)(&line) {
continue;
}
}
println!("<{prefix}> {line}");
} else if run_log_watcher.load(Ordering::Relaxed) {
sleep(Duration::from_millis(10));
} else {
break;
}
}
}
/// Basically `tail -f file | grep <FILTER>` but also has to write to the file
/// (writes to file all lines, not just what passes the filter).
fn inspect_and_write_to_file(output: impl Read, log: impl AsRef<Path>, filter_array: &[&str]) {
let mut writer = BufWriter::new(append_to(log));
let mut reader = BufReader::new(output).lines();
loop {
if let Some(line) = reader.next() {
let line = match line {
Ok(l) => l,
Err(e) => {
// end of stream, probably
log!("Error reading from output: {}", e);
break;
}
};
if filter_array.is_empty() {
println!("{line}")
} else {
for filter in filter_array {
if line.contains(filter) {
println!("{line}")
}
}
}
writeln!(writer, "{line}").unwrap();
} else if RUN_LOG_WATCHERS.load(Ordering::Relaxed) {
sleep(Duration::from_millis(10))
} else {
break;
}
}
}
fn build_cmd_task(args: ProgramArgs, log: PathBuf, log_all: bool, assert_success: bool) {
let mut command = args.create_command();
if log_all {
command.stdout(Stdio::piped());
} else {
command.stdout(append_to(log));
}
command.stderr(Stdio::piped());
log!("{:#}", &args);
let mut child = command
.spawn()
.unwrap_or_else(|e| panic!("Failed to start command `{}` with Error: {e}", &args));
let filter = args.get_filter();
let running = Arc::new(AtomicBool::new(true));
let stdout = if log_all {
let stdout = child.stdout.take().unwrap();
let name = args.get_bin_name();
let running = running.clone();
Some(spawn(move || prefix_log(stdout, &name, &running, filter)))
} else {
None
};
let stderr = {
let stderr = child.stderr.take().unwrap();
let name = args.get_bin_name();
let running = running.clone();
spawn(move || prefix_log(stderr, &name, &running, filter))
};
let status = loop {
sleep(Duration::from_millis(500));
if let Some(exit_status) = child.try_wait().expect("Failed to run command") {
break exit_status;
} else if SHUTDOWN.load(Ordering::Relaxed) {
log!("Forcing termination of command `{}`", &args);
stop_child(&mut child);
break child.wait().expect("Failed to run command");
}
};
running.store(false, Ordering::Relaxed);
if let Some(stdout) = stdout {
stdout.join().unwrap();
}
stderr.join().unwrap();
assert!(
!assert_success || !RUN_LOG_WATCHERS.load(Ordering::Relaxed) || status.success(),
"Command returned non-zero exit code: {:?}",
&args
);
}

@@ -1,166 +0,0 @@
#!/usr/bin/env bash
if [ -z "$SOLAR_ECLIPSE_DIR" ]; then
echo '$SOLAR_ECLIPSE_DIR must be set'
exit 1
fi
if [ -z "$ECLIPSE_PROGRAM_LIBRARY_DIR" ]; then
echo '$ECLIPSE_PROGRAM_LIBRARY_DIR must be set'
exit 1
fi
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
TEST_KEYS_DIR="${SCRIPT_DIR}/../config/sealevel/test-keys"
KEYPAIR="${TEST_KEYS_DIR}/test_deployer-keypair.json"
TARGET_DIR="${SCRIPT_DIR}/../target"
SEALEVEL_DIR="${SCRIPT_DIR}/../sealevel"
DEPLOY_DIR="${TARGET_DIR}/deploy"
BIN_DIR="${TARGET_DIR}/debug"
SPL_TOKEN="${ECLIPSE_PROGRAM_LIBRARY_DIR}/target/debug/spl-token"
CHAIN_ID="13375"
REMOTE_CHAIN_ID="13376"
# Ensure that the solar-eclipse `solana` binary is used
alias solana="${SOLAR_ECLIPSE_DIR}/target/debug/solana"
# first arg = path to .so file
# second arg = path to directory to build program in if the .so file doesn't exist
# third arg = whether to force build the program
build_program() {
if $3 || [ ! -e $1 ]; then
# .so file doesn't exist, build it
pushd "${2}"
cargo build-sbf
popd
fi
}
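# e.g., as used below for the SPL token program:
#   build_program "${ECLIPSE_PROGRAM_LIBRARY_DIR}/target/deploy/spl_token.so" "${ECLIPSE_PROGRAM_LIBRARY_DIR}/token/program" false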
# first arg = path to .so file
# second arg = path to directory to build program in if the .so file doesn't exist
build_and_copy_program() {
build_program $1 $2 $3
# essentially cp, but -u won't copy if the source is older than the destination.
# used as a workaround to prevent copying to the same destination as the source
rsync -u $1 $DEPLOY_DIR
}
build_programs() {
local force_build="${1}"
# token programs
build_program "${ECLIPSE_PROGRAM_LIBRARY_DIR}/target/deploy/spl_token.so" "${ECLIPSE_PROGRAM_LIBRARY_DIR}/token/program" "${force_build}"
build_program "${ECLIPSE_PROGRAM_LIBRARY_DIR}/target/deploy/spl_token_2022.so" "${ECLIPSE_PROGRAM_LIBRARY_DIR}/token/program-2022" "${force_build}"
build_program "${ECLIPSE_PROGRAM_LIBRARY_DIR}/target/deploy/spl_associated_token_account.so" "${ECLIPSE_PROGRAM_LIBRARY_DIR}/associated-token-account/program" "${force_build}"
# noop
build_program "${ECLIPSE_PROGRAM_LIBRARY_DIR}/account-compression/target/deploy/spl_noop.so" "${ECLIPSE_PROGRAM_LIBRARY_DIR}/account-compression/programs/noop" "${force_build}"
# hyperlane sealevel programs
build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_mailbox.so" "${SEALEVEL_DIR}/programs/mailbox" "${force_build}"
build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_validator_announce.so" "${SEALEVEL_DIR}/programs/validator-announce" "${force_build}"
build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_multisig_ism_message_id.so" "${SEALEVEL_DIR}/programs/ism/multisig-ism-message-id" "${force_build}"
build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_token.so" "${SEALEVEL_DIR}/programs/hyperlane-sealevel-token" "${force_build}"
build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_token_native.so" "${SEALEVEL_DIR}/programs/hyperlane-sealevel-token-native" "${force_build}"
build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_token_collateral.so" "${SEALEVEL_DIR}/programs/hyperlane-sealevel-token-collateral" "${force_build}"
}
build_spl_token_cli() {
if [ ! -e $SPL_TOKEN ]; then
pushd "${ECLIPSE_PROGRAM_LIBRARY_DIR}/token/cli"
cargo build
popd
fi
}
setup_multisig_ism_message_id() {
"${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" multisig-ism-message-id set-validators-and-threshold --domain "${CHAIN_ID}" --validators 0x70997970c51812dc3a010c7d01b50e0d17dc79c8 --threshold 1 --program-id "4RSV6iyqW9X66Xq3RDCVsKJ7hMba5uv6XP8ttgxjVUB1"
}
announce_validator() {
"${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" validator-announce announce --validator 0x70997970c51812dc3a010c7d01b50e0d17dc79c8 --storage-location "file:///tmp/test_sealevel_checkpoints_0x70997970c51812dc3a010c7d01b50e0d17dc79c8" --signature "0xcd87b715cd4c2e3448be9e34204cf16376a6ba6106e147a4965e26ea946dd2ab19598140bf26f1e9e599c23f6b661553c7d89e8db22b3609068c91eb7f0fa2f01b"
}
test_token() {
setup_multisig_ism_message_id
announce_validator
"${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" --compute-budget 200000 warp-route deploy --warp-route-name testwarproute --environment local-e2e --environments-dir "${SEALEVEL_DIR}/environments" --built-so-dir "${DEPLOY_DIR}" --token-config-file "${SEALEVEL_DIR}/environments/local-e2e/warp-routes/testwarproute/token-config.json" --chain-config-file "${SEALEVEL_DIR}/environments/local-e2e/warp-routes/chain-config.json" --ata-payer-funding-amount 1000000000
local token_type=""
local program_id=""
local recipient_token_type=""
local recipient_program_id=""
token_type="native"
program_id="CGn8yNtSD3aTTqJfYhUb6s1aVTN75NzwtsFKo1e83aga"
recipient_token_type="synthetic"
recipient_program_id="3MzUPjP5LEkiHH82nEAe28Xtz9ztuMqWc8UmuKxrpVQH"
local amount=10000000000 # lamports
local -r sender_keypair="${KEYPAIR}"
local -r sender="$(solana -ul -k "${sender_keypair}" address)"
local -r recipient="${sender}"
local -r sender_balance="$(solana -ul balance "${sender}" | cut -d ' ' -f 1)"
local -r amount_float="$(python -c "print(${amount} / 1000000000)")"
if (( $(bc -l <<< "${sender_balance} < ${amount_float}") )); then
echo "Insufficient sender funds"
exit 1
fi
solana -ul balance "${sender}"
# Transfer the lamports
"${BIN_DIR}/hyperlane-sealevel-client" \
-k "${KEYPAIR}" \
token transfer-remote "${sender_keypair}" "${amount}" "${REMOTE_CHAIN_ID}" "${recipient}" "${token_type}" --program-id "${program_id}"
# Wait for token transfer message to appear in the destination Mailbox.
# This ID was obtained manually by running the relayer and observing the logs - fragile, I know!
while "${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" mailbox delivered --message-id 0x7b8ba684e5ce44f898c5fa81785c83a00e32b5bef3412e648eb7a17bec497685 --program-id "9tCUWNjpqcf3NUSrtp7vquYVCwbEByvLjZUrhG5dgvhj" | grep -q 'Message not delivered'
do
sleep 3
done
solana -ul balance "${recipient}"
"${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" mailbox query
"${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" token query "${token_type}" --program-id "${program_id}"
}
main() {
if [ "${1}" = "build-only" ]; then
build_programs true
exit 0
fi
# build the client
pushd "${SCRIPT_DIR}/../sealevel/client"
cargo build
popd
# build all the required sealevel programs
if [ "${1}" = "force-build-programs" ]; then
build_programs true
else
build_programs false
fi
# build the SPL token CLI
build_spl_token_cli
"${BIN_DIR}/hyperlane-sealevel-client" --compute-budget 200000 -k "${KEYPAIR}" core deploy --local-domain "${CHAIN_ID}" --environment local-e2e --use-existing-keys --environments-dir "${SEALEVEL_DIR}/environments" --built-so-dir "${DEPLOY_DIR}" --chain sealeveltest1
"${BIN_DIR}/hyperlane-sealevel-client" --compute-budget 200000 -k "${KEYPAIR}" core deploy --local-domain "${REMOTE_CHAIN_ID}" --environment local-e2e --use-existing-keys --environments-dir "${SEALEVEL_DIR}/environments" --built-so-dir "${DEPLOY_DIR}" --chain sealeveltest2
test_token true
}
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
set -ex
main "$@"
fi