Solana e2e tests (#2578)
### Description Adds support for testing solana with the e2e tests. This involves 1) Downloading the pre-built solana cli tools 2) Setting up the solana configuration 3) Downloading the pre-built solana programs we need (see https://github.com/hyperlane-xyz/solana-program-library/releases/) 4) The solana programs in the repo 5) Start the solana validator 6) Deploy the solana programs 7) Deploy a warp route 8) Initialize the multisig ism and validator announce 9) Initialize a transfer 10) Wait for the message to be delivered In addition these were "woven" in with the existing E2E test logic, so for instance we only run one new validator and the existing relayer was extended to support these additional chains. This will make it much easier later on to support tests between environments. ### Drive-by changes - Fix a bug in the relayer when trying to index the mailbox - Add support in the hyperlane sealevel CLI for custom solana config paths - Fix a linker error on linux caused by not specifying `no-entrypoint` - Cleaned up unnecessary `no-entrypoint` features in libraries - Minor refactor to how we run commands to avoid arg passing - Created an easy way to define tasks that run asyncly `as_task` - Split up main logic flow into a couple files for easier reading - Use mold linker to speed up builds (well, the linking part) - Removed support for `log_all` to simplify code pathways - Added context when a child process ends unexpectedly for easier debugging - Fixed a bug in the validator where it would infinitely retry to send an announcement on failure without waiting - Cleaned up solana configs - Fixed processes hanging on exit (very annoying when testing locally since you have to manually go kill them) - Added stderr logging to the hyperlane sealevel CLI subprocess calls ### Related issues Fixes #2415 ### Backward compatibility Yes ### Testing Manual
parent
127294decc
commit
9ed3a24902
@ -1,15 +0,0 @@ |
||||
export BASE_CONFIG="sealevel.json" |
||||
export RUN_ENV="sealevel" |
||||
export HYP_BASE_DB="/tmp/SEALEVEL_DB/relayer" |
||||
export HYP_RELAYER_RELAYCHAINS="sealeveltest1,sealeveltest2" |
||||
export HYP_BASE_METRICS=9091 |
||||
export HYP_BASE_ALLOWLOCALCHECKPOINTSYNCERS=true |
||||
|
||||
# The first 32 bytes of test-keys/test_deployer-keypair.json as hexadecimal, |
||||
# which is the secret key. |
||||
export HYP_BASE_CHAINS_SEALEVELTEST1_SIGNER_KEY=892bf6949af4233e62f854cb3618bc1a3ee3341dc71ada08c4d5deca239acf4f |
||||
export HYP_BASE_CHAINS_SEALEVELTEST1_SIGNER_TYPE="hexKey" |
||||
export HYP_BASE_CHAINS_SEALEVELTEST2_SIGNER_KEY=892bf6949af4233e62f854cb3618bc1a3ee3341dc71ada08c4d5deca239acf4f |
||||
export HYP_BASE_CHAINS_SEALEVELTEST2_SIGNER_TYPE="hexKey" |
||||
|
||||
export HYP_BASE_TRACING_LEVEL="debug" |
@ -1,10 +0,0 @@ |
||||
export BASE_CONFIG="sealevel.json" |
||||
export RUN_ENV="sealevel" |
||||
export HYP_BASE_DB="/tmp/SEALEVEL_DB/validator" |
||||
export HYP_VALIDATOR_ORIGINCHAINNAME="sealeveltest1" |
||||
export HYP_VALIDATOR_VALIDATOR_KEY="59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d" |
||||
export HYP_VALIDATOR_VALIDATOR_TYPE="hexKey" |
||||
export HYP_VALIDATOR_REORGPERIOD="0" |
||||
export HYP_VALIDATOR_INTERVAL="1" |
||||
export HYP_VALIDATOR_CHECKPOINTSYNCER_TYPE="localStorage" |
||||
export HYP_VALIDATOR_CHECKPOINTSYNCER_PATH="/tmp/test_sealevel_checkpoints_0x70997970c51812dc3a010c7d01b50e0d17dc79c8" |
@ -1,49 +1,42 @@ |
||||
{ |
||||
"environment": "sealevel", |
||||
"chains": { |
||||
"sealeveltest1": { |
||||
"name": "SealevelTest1", |
||||
"domain": "13375", |
||||
"name": "sealeveltest1", |
||||
"domain": 13375, |
||||
"addresses": { |
||||
"mailbox": "692KZJaoe2KRcD6uhCQDLLXnLNA5ZLnfvdqjE4aX9iu1", |
||||
"interchainGasPaymaster": "FixmeFixmeFixmeFixmeFixmeFixmeFixmeFixmeFixm", |
||||
"validatorAnnounce": "DH43ae1LwemXAboWwSh8zc9pG8j72gKUEXNi57w8fEnn" |
||||
}, |
||||
"signer": null, |
||||
"protocol": "sealevel", |
||||
"finalityBlocks": "0", |
||||
"finalityBlocks": 0, |
||||
"connection": { |
||||
"type": "http", |
||||
"url": "http://localhost:8899" |
||||
}, |
||||
"index": { |
||||
"from": "1", |
||||
"from": 1, |
||||
"mode": "sequence" |
||||
} |
||||
}, |
||||
"sealeveltest2": { |
||||
"name": "SealevelTest2", |
||||
"domain": "13376", |
||||
"name": "sealeveltest2", |
||||
"domain": 13376, |
||||
"addresses": { |
||||
"mailbox": "9tCUWNjpqcf3NUSrtp7vquYVCwbEByvLjZUrhG5dgvhj", |
||||
"interchainGasPaymaster": "FixmeFixmeFixmeFixmeFixmeFixmeFixmeFixmeFixm", |
||||
"validatorAnnounce": "3Uo5j2Bti9aZtrDqJmAyuwiFaJFPFoNL5yxTpVCNcUhb" |
||||
}, |
||||
"signer": null, |
||||
"protocol": "sealevel", |
||||
"finalityBlocks": "0", |
||||
"finalityBlocks": 0, |
||||
"connection": { |
||||
"type": "http", |
||||
"url": "http://localhost:8899" |
||||
}, |
||||
"index": { |
||||
"from": "1", |
||||
"from": 1, |
||||
"mode": "sequence" |
||||
} |
||||
} |
||||
}, |
||||
"tracing": { |
||||
"level": "info", |
||||
"fmt": "pretty" |
||||
} |
||||
} |
@ -1,127 +0,0 @@ |
||||
# Hyperlane Sealevel (Solana VM) Integration |
||||
|
||||
# Running local end to end test |
||||
|
||||
A local end to end test has been written that will: |
||||
|
||||
1. Run a local Solana network |
||||
2. Deploy two sets of core contracts (i.e. Mailbox / Multisig ISM / ValidatorAnnounce) onto this chain, one with domain 13375 and the other 13376. |
||||
3. Deploy a "native" warp route on domain 13375 and a "synthetic" warp route on domain 13376 |
||||
4. Send native lamports from domain 13375 to 13376 |
||||
5. A validator & relayer can then be spun up to deliver the message |
||||
|
||||
### Build and run solana-test-validator |
||||
|
||||
This only needs to be done once when initially setting things up. |
||||
|
||||
1. Clone the `solar-eclipse` repo, which is the Eclipse fork of the Solana repo. This is needed to run the local Solana network. Check out the `steven/hyperlane-fix-deps` branch: |
||||
|
||||
``` |
||||
git clone git@github.com:Eclipse-Laboratories-Inc/solar-eclipse --branch steven/hyperlane-fix-deps |
||||
``` |
||||
|
||||
2. `cd` into the repo and build the `solana-test-validator` using the local `cargo` script (which ensures the correct version is used): |
||||
|
||||
``` |
||||
./cargo build -p solana-test-validator |
||||
``` |
||||
|
||||
### Check out `eclipse-program-library` |
||||
|
||||
This is a fork (with some dependency fixes) of the eclipse fork of the `solana-program-library`. This contains "SPL" programs that are commonly used programs - stuff like the token program, etc. |
||||
|
||||
Note these instructions previously required a different remote and branch - make sure to move to this remote & branch if you haven't already! |
||||
|
||||
1. Check out the branch `trevor/steven/eclipse-1.14.13/with-tlv-lib`: |
||||
|
||||
``` |
||||
git clone git@github.com:tkporter/eclipse-program-library.git --branch trevor/steven/eclipse-1.14.13/with-tlv-lib |
||||
``` |
||||
|
||||
### Build the required SPL programs and Hyperlane programs |
||||
|
||||
This command will build all the required SPL programs (e.g. the token program, token 2022 program, SPL noop, etc...) found in the local repo of `eclipse-program-library`, |
||||
and will build all the required Hyperlane programs (e.g. the Mailbox program, Validator Announce, etc...). |
||||
|
||||
You need to run this if any changes are made to programs that you want to be used in future runs of the end to end test. |
||||
|
||||
Change the paths to your local `solar-eclipse` repo and `eclipse-program-library` as necessary, and run this from the `rust` directory of hyperlane-monorepo. |
||||
|
||||
``` |
||||
SOLAR_ECLIPSE_DIR=~/solar-eclipse ECLIPSE_PROGRAM_LIBRARY_DIR=~/eclipse-program-library ./utils/sealevel-test.bash build-only |
||||
``` |
||||
|
||||
### Run the local Solana network |
||||
|
||||
This will run the `solana-test-validator` with a funded test account `E9VrvAdGRvCguN2XgXsgu9PNmMM3vZsU8LSUrM68j8ty` that will later be used for deploying contracts. It will also create some of the required SPL programs at the specified program IDs - these program IDs are consistent across Solana networks and are required by our Hyperlane programs. Change paths as necessary - the \*.so files should have been created by the prior command. The `--ledger` directory is arbitrary and is just the data dir for the Solana validator. |
||||
|
||||
``` |
||||
mkdir -p /tmp/eclipse/ledger-dir && target/debug/solana-test-validator --reset --ledger /tmp/eclipse/ledger-dir --account E9VrvAdGRvCguN2XgXsgu9PNmMM3vZsU8LSUrM68j8ty ~/abacus-monorepo/rust/config/sealevel/test-keys/test_deployer-account.json --bpf-program TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA ~/eclipse-program-library/target/deploy/spl_token.so --bpf-program TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb ~/eclipse-program-library/target/deploy/spl_token_2022.so --bpf-program ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL ~/eclipse-program-library/target/deploy/spl_associated_token_account.so --bpf-program noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV ~/eclipse-program-library/account-compression/target/deploy/spl_noop.so |
||||
``` |
||||
|
||||
By now you should have an output like this - keep it running and move to another terminal: |
||||
|
||||
``` |
||||
Ledger location: /tmp/eclipse/ledger-dir |
||||
Log: /tmp/eclipse/ledger-dir/validator.log |
||||
⠒ Initializing... |
||||
⠄ Initializing... |
||||
Identity: 4P5rtWdphhehU32myNQcTSMgrCRz7kdvZEnasX6fahJQ |
||||
Genesis Hash: G7CY7wEzbdjh8RwqTszxrpYTqiHKvqwpaw3JbmKJjJhU |
||||
Version: 1.14.13 |
||||
Shred Version: 419 |
||||
Gossip Address: 127.0.0.1:1024 |
||||
TPU Address: 127.0.0.1:1027 |
||||
JSON RPC URL: http://127.0.0.1:8899 |
||||
⠒ 00:05:35 | Processed Slot: 668 | Confirmed Slot: 668 | Finalized Slot: 6 |
||||
``` |
||||
|
||||
### Run the local end to end script |
||||
|
||||
Run the script found at `rust/utils/sealevel-test.bash`. This will build all required programs, deploy contracts, and test sending a warp route message. You need to supply the paths to your local `solar-eclipse` and `eclipse-program-library` repos: |
||||
|
||||
``` |
||||
SOLAR_ECLIPSE_DIR=~/solar-eclipse ECLIPSE_PROGRAM_LIBRARY_DIR=~/eclipse-program-library ./utils/sealevel-test.bash |
||||
``` |
||||
|
||||
Note: this won't rebuild any of the programs. If you want to rebuild them, you can either cd into them individually and run `cargo build-sbf --arch sbf`, or you can run the above bash script with `force-build-programs` as the first argument. |
||||
|
||||
You'll see a bunch of output here showing programs being built and deployed. Eventually you should see some logs saying `grep -q 'Message not delivered'`. At this point, the contracts have all been deployed and a native warp route transfer has been made. You can move on to running the validator and relayer. |
||||
|
||||
### Running the validator |
||||
|
||||
In a separate terminal, cd to `hyperlane-monorepo/rust`. |
||||
|
||||
1. Source the env vars: |
||||
|
||||
``` |
||||
source ./config/sealevel/validator.env |
||||
``` |
||||
|
||||
2. Run the validator (this clears the DB / checkpoints if present): |
||||
|
||||
``` |
||||
mkdir /tmp/SEALEVEL_DB ; rm -rf /tmp/SEALEVEL_DB/validator /tmp/test_sealevel_checkpoints_0x70997970c51812dc3a010c7d01b50e0d17dc79c8/* ; CONFIG_FILES=./config/sealevel/sealevel.json cargo run --bin validator |
||||
``` |
||||
|
||||
You should see some INFO logs about checkpoint at index 0. |
||||
|
||||
You can confirm things are working correctly by looking at `/tmp/CHECKPOINTS_DIR`, where the validator posts its signatures. |
||||
|
||||
### Running the relayer |
||||
|
||||
In a separate terminal, again in `hyperlane-monorepo/rust`: |
||||
|
||||
1. Source the env vars: |
||||
|
||||
``` |
||||
source ./config/sealevel/relayer.env |
||||
``` |
||||
|
||||
2. Run the relayer (the rm is to make sure the relayer's DB is cleared): |
||||
|
||||
``` |
||||
rm -rf /tmp/SEALEVEL_DB/relayer ; RUST_BACKTRACE=full CONFIG_FILES=./config/sealevel/sealevel.json cargo run --bin relayer |
||||
``` |
||||
|
||||
When the original `sealevel-test.bash` exits with a 0 exit code and some logs about Hyperlane Token Storage, the message has been successfully delivered! |
@ -1,10 +1,10 @@ |
||||
{ |
||||
"sealeveltest1": { |
||||
"hex": "0xa77b4e2ed231894cc8cb8eee21adcc705d8489bccc6b2fcf40a358de23e60b7b", |
||||
"base58": "CGn8yNtSD3aTTqJfYhUb6s1aVTN75NzwtsFKo1e83aga" |
||||
}, |
||||
"sealeveltest2": { |
||||
"hex": "0x2317f9615d4ebc2419ad4b88580e2a80a03b2c7a60bc960de7d6934dbc37a87e", |
||||
"base58": "3MzUPjP5LEkiHH82nEAe28Xtz9ztuMqWc8UmuKxrpVQH" |
||||
}, |
||||
"sealeveltest1": { |
||||
"hex": "0xa77b4e2ed231894cc8cb8eee21adcc705d8489bccc6b2fcf40a358de23e60b7b", |
||||
"base58": "CGn8yNtSD3aTTqJfYhUb6s1aVTN75NzwtsFKo1e83aga" |
||||
} |
||||
} |
@ -0,0 +1,57 @@ |
||||
use std::sync::Arc; |
||||
use std::thread::sleep; |
||||
use std::time::Duration; |
||||
|
||||
use macro_rules_attribute::apply; |
||||
|
||||
use crate::config::Config; |
||||
use crate::logging::log; |
||||
use crate::program::Program; |
||||
use crate::utils::{as_task, AgentHandles, TaskHandle}; |
||||
use crate::{INFRA_PATH, MONOREPO_ROOT_PATH, TS_SDK_PATH}; |
||||
|
||||
#[apply(as_task)] |
||||
pub fn start_anvil(config: Arc<Config>) -> AgentHandles { |
||||
log!("Installing typescript dependencies..."); |
||||
let yarn_monorepo = Program::new("yarn").working_dir(MONOREPO_ROOT_PATH); |
||||
yarn_monorepo.clone().cmd("install").run().join(); |
||||
if !config.is_ci_env { |
||||
// don't need to clean in the CI
|
||||
yarn_monorepo.clone().cmd("clean").run().join(); |
||||
} |
||||
yarn_monorepo.clone().cmd("build").run().join(); |
||||
|
||||
log!("Launching anvil..."); |
||||
let anvil_args = Program::new("anvil").flag("silent").filter_logs(|_| false); // for now do not keep any of the anvil logs
|
||||
let anvil = anvil_args.spawn("ETH"); |
||||
|
||||
sleep(Duration::from_secs(10)); |
||||
|
||||
let yarn_infra = Program::new("yarn") |
||||
.working_dir(INFRA_PATH) |
||||
.env("ALLOW_LEGACY_MULTISIG_ISM", "true"); |
||||
log!("Deploying hyperlane ism contracts..."); |
||||
yarn_infra.clone().cmd("deploy-ism").run().join(); |
||||
|
||||
log!("Rebuilding sdk..."); |
||||
let yarn_sdk = Program::new("yarn").working_dir(TS_SDK_PATH); |
||||
yarn_sdk.clone().cmd("build").run().join(); |
||||
|
||||
log!("Deploying hyperlane core contracts..."); |
||||
yarn_infra.clone().cmd("deploy-core").run().join(); |
||||
|
||||
log!("Deploying hyperlane igp contracts..."); |
||||
yarn_infra.cmd("deploy-igp").run().join(); |
||||
|
||||
if !config.is_ci_env { |
||||
// Follow-up 'yarn hardhat node' invocation with 'yarn prettier' to fixup
|
||||
// formatting on any autogenerated json config files to avoid any diff creation.
|
||||
yarn_monorepo.cmd("prettier").run().join(); |
||||
} |
||||
|
||||
// Rebuild the SDK to pick up the deployed contracts
|
||||
log!("Rebuilding sdk..."); |
||||
yarn_sdk.cmd("build").run().join(); |
||||
|
||||
anvil |
||||
} |
@ -0,0 +1,117 @@ |
||||
use std::path::Path; |
||||
|
||||
use crate::config::Config; |
||||
use maplit::hashmap; |
||||
|
||||
use crate::fetch_metric; |
||||
use crate::logging::log; |
||||
use crate::solana::solana_termination_invariants_met; |
||||
|
||||
/// Use the metrics to check if the relayer queues are empty and the expected
|
||||
/// number of messages have been sent.
|
||||
pub fn termination_invariants_met( |
||||
config: &Config, |
||||
solana_cli_tools_path: &Path, |
||||
solana_config_path: &Path, |
||||
) -> eyre::Result<bool> { |
||||
let eth_messages_expected = (config.kathy_messages / 2) as u32 * 2; |
||||
let sol_messages_expected = 1; |
||||
let total_messages_expected = eth_messages_expected + sol_messages_expected; |
||||
|
||||
let lengths = fetch_metric("9092", "hyperlane_submitter_queue_length", &hashmap! {})?; |
||||
assert!(!lengths.is_empty(), "Could not find queue length metric"); |
||||
if lengths.into_iter().any(|n| n != 0) { |
||||
log!("Relayer queues not empty"); |
||||
return Ok(false); |
||||
}; |
||||
|
||||
// Also ensure the counter is as expected (total number of messages), summed
|
||||
// across all mailboxes.
|
||||
let msg_processed_count = |
||||
fetch_metric("9092", "hyperlane_messages_processed_count", &hashmap! {})? |
||||
.iter() |
||||
.sum::<u32>(); |
||||
if msg_processed_count != total_messages_expected { |
||||
log!( |
||||
"Relayer has {} processed messages, expected {}", |
||||
msg_processed_count, |
||||
total_messages_expected |
||||
); |
||||
return Ok(false); |
||||
} |
||||
|
||||
let gas_payment_events_count = fetch_metric( |
||||
"9092", |
||||
"hyperlane_contract_sync_stored_events", |
||||
&hashmap! {"data_type" => "gas_payments"}, |
||||
)? |
||||
.iter() |
||||
.sum::<u32>(); |
||||
// TestSendReceiver randomly breaks gas payments up into
|
||||
// two. So we expect at least as many gas payments as messages.
|
||||
if gas_payment_events_count < total_messages_expected { |
||||
log!( |
||||
"Relayer has {} gas payment events, expected at least {}", |
||||
gas_payment_events_count, |
||||
total_messages_expected |
||||
); |
||||
return Ok(false); |
||||
} |
||||
|
||||
if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) { |
||||
log!("Solana termination invariants not met"); |
||||
return Ok(false); |
||||
} |
||||
|
||||
let dispatched_messages_scraped = fetch_metric( |
||||
"9093", |
||||
"hyperlane_contract_sync_stored_events", |
||||
&hashmap! {"data_type" => "message_dispatch"}, |
||||
)? |
||||
.iter() |
||||
.sum::<u32>(); |
||||
if dispatched_messages_scraped != eth_messages_expected { |
||||
log!( |
||||
"Scraper has scraped {} dispatched messages, expected {}", |
||||
dispatched_messages_scraped, |
||||
eth_messages_expected |
||||
); |
||||
return Ok(false); |
||||
} |
||||
|
||||
let gas_payments_scraped = fetch_metric( |
||||
"9093", |
||||
"hyperlane_contract_sync_stored_events", |
||||
&hashmap! {"data_type" => "gas_payment"}, |
||||
)? |
||||
.iter() |
||||
.sum::<u32>(); |
||||
// The relayer and scraper should have the same number of gas payments.
|
||||
if gas_payments_scraped != gas_payment_events_count { |
||||
log!( |
||||
"Scraper has scraped {} gas payments, expected {}", |
||||
gas_payments_scraped, |
||||
eth_messages_expected |
||||
); |
||||
return Ok(false); |
||||
} |
||||
|
||||
let delivered_messages_scraped = fetch_metric( |
||||
"9093", |
||||
"hyperlane_contract_sync_stored_events", |
||||
&hashmap! {"data_type" => "message_delivery"}, |
||||
)? |
||||
.iter() |
||||
.sum::<u32>(); |
||||
if delivered_messages_scraped != eth_messages_expected { |
||||
log!( |
||||
"Scraper has scraped {} delivered messages, expected {}", |
||||
delivered_messages_scraped, |
||||
eth_messages_expected |
||||
); |
||||
return Ok(false); |
||||
} |
||||
|
||||
log!("Termination invariants have been meet"); |
||||
Ok(true) |
||||
} |
@ -0,0 +1,346 @@ |
||||
use std::collections::BTreeMap; |
||||
use std::ffi::OsStr; |
||||
use std::fmt::{Debug, Display, Formatter}; |
||||
use std::io::{BufRead, BufReader, Read}; |
||||
use std::path::{Path, PathBuf}; |
||||
use std::process::{Command, Stdio}; |
||||
use std::sync::atomic::{AtomicBool, Ordering}; |
||||
use std::sync::mpsc::Sender; |
||||
use std::sync::{mpsc, Arc}; |
||||
use std::thread::{sleep, spawn}; |
||||
use std::time::Duration; |
||||
|
||||
use eyre::Context; |
||||
use macro_rules_attribute::apply; |
||||
|
||||
use crate::logging::log; |
||||
use crate::utils::{ |
||||
as_task, stop_child, AgentHandles, ArbitraryData, LogFilter, MappingTaskHandle, |
||||
SimpleTaskHandle, TaskHandle, |
||||
}; |
||||
use crate::{RUN_LOG_WATCHERS, SHUTDOWN}; |
||||
|
||||
#[derive(Default, Clone)] |
||||
#[must_use] |
||||
pub struct Program { |
||||
bin: Option<Arc<String>>, |
||||
args: Vec<Arc<String>>, |
||||
env: BTreeMap<Arc<String>, Arc<String>>, |
||||
working_dir: Option<Arc<PathBuf>>, |
||||
log_filter: Option<LogFilter>, |
||||
arbitrary_data: Vec<Arc<dyn ArbitraryData>>, |
||||
} |
||||
|
||||
impl Debug for Program { |
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { |
||||
f.debug_struct("Program") |
||||
.field("bin", &self.bin) |
||||
.field("args", &self.args) |
||||
.field("env", &self.env) |
||||
.field("working_dir", &self.working_dir) |
||||
.field("log_filter", &self.log_filter.is_some()) |
||||
.finish() |
||||
} |
||||
} |
||||
|
||||
impl Display for Program { |
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { |
||||
if f.alternate() { |
||||
let wd = self |
||||
.working_dir |
||||
.as_ref() |
||||
.map(|wd| wd.display()) |
||||
.unwrap_or_else(|| Path::new("./").display()); |
||||
write!(f, "({wd})$ ")?; |
||||
|
||||
for (k, v) in &self.env { |
||||
write!(f, "{k}={v} ")?; |
||||
} |
||||
|
||||
if let Some(path_result) = self.get_bin_path() { |
||||
if let Ok(bp) = path_result { |
||||
write!(f, "{}", bp.display())?; |
||||
} else { |
||||
write!(f, "{}", self.bin.as_ref().unwrap())?; |
||||
} |
||||
} else { |
||||
write!(f, "???")?; |
||||
} |
||||
|
||||
for a in &self.args { |
||||
write!(f, " {a}")?; |
||||
} |
||||
|
||||
Ok(()) |
||||
} else { |
||||
write!( |
||||
f, |
||||
"{}", |
||||
self.bin.as_deref().map(String::as_str).unwrap_or("???") |
||||
) |
||||
} |
||||
} |
||||
} |
||||
|
||||
impl Program { |
||||
pub fn new(bin: impl AsRef<OsStr>) -> Self { |
||||
Self::default().bin(bin) |
||||
} |
||||
|
||||
pub fn bin(mut self, bin: impl AsRef<OsStr>) -> Self { |
||||
self.bin = Some( |
||||
bin.as_ref() |
||||
.to_str() |
||||
.expect("Invalid string encoding for binary name") |
||||
.to_owned() |
||||
.into(), |
||||
); |
||||
self |
||||
} |
||||
|
||||
pub fn raw_arg(mut self, arg: impl Into<String>) -> Self { |
||||
self.args.push(arg.into().into()); |
||||
self |
||||
} |
||||
|
||||
pub fn cmd(self, cmd: impl Into<String>) -> Self { |
||||
let cmd = cmd.into(); |
||||
debug_assert!(!cmd.starts_with('-'), "arg should not start with -"); |
||||
self.raw_arg(cmd) |
||||
} |
||||
|
||||
pub fn flag(self, arg: impl AsRef<str>) -> Self { |
||||
debug_assert!( |
||||
!arg.as_ref().starts_with('-'), |
||||
"arg should not start with -" |
||||
); |
||||
self.raw_arg(format!("--{}", arg.as_ref())) |
||||
} |
||||
|
||||
/// Assumes an arg in the format of `--$ARG1 $ARG2`, arg1 and arg2 should exclude quoting, equal sign, and the leading hyphens.
|
||||
pub fn arg(self, arg1: impl AsRef<str>, arg2: impl Into<String>) -> Self { |
||||
self.flag(arg1).cmd(arg2) |
||||
} |
||||
|
||||
/// Assumes an arg in the format of `--$ARG1 $ARG2 $ARG3`, args should exclude quoting, equal sign, and the leading hyphens.
|
||||
pub fn arg3( |
||||
self, |
||||
arg1: impl AsRef<str>, |
||||
arg2: impl Into<String>, |
||||
arg3: impl Into<String>, |
||||
) -> Self { |
||||
self.flag(arg1).cmd(arg2).cmd(arg3) |
||||
} |
||||
|
||||
/// add an env that will be prefixed with the default hyperlane env prefix
|
||||
pub fn hyp_env(self, key: impl AsRef<str>, value: impl Into<String>) -> Self { |
||||
const PREFIX: &str = "HYP_BASE_"; |
||||
let key = key.as_ref(); |
||||
debug_assert!( |
||||
!key.starts_with(PREFIX), |
||||
"env key should not start with prefix that is being added" |
||||
); |
||||
self.env(format!("{PREFIX}{key}"), value) |
||||
} |
||||
|
||||
/// add a system env that makes no prefix assumptions
|
||||
pub fn env(mut self, key: impl Into<String>, value: impl Into<String>) -> Self { |
||||
self.env.insert(key.into().into(), value.into().into()); |
||||
self |
||||
} |
||||
|
||||
pub fn working_dir(mut self, path: impl Into<PathBuf>) -> Self { |
||||
self.working_dir = Some(path.into().into()); |
||||
self |
||||
} |
||||
|
||||
/// Filter logs being printed to stdout/stderr. If the LogFilter returns true,
|
||||
/// then it will keep that log line, if it returns false it will discard it.
|
||||
/// This is ignored when logging to files.
|
||||
pub fn filter_logs(mut self, filter: LogFilter) -> Self { |
||||
self.log_filter = Some(filter); |
||||
self |
||||
} |
||||
|
||||
/// Remember some arbitrary data until either this program args goes out of scope or until the
|
||||
/// agent/child process exits. This is useful for preventing something from dropping.
|
||||
pub fn remember(mut self, data: impl ArbitraryData) -> Self { |
||||
self.arbitrary_data.push(Arc::new(data)); |
||||
self |
||||
} |
||||
|
||||
pub fn create_command(&self) -> Command { |
||||
let mut cmd = Command::new( |
||||
self.get_bin_path() |
||||
.expect("bin path must be specified") |
||||
.unwrap(), |
||||
); |
||||
if let Some(wd) = &self.working_dir { |
||||
cmd.current_dir(wd.as_path()); |
||||
} |
||||
for (k, v) in self.env.iter() { |
||||
cmd.env(k.as_str(), v.as_str()); |
||||
} |
||||
cmd.args(self.args.iter().map(AsRef::as_ref)); |
||||
cmd |
||||
} |
||||
|
||||
pub fn get_filter(&self) -> Option<LogFilter> { |
||||
self.log_filter |
||||
} |
||||
|
||||
/// Try to get the path to the binary
|
||||
pub fn get_bin_path(&self) -> Option<eyre::Result<PathBuf>> { |
||||
self.bin.as_ref().map(|raw_bin_name| { |
||||
which::which(raw_bin_name.as_ref()) |
||||
.with_context(|| format!("Cannot find binary: {raw_bin_name}")) |
||||
}) |
||||
} |
||||
|
||||
/// Get just the name component of the binary
|
||||
pub fn get_bin_name(&self) -> String { |
||||
Path::new( |
||||
self.bin |
||||
.as_ref() |
||||
.expect("bin path must be specified") |
||||
.as_str(), |
||||
) |
||||
.file_name() |
||||
.expect("bin must have a file name") |
||||
.to_str() |
||||
.unwrap() |
||||
.to_owned() |
||||
} |
||||
|
||||
pub fn get_memory(&self) -> Box<dyn ArbitraryData> { |
||||
Box::new(self.arbitrary_data.clone()) |
||||
} |
||||
|
||||
#[allow(dead_code)] |
||||
pub fn run(self) -> impl TaskHandle<Output = ()> { |
||||
MappingTaskHandle(self.run_full(true, false), |_| ()) |
||||
} |
||||
|
||||
#[allow(dead_code)] |
||||
pub fn run_ignore_code(self) -> impl TaskHandle<Output = ()> { |
||||
MappingTaskHandle(self.run_full(false, false), |_| ()) |
||||
} |
||||
|
||||
#[allow(dead_code)] |
||||
pub fn run_with_output(self) -> impl TaskHandle<Output = Vec<String>> { |
||||
MappingTaskHandle(self.run_full(false, true), |o| { |
||||
o.expect("Command did not return output") |
||||
}) |
||||
} |
||||
|
||||
pub fn spawn(self, log_prefix: &'static str) -> AgentHandles { |
||||
let mut command = self.create_command(); |
||||
command.stdout(Stdio::piped()).stderr(Stdio::piped()); |
||||
|
||||
log!("Spawning {}...", &self); |
||||
let mut child = command |
||||
.spawn() |
||||
.unwrap_or_else(|e| panic!("Failed to start {:?} with error: {e}", &self)); |
||||
let child_stdout = child.stdout.take().unwrap(); |
||||
let filter = self.get_filter(); |
||||
let stdout = |
||||
spawn(move || prefix_log(child_stdout, log_prefix, &RUN_LOG_WATCHERS, filter, None)); |
||||
let child_stderr = child.stderr.take().unwrap(); |
||||
let stderr = |
||||
spawn(move || prefix_log(child_stderr, log_prefix, &RUN_LOG_WATCHERS, filter, None)); |
||||
( |
||||
log_prefix.to_owned(), |
||||
child, |
||||
Box::new(SimpleTaskHandle(stdout)), |
||||
Box::new(SimpleTaskHandle(stderr)), |
||||
self.get_memory(), |
||||
) |
||||
} |
||||
|
||||
#[apply(as_task)] |
||||
fn run_full(self, assert_success: bool, capture_output: bool) -> Option<Vec<String>> { |
||||
let mut command = self.create_command(); |
||||
command.stdout(Stdio::piped()); |
||||
command.stderr(Stdio::piped()); |
||||
|
||||
log!("{:#}", &self); |
||||
let mut child = command |
||||
.spawn() |
||||
.unwrap_or_else(|e| panic!("Failed to start command `{}` with Error: {e}", &self)); |
||||
let filter = self.get_filter(); |
||||
let running = Arc::new(AtomicBool::new(true)); |
||||
let (stdout_ch_tx, stdout_ch_rx) = capture_output.then(mpsc::channel).unzip(); |
||||
let stdout = { |
||||
let stdout = child.stdout.take().unwrap(); |
||||
let name = self.get_bin_name(); |
||||
let running = running.clone(); |
||||
spawn(move || prefix_log(stdout, &name, &running, filter, stdout_ch_tx)) |
||||
}; |
||||
let stderr = { |
||||
let stderr = child.stderr.take().unwrap(); |
||||
let name = self.get_bin_name(); |
||||
let running = running.clone(); |
||||
spawn(move || prefix_log(stderr, &name, &running, filter, None)) |
||||
}; |
||||
|
||||
let status = loop { |
||||
sleep(Duration::from_millis(500)); |
||||
|
||||
if let Some(exit_status) = child.try_wait().expect("Failed to run command") { |
||||
break exit_status; |
||||
} else if SHUTDOWN.load(Ordering::Relaxed) { |
||||
log!("Forcing termination of command `{}`", &self); |
||||
stop_child(&mut child); |
||||
break child.wait().expect("Failed to run command"); |
||||
} |
||||
}; |
||||
|
||||
running.store(false, Ordering::Relaxed); |
||||
stdout.join().unwrap(); |
||||
stderr.join().unwrap(); |
||||
assert!( |
||||
!assert_success || !RUN_LOG_WATCHERS.load(Ordering::Relaxed) || status.success(), |
||||
"Command returned non-zero exit code: {:?}", |
||||
&self |
||||
); |
||||
|
||||
stdout_ch_rx.map(|rx| rx.into_iter().collect()) |
||||
} |
||||
} |
||||
|
||||
/// Read from a process output and add a string to the front before writing it to stdout.
|
||||
fn prefix_log( |
||||
output: impl Read, |
||||
prefix: &str, |
||||
run_log_watcher: &AtomicBool, |
||||
filter: Option<LogFilter>, |
||||
channel: Option<Sender<String>>, |
||||
) { |
||||
let mut reader = BufReader::new(output).lines(); |
||||
loop { |
||||
if let Some(line) = reader.next() { |
||||
let line = match line { |
||||
Ok(l) => l, |
||||
Err(e) => { |
||||
// end of stream, probably
|
||||
log!("Error reading from output for {}: {}", prefix, e); |
||||
break; |
||||
} |
||||
}; |
||||
if let Some(filter) = filter.as_ref() { |
||||
if !(filter)(&line) { |
||||
continue; |
||||
} |
||||
} |
||||
println!("<{prefix}> {line}"); |
||||
if let Some(channel) = &channel { |
||||
// ignore send errors
|
||||
channel.send(line).unwrap_or(()); |
||||
} |
||||
} else if run_log_watcher.load(Ordering::Relaxed) { |
||||
sleep(Duration::from_millis(10)); |
||||
} else { |
||||
break; |
||||
} |
||||
} |
||||
} |
@ -0,0 +1,342 @@ |
||||
use std::fs; |
||||
use std::path::{Path, PathBuf}; |
||||
use std::thread::sleep; |
||||
use std::time::Duration; |
||||
|
||||
use macro_rules_attribute::apply; |
||||
use tempfile::{tempdir, NamedTempFile}; |
||||
|
||||
use crate::logging::log; |
||||
use crate::program::Program; |
||||
use crate::utils::{as_task, concat_path, AgentHandles, ArbitraryData, TaskHandle}; |
||||
use crate::AGENT_BIN_PATH; |
||||
|
||||
// Solana program tuples of:
// 0: Solana address or keypair for the bpf program
// 1: Name of the program's shared object file
const SOLANA_PROGRAMS: &[(&str, &str)] = &[
    (
        "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA",
        "spl_token.so",
    ),
    (
        "TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb",
        "spl_token_2022.so",
    ),
    (
        "ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL",
        "spl_associated_token_account.so",
    ),
    ("noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV", "spl_noop.so"),
];

// Keypair used to sign deployments and the test transfer.
const SOLANA_KEYPAIR: &str = "config/test-sealevel-keys/test_deployer-keypair.json";
// Account file loaded into the test validator via `--account` at startup.
const SOLANA_DEPLOYER_ACCOUNT: &str = "config/test-sealevel-keys/test_deployer-account.json";

// Directory that built/downloaded `.so` program artifacts are placed in and
// deployed from (passed as `--built-so-dir` and `SBF_OUT_PATH`).
const SBF_OUT_PATH: &str = "target/dist";

// Relative paths to solana program source code within rust/sealevel/programs repo.
const SOLANA_HYPERLANE_PROGRAMS: &[&str] = &[
    "mailbox",
    "validator-announce",
    "ism/multisig-ism-message-id",
    "hyperlane-sealevel-token",
    "hyperlane-sealevel-token-native",
    "hyperlane-sealevel-token-collateral",
];

// Prebuilt SPL program archive, downloaded instead of building from source.
const SOLANA_PROGRAM_LIBRARY_ARCHIVE: &str =
    "https://github.com/hyperlane-xyz/solana-program-library/releases/download/2023-07-27-01/spl.tar.gz";

// Domain ids of the two local sealevel test chains (sealeveltest1/2).
const SOLANA_LOCAL_CHAIN_ID: &str = "13375";
const SOLANA_REMOTE_CHAIN_ID: &str = "13376";

/// The Solana CLI tool version to download and use.
const SOLANA_CLI_VERSION: &str = "1.14.20";

// TODO: use a temp dir instead!
// Checkpoint storage location announced by the test validator
// (used as `file://{...}` in the validator-announce call below).
pub const SOLANA_CHECKPOINT_LOCATION: &str =
    "/tmp/test_sealevel_checkpoints_0x70997970c51812dc3a010c7d01b50e0d17dc79c8";
||||
|
||||
// Install the CLI tools and return the path to the bin dir.
|
||||
#[apply(as_task)] |
||||
pub fn install_solana_cli_tools() -> (PathBuf, impl ArbitraryData) { |
||||
let solana_download_dir = tempdir().unwrap(); |
||||
let solana_tools_dir = tempdir().unwrap(); |
||||
log!("Downloading solana cli release v{}", SOLANA_CLI_VERSION); |
||||
let solana_release_name = { |
||||
// best effort ot pick one of the supported targets
|
||||
let target = if cfg!(target_os = "linux") { |
||||
"x86_64-unknown-linux-gnu" |
||||
} else if cfg!(target_os = "macos") { |
||||
if cfg!(target_arch = "aarch64") { |
||||
"aarch64-apple-darwin" |
||||
} else { |
||||
"x86_64-apple-darwin" |
||||
} |
||||
} else if cfg!(target_os = "windows") { |
||||
"pc-windows-msvc" |
||||
} else { |
||||
panic!("Current os is not supported by solana") |
||||
}; |
||||
format!("solana-release-{target}") |
||||
}; |
||||
let solana_archive_name = format!("{solana_release_name}.tar.bz2"); |
||||
|
||||
Program::new("curl") |
||||
.arg("output", &solana_archive_name) |
||||
.flag("location") |
||||
.cmd(format!("https://github.com/solana-labs/solana/releases/download/v{SOLANA_CLI_VERSION}/{solana_archive_name}")) |
||||
.flag("silent") |
||||
.working_dir(solana_download_dir.as_ref().to_str().unwrap()) |
||||
.run() |
||||
.join(); |
||||
log!("Uncompressing solana release"); |
||||
|
||||
Program::new("tar") |
||||
.flag("extract") |
||||
.arg("file", &solana_archive_name) |
||||
.working_dir(solana_download_dir.as_ref().to_str().unwrap()) |
||||
.run() |
||||
.join(); |
||||
|
||||
fs::rename( |
||||
concat_path(&solana_download_dir, "solana-release"), |
||||
&solana_tools_dir, |
||||
) |
||||
.expect("Failed to move solana-release dir"); |
||||
(concat_path(&solana_tools_dir, "bin"), solana_tools_dir) |
||||
} |
||||
|
||||
#[apply(as_task)] |
||||
pub fn build_solana_programs(solana_cli_tools_path: PathBuf) -> PathBuf { |
||||
let out_path = Path::new(SBF_OUT_PATH); |
||||
if out_path.exists() { |
||||
fs::remove_dir_all(out_path).expect("Failed to remove solana program deploy dir"); |
||||
} |
||||
fs::create_dir_all(out_path).expect("Failed to create solana program deploy dir"); |
||||
let out_path = out_path.canonicalize().unwrap(); |
||||
|
||||
Program::new("curl") |
||||
.arg("output", "spl.tar.gz") |
||||
.flag("location") |
||||
.cmd(SOLANA_PROGRAM_LIBRARY_ARCHIVE) |
||||
.flag("silent") |
||||
.working_dir(&out_path) |
||||
.run() |
||||
.join(); |
||||
log!("Uncompressing solana programs"); |
||||
|
||||
Program::new("tar") |
||||
.flag("extract") |
||||
.arg("file", "spl.tar.gz") |
||||
.working_dir(&out_path) |
||||
.run() |
||||
.join(); |
||||
log!("Remove temporary solana files"); |
||||
fs::remove_file(concat_path(&out_path, "spl.tar.gz")) |
||||
.expect("Failed to remove solana program archive"); |
||||
|
||||
let build_sbf = Program::new( |
||||
concat_path(&solana_cli_tools_path, "cargo-build-sbf") |
||||
.to_str() |
||||
.unwrap(), |
||||
) |
||||
.env("PATH", updated_path(&solana_cli_tools_path)) |
||||
.env("SBF_OUT_PATH", out_path.to_str().unwrap()); |
||||
|
||||
// build our programs
|
||||
for &path in SOLANA_HYPERLANE_PROGRAMS { |
||||
build_sbf |
||||
.clone() |
||||
.working_dir(concat_path("sealevel/programs", path)) |
||||
.run() |
||||
.join(); |
||||
} |
||||
log!("All hyperlane solana programs built successfully"); |
||||
out_path |
||||
} |
||||
|
||||
/// Start a local `solana-test-validator` preloaded with the SPL programs and
/// the test deployer account, then deploy and initialize the hyperlane
/// programs on it: core programs for both test chains, the test warp route,
/// the multisig ISM validator set, and a validator announcement.
///
/// Returns the path of the generated solana config file and the handles of
/// the spawned validator process.
#[apply(as_task)]
pub fn start_solana_test_validator(
    solana_cli_tools_path: PathBuf,
    solana_programs_path: PathBuf,
    ledger_dir: PathBuf,
) -> (PathBuf, AgentHandles) {
    // init solana config
    let solana_config = NamedTempFile::new().unwrap().into_temp_path();
    let solana_config_path = solana_config.to_path_buf();
    Program::new(concat_path(&solana_cli_tools_path, "solana"))
        .arg("config", solana_config.to_str().unwrap())
        .cmd("config")
        .cmd("set")
        .arg("url", "localhost")
        .run()
        .join();

    log!("Starting solana validator");
    let mut args = Program::new(concat_path(&solana_cli_tools_path, "solana-test-validator"))
        .flag("quiet")
        .flag("reset")
        .arg("ledger", ledger_dir.to_str().unwrap())
        // load the test deployer account into the validator at startup
        .arg3(
            "account",
            "E9VrvAdGRvCguN2XgXsgu9PNmMM3vZsU8LSUrM68j8ty",
            SOLANA_DEPLOYER_ACCOUNT,
        )
        // NOTE(review): `remember` presumably ties the temp config file's
        // lifetime to the validator handle so it isn't deleted early — confirm
        .remember(solana_config);
    // preload the prebuilt SPL programs at their well-known addresses
    for &(address, lib) in SOLANA_PROGRAMS {
        args = args.arg3(
            "bpf-program",
            address,
            concat_path(&solana_programs_path, lib).to_str().unwrap(),
        );
    }
    let validator = args.spawn("SOL");
    // fixed grace period for the validator to come up before deploying
    sleep(Duration::from_secs(5));

    log!("Deploying the hyperlane programs to solana");
    let sealevel_client = sealevel_client(&solana_cli_tools_path, &solana_config_path);

    // shared invocation for the two per-chain core deployments below
    let sealevel_client_deploy_core = sealevel_client
        .clone()
        .arg("compute-budget", "200000")
        .cmd("core")
        .cmd("deploy")
        .arg("environment", "local-e2e")
        .arg("environments-dir", "sealevel/environments")
        .arg("built-so-dir", SBF_OUT_PATH)
        .flag("use-existing-keys");

    sealevel_client_deploy_core
        .clone()
        .arg("local-domain", SOLANA_LOCAL_CHAIN_ID)
        .arg("chain", "sealeveltest1")
        .run()
        .join();

    sealevel_client_deploy_core
        .arg("local-domain", SOLANA_REMOTE_CHAIN_ID)
        .arg("chain", "sealeveltest2")
        .run()
        .join();

    // deploy the test warp route between the two chains
    sealevel_client
        .clone()
        .arg("compute-budget", "200000")
        .cmd("warp-route")
        .cmd("deploy")
        .arg("environment", "local-e2e")
        .arg("environments-dir", "sealevel/environments")
        .arg("built-so-dir", SBF_OUT_PATH)
        .arg("warp-route-name", "testwarproute")
        .arg(
            "token-config-file",
            "sealevel/environments/local-e2e/warp-routes/testwarproute/token-config.json",
        )
        .arg(
            "chain-config-file",
            "sealevel/environments/local-e2e/warp-routes/chain-config.json",
        )
        .arg("ata-payer-funding-amount", "1000000000")
        .run()
        .join();

    log!("Initializing solana programs");
    // configure the multisig ISM with the single test validator, threshold 1
    sealevel_client
        .clone()
        .cmd("multisig-ism-message-id")
        .cmd("set-validators-and-threshold")
        .arg("domain", SOLANA_LOCAL_CHAIN_ID)
        .arg("validators", "0x70997970c51812dc3a010c7d01b50e0d17dc79c8")
        .arg("threshold", "1")
        .arg("program-id", "4RSV6iyqW9X66Xq3RDCVsKJ7hMba5uv6XP8ttgxjVUB1")
        .run()
        .join();

    // announce the test validator's checkpoint storage location with a
    // pre-computed signature
    sealevel_client
        .cmd("validator-announce")
        .cmd("announce")
        .arg("validator", "0x70997970c51812dc3a010c7d01b50e0d17dc79c8")
        .arg(
            "storage-location",
            format!("file://{SOLANA_CHECKPOINT_LOCATION}")
        )
        .arg("signature", "0xcd87b715cd4c2e3448be9e34204cf16376a6ba6106e147a4965e26ea946dd2ab19598140bf26f1e9e599c23f6b661553c7d89e8db22b3609068c91eb7f0fa2f01b")
        .run()
        .join();

    log!("Local Solana chain started and hyperlane programs deployed and initialized successfully");

    (solana_config_path, validator)
}
||||
|
||||
#[apply(as_task)] |
||||
pub fn initiate_solana_hyperlane_transfer( |
||||
solana_cli_tools_path: PathBuf, |
||||
solana_config_path: PathBuf, |
||||
) { |
||||
let sender = Program::new(concat_path(&solana_cli_tools_path, "solana")) |
||||
.arg("config", solana_config_path.to_str().unwrap()) |
||||
.arg("keypair", SOLANA_KEYPAIR) |
||||
.cmd("address") |
||||
.run_with_output() |
||||
.join() |
||||
.get(0) |
||||
.expect("failed to get sender address") |
||||
.trim() |
||||
.to_owned(); |
||||
|
||||
sealevel_client(&solana_cli_tools_path, &solana_config_path) |
||||
.cmd("token") |
||||
.cmd("transfer-remote") |
||||
.cmd(SOLANA_KEYPAIR) |
||||
.cmd("10000000000") |
||||
.cmd(SOLANA_REMOTE_CHAIN_ID) |
||||
.cmd(sender) // send to self
|
||||
.cmd("native") |
||||
.arg("program-id", "CGn8yNtSD3aTTqJfYhUb6s1aVTN75NzwtsFKo1e83aga") |
||||
.run() |
||||
.join(); |
||||
} |
||||
|
||||
pub fn solana_termination_invariants_met( |
||||
solana_cli_tools_path: &Path, |
||||
solana_config_path: &Path, |
||||
) -> bool { |
||||
sealevel_client(solana_cli_tools_path, solana_config_path) |
||||
.cmd("mailbox") |
||||
.cmd("delivered") |
||||
.arg( |
||||
// this will break if any parts of `transfer-remote` change.
|
||||
// This value was gotten by observing the relayer logs.
|
||||
// TODO: get the actual message-id so we don't have to hardcode it
|
||||
"message-id", |
||||
"0x7b8ba684e5ce44f898c5fa81785c83a00e32b5bef3412e648eb7a17bec497685", |
||||
) |
||||
.arg("program-id", "9tCUWNjpqcf3NUSrtp7vquYVCwbEByvLjZUrhG5dgvhj") |
||||
.run_with_output() |
||||
.join() |
||||
.join("\n") |
||||
.contains("Message delivered") |
||||
} |
||||
|
||||
fn sealevel_client(solana_cli_tools_path: &Path, solana_config_path: &Path) -> Program { |
||||
Program::new(concat_path(AGENT_BIN_PATH, "hyperlane-sealevel-client")) |
||||
.env("PATH", updated_path(solana_cli_tools_path)) |
||||
.env("RUST_BACKTRACE", "1") |
||||
.arg("config", solana_config_path.to_str().unwrap()) |
||||
.arg("keypair", SOLANA_KEYPAIR) |
||||
} |
||||
|
||||
/// Build a PATH-style string with the (canonicalized) solana CLI tools dir
/// first, followed by the process's current `PATH` (empty string if unset).
fn updated_path(solana_cli_tools_path: &Path) -> String {
    let tools_bin = solana_cli_tools_path
        .canonicalize()
        .expect("Failed to canonicalize solana cli tools path");
    let existing_path = std::env::var("PATH").unwrap_or_default();
    format!("{}:{}", tools_bin.to_str().unwrap(), existing_path)
}
@ -1,166 +0,0 @@ |
||||
#!/usr/bin/env bash

# Deploy the hyperlane sealevel programs against a local solar-eclipse
# validator and exercise a warp-route token transfer.
#
# Required environment variables:
#   SOLAR_ECLIPSE_DIR           - dir containing a built `solana` binary at target/debug/solana
#   ECLIPSE_PROGRAM_LIBRARY_DIR - checkout of the eclipse program library

# Fail fast when required env is missing; previously the script only echoed
# the error and then kept going with empty paths.
if [ -z "${SOLAR_ECLIPSE_DIR}" ]; then
  echo '$SOLAR_ECLIPSE_DIR must be set' >&2
  exit 1
fi

if [ -z "${ECLIPSE_PROGRAM_LIBRARY_DIR}" ]; then
  echo '$ECLIPSE_PROGRAM_LIBRARY_DIR must be set' >&2
  exit 1
fi

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
TEST_KEYS_DIR="${SCRIPT_DIR}/../config/sealevel/test-keys"
KEYPAIR="${TEST_KEYS_DIR}/test_deployer-keypair.json"
TARGET_DIR="${SCRIPT_DIR}/../target"
SEALEVEL_DIR="${SCRIPT_DIR}/../sealevel"
DEPLOY_DIR="${TARGET_DIR}/deploy"
BIN_DIR="${TARGET_DIR}/debug"
SPL_TOKEN="${ECLIPSE_PROGRAM_LIBRARY_DIR}/target/debug/spl-token"
CHAIN_ID="13375"
REMOTE_CHAIN_ID="13376"

# Ensure that the solar-eclipse `solana` binary is used.
# A function rather than an alias: aliases are not expanded in
# non-interactive shells, so the original `alias solana=...` was a no-op.
solana() { "${SOLAR_ECLIPSE_DIR}/target/debug/solana" "$@"; }
||||
|
||||
# Build a solana program unless its .so already exists and no rebuild is forced.
# $1 - path to .so file
# $2 - path to directory to build program in if the .so file doesn't exist
# $3 - "true"/"false": whether to force building the program
build_program() {
  # quote expansions so paths with spaces (or empty args) don't word-split
  if "$3" || [ ! -e "$1" ]; then
    # .so file doesn't exist (or rebuild forced), build it
    pushd "$2"
    cargo build-sbf
    popd
  fi
}
||||
|
||||
# Build a solana program (see build_program) and copy its .so into DEPLOY_DIR.
# $1 - path to .so file
# $2 - path to directory to build program in if the .so file doesn't exist
# $3 - "true"/"false": whether to force building the program
build_and_copy_program() {
  # forward all three args (the original doc omitted the force flag)
  build_program "$1" "$2" "$3"

  # essentially cp, but -u won't copy if the source is older than the destination.
  # used as a workaround to prevent copying to the same destination as the source
  rsync -u "$1" "$DEPLOY_DIR"
}
||||
|
||||
# Build (or reuse) every program the test needs: the SPL dependencies in the
# eclipse program library checkout, and the hyperlane sealevel programs
# (which are additionally copied into DEPLOY_DIR).
# $1 - "true"/"false": force rebuilding even if the .so files exist
build_programs() {
  local force_build="${1}"

  # token programs
  build_program "${ECLIPSE_PROGRAM_LIBRARY_DIR}/target/deploy/spl_token.so" "${ECLIPSE_PROGRAM_LIBRARY_DIR}/token/program" "${force_build}"
  build_program "${ECLIPSE_PROGRAM_LIBRARY_DIR}/target/deploy/spl_token_2022.so" "${ECLIPSE_PROGRAM_LIBRARY_DIR}/token/program-2022" "${force_build}"
  build_program "${ECLIPSE_PROGRAM_LIBRARY_DIR}/target/deploy/spl_associated_token_account.so" "${ECLIPSE_PROGRAM_LIBRARY_DIR}/associated-token-account/program" "${force_build}"

  # noop
  build_program "${ECLIPSE_PROGRAM_LIBRARY_DIR}/account-compression/target/deploy/spl_noop.so" "${ECLIPSE_PROGRAM_LIBRARY_DIR}/account-compression/programs/noop" "${force_build}"

  # hyperlane sealevel programs
  build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_mailbox.so" "${SEALEVEL_DIR}/programs/mailbox" "${force_build}"
  build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_validator_announce.so" "${SEALEVEL_DIR}/programs/validator-announce" "${force_build}"
  build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_multisig_ism_message_id.so" "${SEALEVEL_DIR}/programs/ism/multisig-ism-message-id" "${force_build}"
  build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_token.so" "${SEALEVEL_DIR}/programs/hyperlane-sealevel-token" "${force_build}"
  build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_token_native.so" "${SEALEVEL_DIR}/programs/hyperlane-sealevel-token-native" "${force_build}"
  build_and_copy_program "${TARGET_DIR}/deploy/hyperlane_sealevel_token_collateral.so" "${SEALEVEL_DIR}/programs/hyperlane-sealevel-token-collateral" "${force_build}"
}
||||
|
||||
# Build the spl-token CLI from the eclipse program library checkout, unless
# the binary already exists at $SPL_TOKEN.
build_spl_token_cli() {
  # quote the expansion so an unset/space-containing path can't word-split
  if [ ! -e "${SPL_TOKEN}" ]; then
    pushd "${ECLIPSE_PROGRAM_LIBRARY_DIR}/token/cli"
    cargo build
    popd
  fi
}
||||
|
||||
# Configure the multisig ISM on the local chain with the single test validator
# address and a threshold of 1.
setup_multisig_ism_message_id() {
  "${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" multisig-ism-message-id set-validators-and-threshold --domain "${CHAIN_ID}" --validators 0x70997970c51812dc3a010c7d01b50e0d17dc79c8 --threshold 1 --program-id "4RSV6iyqW9X66Xq3RDCVsKJ7hMba5uv6XP8ttgxjVUB1"
}
||||
|
||||
# Announce the test validator's local checkpoint storage location using a
# pre-computed announcement signature.
announce_validator() {
  "${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" validator-announce announce --validator 0x70997970c51812dc3a010c7d01b50e0d17dc79c8 --storage-location "file:///tmp/test_sealevel_checkpoints_0x70997970c51812dc3a010c7d01b50e0d17dc79c8" --signature "0xcd87b715cd4c2e3448be9e34204cf16376a6ba6106e147a4965e26ea946dd2ab19598140bf26f1e9e599c23f6b661553c7d89e8db22b3609068c91eb7f0fa2f01b"
}
||||
|
||||
# End-to-end warp route test: initialize the ISM and validator announcement,
# deploy the warp route, send a native-token transfer to the remote chain and
# poll the destination mailbox until the message is delivered.
test_token() {

  setup_multisig_ism_message_id

  announce_validator

  # deploy the test warp route between the two local chains
  "${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" --compute-budget 200000 warp-route deploy --warp-route-name testwarproute --environment local-e2e --environments-dir "${SEALEVEL_DIR}/environments" --built-so-dir "${DEPLOY_DIR}" --token-config-file "${SEALEVEL_DIR}/environments/local-e2e/warp-routes/testwarproute/token-config.json" --chain-config-file "${SEALEVEL_DIR}/environments/local-e2e/warp-routes/chain-config.json" --ata-payer-funding-amount 1000000000

  local token_type=""
  local program_id=""

  # NOTE(review): the recipient_* variables below are assigned but never used
  local recipient_token_type=""
  local recipient_program_id=""

  # sending side: the native warp route on the local chain
  token_type="native"
  program_id="CGn8yNtSD3aTTqJfYhUb6s1aVTN75NzwtsFKo1e83aga"

  recipient_token_type="synthetic"
  recipient_program_id="3MzUPjP5LEkiHH82nEAe28Xtz9ztuMqWc8UmuKxrpVQH"

  local amount=10000000000 # lamports

  local -r sender_keypair="${KEYPAIR}"
  local -r sender="$(solana -ul -k "${sender_keypair}" address)"
  # send to self on the remote chain
  local -r recipient="${sender}"

  # abort early if the sender can't cover the transfer amount
  local -r sender_balance="$(solana -ul balance "${sender}" | cut -d ' ' -f 1)"
  local -r amount_float="$(python -c "print(${amount} / 1000000000)")"
  if (( $(bc -l <<< "${sender_balance} < ${amount_float}") )); then
    echo "Insufficient sender funds"
    exit 1
  fi

  solana -ul balance "${sender}"

  # Transfer the lamports
  "${BIN_DIR}/hyperlane-sealevel-client" \
    -k "${KEYPAIR}" \
    token transfer-remote "${sender_keypair}" "${amount}" "${REMOTE_CHAIN_ID}" "${recipient}" "${token_type}" --program-id "${program_id}"

  # Wait for token transfer message to appear in the destination Mailbox.
  # This ID was manually gotten from running the Relayer and observing the logs - fragile, I know!
  while "${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" mailbox delivered --message-id 0x7b8ba684e5ce44f898c5fa81785c83a00e32b5bef3412e648eb7a17bec497685 --program-id "9tCUWNjpqcf3NUSrtp7vquYVCwbEByvLjZUrhG5dgvhj" | grep -q 'Message not delivered'
  do
    sleep 3
  done

  solana -ul balance "${recipient}"

  # dump final mailbox and token state for inspection
  "${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" mailbox query
  "${BIN_DIR}/hyperlane-sealevel-client" -k "${KEYPAIR}" token query "${token_type}" --program-id "${program_id}"
}
||||
|
||||
# Entrypoint: optionally just (force-)build programs; otherwise build the
# client and programs, deploy the core programs for both test chains and run
# the token transfer test.
# $1 - optional mode: "build-only" or "force-build-programs"
main() {
  if [ "${1}" = "build-only" ]; then
    build_programs true
    exit 0
  fi

  # build the client
  pushd "${SCRIPT_DIR}/../sealevel/client"
  cargo build
  popd

  # build all the required sealevel programs
  if [ "${1}" = "force-build-programs" ]; then
    build_programs true
  else
    build_programs false
  fi

  # build the SPL token CLI
  build_spl_token_cli

  # deploy the core programs once per test chain
  "${BIN_DIR}/hyperlane-sealevel-client" --compute-budget 200000 -k "${KEYPAIR}" core deploy --local-domain "${CHAIN_ID}" --environment local-e2e --use-existing-keys --environments-dir "${SEALEVEL_DIR}/environments" --built-so-dir "${DEPLOY_DIR}" --chain sealeveltest1
  "${BIN_DIR}/hyperlane-sealevel-client" --compute-budget 200000 -k "${KEYPAIR}" core deploy --local-domain "${REMOTE_CHAIN_ID}" --environment local-e2e --use-existing-keys --environments-dir "${SEALEVEL_DIR}/environments" --built-so-dir "${DEPLOY_DIR}" --chain sealeveltest2

  # NOTE(review): test_token takes no parameters; the 'true' argument is inert
  test_token true
}
||||
|
||||
# Run main only when executed directly (not when sourced for its functions).
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
  set -ex
  main "$@"
fi
Loading…
Reference in new issue