feat: Report git commit hash as agent metadata (#4282)

### Description

* We store the git commit hash as an environment variable when the agent's source code is compiled.
* The validator uses the git commit hash as the agent version in the `metadata_latest.json` file.
* The `metadata_latest.json` file is overwritten every time the validator is restarted.
* The git commit hash is extracted with the `git` utility, so `git` must be available at build time.
* If the directory containing the source code has no `.git` folder, the git commit hash defaults to `VERGEN_IDEMPOTENT_OUTPUT`.

Agent metadata is propagated to the Relayer and Scraper, but is not used there yet. We can use it in the future, for example to report the git commit hash in the logs.

### Backward compatibility

Yes

### Testing

* Tested locally with E2E tests.
* Tested against S3 by deploying the Docker image into k8s.

---

Co-authored-by: Danil Nemirovsky <4614623+ameten@users.noreply.github.com>
commit d1bf212bf7 (parent 7625bf8dce)
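To make the flow described above concrete, here is a minimal sketch of how a validator could write `metadata_latest.json` on startup. It is illustrative only: `write_metadata_latest` and the local directory are hypothetical (the real validator writes through its checkpoint syncer, local or S3), while the `AgentMetadata` shape and the `VERGEN_GIT_SHA` variable are taken from the diff below.

```rust
use std::fs;
use std::path::Path;

use serde::Serialize;

// Mirror of the `AgentMetadata` struct added in this PR (see the diff below).
#[derive(Serialize)]
struct AgentMetadata {
    git_sha: String,
}

// Hypothetical helper: overwrite `metadata_latest.json` in a checkpoint
// directory on every validator (re)start.
fn write_metadata_latest(checkpoint_dir: &Path) -> anyhow::Result<()> {
    // `VERGEN_GIT_SHA` is injected at compile time by the vergen build script;
    // without a `.git` folder it falls back to "VERGEN_IDEMPOTENT_OUTPUT".
    let metadata = AgentMetadata {
        git_sha: env!("VERGEN_GIT_SHA").to_string(),
    };
    fs::write(
        checkpoint_dir.join("metadata_latest.json"),
        serde_json::to_string_pretty(&metadata)?,
    )?;
    Ok(())
}

fn main() -> anyhow::Result<()> {
    // Hypothetical local checkpoint directory used only for this sketch.
    let dir = Path::new("/tmp/validator-checkpoints");
    fs::create_dir_all(dir)?;
    write_metadata_latest(dir)
}
```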
A build script that makes `vergen` emit the git SHA at compile time:

@@ -0,0 +1,8 @@
```rust
use anyhow::Result;
use vergen::EmitBuilder;

fn main() -> Result<()> {
    EmitBuilder::builder().git_sha(false).emit()?;

    Ok(())
}
```
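For context, `EmitBuilder::builder().git_sha(false).emit()` makes vergen print a `cargo:rustc-env=VERGEN_GIT_SHA=<hash>` instruction during the build (the boolean argument selects between the abbreviated and full SHA; `false` requests the full hash). A small sketch of how a binary in the same crate can then consume that value; `GIT_SHA` is a name chosen here purely for illustration:

```rust
// Assumes the crate's build script (above) ran and set the variable.
// If the source tree has no `.git` directory, vergen substitutes the
// default value "VERGEN_IDEMPOTENT_OUTPUT", as noted in the PR description.
const GIT_SHA: &str = env!("VERGEN_GIT_SHA");

fn main() {
    println!("agent built from commit {GIT_SHA}");
}
```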
The new `AgentMetadata` type (exported from `hyperlane_base`):

@@ -0,0 +1,9 @@
```rust
use derive_new::new;
use serde::{Deserialize, Serialize};

/// Metadata about agent
#[derive(Debug, Deserialize, Serialize, new)]
pub struct AgentMetadata {
    /// Contains git commit hash of the agent binary
    pub git_sha: String,
}
```
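Since `AgentMetadata` derives `Serialize`, `Deserialize`, and `new` (from `derive_new`), the contents of `metadata_latest.json` are just a one-field JSON object. An illustrative round trip; the import path comes from the diff below, and the hash value is only an example:

```rust
use hyperlane_base::AgentMetadata;

fn main() -> anyhow::Result<()> {
    // `derive_new` generates `AgentMetadata::new(git_sha)`.
    let metadata = AgentMetadata::new("d1bf212bf7".to_string());

    // Serializes to: {"git_sha":"d1bf212bf7"}
    let json = serde_json::to_string(&metadata)?;
    println!("{json}");

    // The validator's startup check deserializes the same shape back.
    let parsed: AgentMetadata = serde_json::from_str(&json)?;
    assert_eq!(parsed.git_sha, "d1bf212bf7");
    Ok(())
}
```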
An identical build script added for a second agent crate:

@@ -0,0 +1,8 @@
```rust
use anyhow::Result;
use vergen::EmitBuilder;

fn main() -> Result<()> {
    EmitBuilder::builder().git_sha(false).emit()?;

    Ok(())
}
```
The e2e invariants module is reduced to re-exports; its previous ~213-line body moves, essentially unchanged, into the `common`, `post_startup_invariants`, and `termination_invariants` submodules shown below.

@@ -1,213 +1,7 @@
```rust
pub use common::SOL_MESSAGES_EXPECTED;
pub use post_startup_invariants::post_startup_invariants;
pub use termination_invariants::termination_invariants_met;

mod common;
mod post_startup_invariants;
mod termination_invariants;
```
The `common` invariants submodule:

@@ -0,0 +1,3 @@
```rust
// This number should be even, so the messages can be split into two equal halves
// sent before and after the relayer spins up, to avoid rounding errors.
pub const SOL_MESSAGES_EXPECTED: u32 = 20;
```
The new `post_startup_invariants` submodule, which checks that each validator wrote a `metadata_latest.json` containing the expected git SHA:

@@ -0,0 +1,54 @@
```rust
use std::fs::File;
use std::io::BufReader;

use hyperlane_base::AgentMetadata;

use crate::DynPath;

pub fn post_startup_invariants(checkpoints_dirs: &[DynPath]) -> bool {
    post_startup_validator_metadata_written(checkpoints_dirs)
}

fn post_startup_validator_metadata_written(checkpoints_dirs: &[DynPath]) -> bool {
    let expected_git_sha = env!("VERGEN_GIT_SHA");

    let failed_metadata = checkpoints_dirs
        .iter()
        .map(|path| metadata_file_check(expected_git_sha, path))
        .any(|b| !b);

    !failed_metadata
}

fn metadata_file_check(expected_git_sha: &str, path: &DynPath) -> bool {
    let path = (*path).as_ref().as_ref();
    if !path.exists() {
        return false;
    }

    let file = path.join("metadata_latest.json");
    if !file.exists() {
        return false;
    }

    let open = File::open(&file);
    let mut reader = if let Ok(file) = open {
        BufReader::new(file)
    } else {
        return false;
    };

    let deserialized = serde_json::from_reader(&mut reader);

    let metadata: AgentMetadata = if let Ok(metadata) = deserialized {
        metadata
    } else {
        return false;
    };

    if metadata.git_sha != expected_git_sha {
        return false;
    }

    true
}
```
The `termination_invariants` submodule (moved out of the former single-file module):

@@ -0,0 +1,210 @@
```rust
use std::fs::File;
use std::path::Path;

use crate::config::Config;
use crate::metrics::agent_balance_sum;
use crate::utils::get_matching_lines;
use maplit::hashmap;
use relayer::GAS_EXPENDITURE_LOG_MESSAGE;

use crate::invariants::SOL_MESSAGES_EXPECTED;
use crate::logging::log;
use crate::solana::solana_termination_invariants_met;
use crate::{fetch_metric, AGENT_LOGGING_DIR, ZERO_MERKLE_INSERTION_KATHY_MESSAGES};

/// Use the metrics to check if the relayer queues are empty and the expected
/// number of messages have been sent.
pub fn termination_invariants_met(
    config: &Config,
    starting_relayer_balance: f64,
    solana_cli_tools_path: Option<&Path>,
    solana_config_path: Option<&Path>,
) -> eyre::Result<bool> {
    let eth_messages_expected = (config.kathy_messages / 2) as u32 * 2;
    let sol_messages_expected = if config.sealevel_enabled {
        SOL_MESSAGES_EXPECTED
    } else {
        0
    };
    let total_messages_expected = eth_messages_expected + sol_messages_expected;

    let lengths = fetch_metric("9092", "hyperlane_submitter_queue_length", &hashmap! {})?;
    assert!(!lengths.is_empty(), "Could not find queue length metric");
    if lengths.iter().sum::<u32>() != ZERO_MERKLE_INSERTION_KATHY_MESSAGES {
        log!("Relayer queues not empty. Lengths: {:?}", lengths);
        return Ok(false);
    };

    // Also ensure the counter is as expected (total number of messages), summed
    // across all mailboxes.
    let msg_processed_count =
        fetch_metric("9092", "hyperlane_messages_processed_count", &hashmap! {})?
            .iter()
            .sum::<u32>();
    if msg_processed_count != total_messages_expected {
        log!(
            "Relayer has {} processed messages, expected {}",
            msg_processed_count,
            total_messages_expected
        );
        return Ok(false);
    }

    let gas_payment_events_count = fetch_metric(
        "9092",
        "hyperlane_contract_sync_stored_events",
        &hashmap! {"data_type" => "gas_payments"},
    )?
    .iter()
    .sum::<u32>();

    let log_file_path = AGENT_LOGGING_DIR.join("RLY-output.log");
    const STORING_NEW_MESSAGE_LOG_MESSAGE: &str = "Storing new message in db";
    const LOOKING_FOR_EVENTS_LOG_MESSAGE: &str = "Looking for events in index range";
    const HYPER_INCOMING_BODY_LOG_MESSAGE: &str = "incoming body completed";

    const TX_ID_INDEXING_LOG_MESSAGE: &str = "Found log(s) for tx id";

    let relayer_logfile = File::open(log_file_path)?;
    let invariant_logs = &[
        STORING_NEW_MESSAGE_LOG_MESSAGE,
        LOOKING_FOR_EVENTS_LOG_MESSAGE,
        GAS_EXPENDITURE_LOG_MESSAGE,
        HYPER_INCOMING_BODY_LOG_MESSAGE,
        TX_ID_INDEXING_LOG_MESSAGE,
    ];
    let log_counts = get_matching_lines(&relayer_logfile, invariant_logs);
    // Zero insertion messages don't reach `submit` stage where gas is spent, so we only expect these logs for the other messages.
    // TODO: Sometimes we find more logs than expected. This may either mean that gas is deducted twice for the same message due to a bug,
    // or that submitting the message transaction fails for some messages. Figure out which is the case and convert this check to
    // strict equality.
    // EDIT: Having had a quick look, it seems like there are some legitimate reverts happening in the confirm step
    // (`Transaction attempting to process message either reverted or was reorged`)
    // in which case more gas expenditure logs than messages are expected.
    assert!(
        log_counts.get(GAS_EXPENDITURE_LOG_MESSAGE).unwrap() >= &total_messages_expected,
        "Didn't record gas payment for all delivered messages"
    );
    // These tests check that we fixed https://github.com/hyperlane-xyz/hyperlane-monorepo/issues/3915, where some logs would not show up
    assert!(
        log_counts.get(STORING_NEW_MESSAGE_LOG_MESSAGE).unwrap() > &0,
        "Didn't find any logs about storing messages in db"
    );
    assert!(
        log_counts.get(LOOKING_FOR_EVENTS_LOG_MESSAGE).unwrap() > &0,
        "Didn't find any logs about looking for events in index range"
    );
    let total_tx_id_log_count = log_counts.get(TX_ID_INDEXING_LOG_MESSAGE).unwrap();
    assert!(
        // there are 3 txid-indexed events:
        // - relayer: merkle insertion and gas payment
        // - scraper: gas payment
        // some logs are emitted for multiple events, so requiring there to be at least
        // `config.kathy_messages` logs is a reasonable approximation, since all three of these events
        // are expected to be logged for each message.
        *total_tx_id_log_count as u64 >= config.kathy_messages,
        "Didn't find as many tx id logs as expected. Found {} and expected {}",
        total_tx_id_log_count,
        config.kathy_messages
    );
    assert!(
        log_counts.get(HYPER_INCOMING_BODY_LOG_MESSAGE).is_none(),
        "Verbose logs not expected at the log level set in e2e"
    );

    let gas_payment_sealevel_events_count = fetch_metric(
        "9092",
        "hyperlane_contract_sync_stored_events",
        &hashmap! {
            "data_type" => "gas_payments",
            "chain" => "sealeveltest",
        },
    )?
    .iter()
    .sum::<u32>();
    // TestSendReceiver randomly breaks gas payments up into
    // two. So we expect at least as many gas payments as messages.
    if gas_payment_events_count < total_messages_expected {
        log!(
            "Relayer has {} gas payment events, expected at least {}",
            gas_payment_events_count,
            total_messages_expected
        );
        return Ok(false);
    }

    if let Some((solana_cli_tools_path, solana_config_path)) =
        solana_cli_tools_path.zip(solana_config_path)
    {
        if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) {
            log!("Solana termination invariants not met");
            return Ok(false);
        }
    }

    let dispatched_messages_scraped = fetch_metric(
        "9093",
        "hyperlane_contract_sync_stored_events",
        &hashmap! {"data_type" => "message_dispatch"},
    )?
    .iter()
    .sum::<u32>();
    if dispatched_messages_scraped != eth_messages_expected + ZERO_MERKLE_INSERTION_KATHY_MESSAGES {
        log!(
            "Scraper has scraped {} dispatched messages, expected {}",
            dispatched_messages_scraped,
            eth_messages_expected
        );
        return Ok(false);
    }

    let gas_payments_scraped = fetch_metric(
        "9093",
        "hyperlane_contract_sync_stored_events",
        &hashmap! {"data_type" => "gas_payment"},
    )?
    .iter()
    .sum::<u32>();
    // The relayer and scraper should have the same number of gas payments.
    // TODO: Sealevel gas payments are not yet included in the event count.
    // For now, treat as an exception in the invariants.
    let expected_gas_payments = gas_payment_events_count - gas_payment_sealevel_events_count;
    if gas_payments_scraped != expected_gas_payments {
        log!(
            "Scraper has scraped {} gas payments, expected {}",
            gas_payments_scraped,
            expected_gas_payments
        );
        return Ok(false);
    }

    let delivered_messages_scraped = fetch_metric(
        "9093",
        "hyperlane_contract_sync_stored_events",
        &hashmap! {"data_type" => "message_delivery"},
    )?
    .iter()
    .sum::<u32>();
    if delivered_messages_scraped != eth_messages_expected {
        log!(
            "Scraper has scraped {} delivered messages, expected {}",
            delivered_messages_scraped,
            eth_messages_expected
        );
        return Ok(false);
    }

    let ending_relayer_balance: f64 = agent_balance_sum(9092).unwrap();
    // Make sure the balance was correctly updated in the metrics.
    if starting_relayer_balance <= ending_relayer_balance {
        log!(
            "Expected starting relayer balance to be greater than ending relayer balance, but got {} <= {}",
            starting_relayer_balance,
            ending_relayer_balance
        );
        return Ok(false);
    }

    log!("Termination invariants have been meet");
    Ok(true)
}
```