feat: Report git commit hash as agent metadata (#4282)

### Description

* We are storing git commit hash as an environment variable when we
compile the source code of an agent.
* Validator uses the git commit hash as the agent version in the
`metadata_latest.json` file.
* File `metadata_latest.json` is overwritten every time the validator
is restarted.
* Git commit hash is extracted by `git` util, so, it should be available
during build time.
* If the directory with the source code does not contain `.git` folder,
git commit hash will be defaulted to `VERGEN_IDEMPOTENT_OUTPUT`.

Agent Metadata is propagated to the Relayer and Scraper, but is not used
there yet. We can use it in the future, for example to report the git
commit hash in the logs.

### Backward compatibility

Yes

### Testing

* Tested locally with E2E Tests.
* Tested against S3 by deploying docker image into k8s

---------

Co-authored-by: Danil Nemirovsky <4614623+ameten@users.noreply.github.com>
pull/4336/head
Danil Nemirovsky 3 months ago committed by GitHub
parent 7625bf8dce
commit d1bf212bf7
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
  1. 1
      .dockerignore
  2. 2
      .github/workflows/rust-docker.yml
  3. 17
      rust/Cargo.lock
  4. 36
      rust/Dockerfile
  5. 5
      rust/agents/relayer/src/relayer.rs
  6. 5
      rust/agents/scraper/src/agent.rs
  7. 20
      rust/agents/validator/src/validator.rs
  8. 4
      rust/hyperlane-base/Cargo.toml
  9. 8
      rust/hyperlane-base/build.rs
  10. 11
      rust/hyperlane-base/src/agent.rs
  11. 2
      rust/hyperlane-base/src/lib.rs
  12. 9
      rust/hyperlane-base/src/metadata.rs
  13. 3
      rust/hyperlane-base/src/traits/checkpoint_syncer.rs
  14. 12
      rust/hyperlane-base/src/types/gcs_storage.rs
  15. 17
      rust/hyperlane-base/src/types/local_storage.rs
  16. 15
      rust/hyperlane-base/src/types/s3_storage.rs
  17. 5
      rust/utils/run-locally/Cargo.toml
  18. 8
      rust/utils/run-locally/build.rs
  19. 218
      rust/utils/run-locally/src/invariants.rs
  20. 3
      rust/utils/run-locally/src/invariants/common.rs
  21. 54
      rust/utils/run-locally/src/invariants/post_startup_invariants.rs
  22. 210
      rust/utils/run-locally/src/invariants/termination_invariants.rs
  23. 14
      rust/utils/run-locally/src/main.rs

@ -8,7 +8,6 @@ typescript/hyperlane-deploy/.env
**/*.swp **/*.swp
**/*.swo **/*.swo
rust
tmp.env tmp.env
.DS_STORE .DS_STORE

@ -65,7 +65,7 @@ jobs:
- name: Build and push - name: Build and push
uses: docker/build-push-action@v5 uses: docker/build-push-action@v5
with: with:
context: ./rust context: .
file: ./rust/Dockerfile file: ./rust/Dockerfile
push: true push: true
tags: ${{ steps.meta.outputs.tags }} tags: ${{ steps.meta.outputs.tags }}

17
rust/Cargo.lock generated

@ -4191,6 +4191,7 @@ dependencies = [
name = "hyperlane-base" name = "hyperlane-base"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow",
"async-trait", "async-trait",
"axum", "axum",
"backtrace", "backtrace",
@ -4239,6 +4240,7 @@ dependencies = [
"tracing-subscriber", "tracing-subscriber",
"tracing-test", "tracing-test",
"url", "url",
"vergen",
"walkdir", "walkdir",
"warp", "warp",
"ya-gcp", "ya-gcp",
@ -7312,6 +7314,7 @@ dependencies = [
name = "run-locally" name = "run-locally"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow",
"cosmwasm-schema", "cosmwasm-schema",
"ctrlc", "ctrlc",
"ethers", "ethers",
@ -7319,6 +7322,7 @@ dependencies = [
"ethers-core", "ethers-core",
"eyre", "eyre",
"hex 0.4.3", "hex 0.4.3",
"hyperlane-base",
"hyperlane-core", "hyperlane-core",
"hyperlane-cosmos", "hyperlane-cosmos",
"hyperlane-cosmwasm-interface", "hyperlane-cosmwasm-interface",
@ -7338,6 +7342,7 @@ dependencies = [
"tokio", "tokio",
"toml_edit 0.19.15", "toml_edit 0.19.15",
"ureq", "ureq",
"vergen",
"which", "which",
] ]
@ -10859,6 +10864,18 @@ version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
[[package]]
name = "vergen"
version = "8.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2990d9ea5967266ea0ccf413a4aa5c42a93dbcfda9cb49a97de6931726b12566"
dependencies = [
"anyhow",
"cfg-if",
"rustversion",
"time",
]
[[package]] [[package]]
name = "version_check" name = "version_check"
version = "0.9.4" version = "0.9.4"

@ -9,18 +9,24 @@ RUN apt-get update && \
apt-get install -y musl-tools clang && \ apt-get install -y musl-tools clang && \
rustup target add x86_64-unknown-linux-musl rustup target add x86_64-unknown-linux-musl
RUN mkdir rust
# Add workspace to workdir # Add workspace to workdir
COPY agents ./agents COPY rust/agents rust/agents
COPY chains ./chains COPY rust/chains rust/chains
COPY hyperlane-base ./hyperlane-base COPY rust/hyperlane-base rust/hyperlane-base
COPY hyperlane-core ./hyperlane-core COPY rust/hyperlane-core rust/hyperlane-core
COPY hyperlane-test ./hyperlane-test COPY rust/hyperlane-test rust/hyperlane-test
COPY ethers-prometheus ./ethers-prometheus COPY rust/ethers-prometheus rust/ethers-prometheus
COPY utils ./utils COPY rust/utils rust/utils
COPY sealevel ./sealevel COPY rust/sealevel rust/sealevel
COPY Cargo.toml . COPY rust/Cargo.toml rust/.
COPY Cargo.lock . COPY rust/Cargo.lock rust/.
COPY .git .git
WORKDIR /usr/src/rust
# Build binaries # Build binaries
RUN \ RUN \
@ -29,9 +35,9 @@ RUN \
--mount=id=cargo-home-git,type=cache,sharing=locked,target=/usr/local/cargo/git \ --mount=id=cargo-home-git,type=cache,sharing=locked,target=/usr/local/cargo/git \
RUSTFLAGS="--cfg tokio_unstable" cargo build --release --bin validator --bin relayer --bin scraper && \ RUSTFLAGS="--cfg tokio_unstable" cargo build --release --bin validator --bin relayer --bin scraper && \
mkdir -p /release && \ mkdir -p /release && \
cp /usr/src/target/release/validator /release && \ cp /usr/src/rust/target/release/validator /release && \
cp /usr/src/target/release/relayer /release && \ cp /usr/src/rust/target/release/relayer /release && \
cp /usr/src/target/release/scraper /release cp /usr/src/rust/target/release/scraper /release
## 2: Copy the binaries to release image ## 2: Copy the binaries to release image
FROM ubuntu:22.04 FROM ubuntu:22.04
@ -43,7 +49,7 @@ RUN apt-get update && \
rm -rf /var/lib/apt/lists/* rm -rf /var/lib/apt/lists/*
WORKDIR /app WORKDIR /app
COPY config ./config COPY rust/config ./config
COPY --from=builder /release/* . COPY --from=builder /release/* .
RUN chmod 777 /app && \ RUN chmod 777 /app && \

@ -13,8 +13,8 @@ use hyperlane_base::{
db::{HyperlaneRocksDB, DB}, db::{HyperlaneRocksDB, DB},
metrics::{AgentMetrics, MetricsUpdater}, metrics::{AgentMetrics, MetricsUpdater},
settings::ChainConf, settings::ChainConf,
BaseAgent, ChainMetrics, ContractSyncMetrics, ContractSyncer, CoreMetrics, HyperlaneAgentCore, AgentMetadata, BaseAgent, ChainMetrics, ContractSyncMetrics, ContractSyncer, CoreMetrics,
SyncOptions, HyperlaneAgentCore, SyncOptions,
}; };
use hyperlane_core::{ use hyperlane_core::{
HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, MerkleTreeInsertion, QueueOperation, HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, MerkleTreeInsertion, QueueOperation,
@ -113,6 +113,7 @@ impl BaseAgent for Relayer {
type Settings = RelayerSettings; type Settings = RelayerSettings;
async fn from_settings( async fn from_settings(
_agent_metadata: AgentMetadata,
settings: Self::Settings, settings: Self::Settings,
core_metrics: Arc<CoreMetrics>, core_metrics: Arc<CoreMetrics>,
agent_metrics: AgentMetrics, agent_metrics: AgentMetrics,

@ -4,8 +4,8 @@ use async_trait::async_trait;
use derive_more::AsRef; use derive_more::AsRef;
use futures::future::try_join_all; use futures::future::try_join_all;
use hyperlane_base::{ use hyperlane_base::{
broadcast::BroadcastMpscSender, metrics::AgentMetrics, settings::IndexSettings, BaseAgent, broadcast::BroadcastMpscSender, metrics::AgentMetrics, settings::IndexSettings, AgentMetadata,
ChainMetrics, ContractSyncMetrics, ContractSyncer, CoreMetrics, HyperlaneAgentCore, BaseAgent, ChainMetrics, ContractSyncMetrics, ContractSyncer, CoreMetrics, HyperlaneAgentCore,
MetricsUpdater, SyncOptions, MetricsUpdater, SyncOptions,
}; };
use hyperlane_core::{Delivery, HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, H512}; use hyperlane_core::{Delivery, HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, H512};
@ -41,6 +41,7 @@ impl BaseAgent for Scraper {
type Settings = ScraperSettings; type Settings = ScraperSettings;
async fn from_settings( async fn from_settings(
_agent_metadata: AgentMetadata,
settings: Self::Settings, settings: Self::Settings,
metrics: Arc<CoreMetrics>, metrics: Arc<CoreMetrics>,
agent_metrics: AgentMetrics, agent_metrics: AgentMetrics,

@ -13,8 +13,8 @@ use hyperlane_base::{
db::{HyperlaneRocksDB, DB}, db::{HyperlaneRocksDB, DB},
metrics::AgentMetrics, metrics::AgentMetrics,
settings::ChainConf, settings::ChainConf,
BaseAgent, ChainMetrics, CheckpointSyncer, ContractSyncMetrics, ContractSyncer, CoreMetrics, AgentMetadata, BaseAgent, ChainMetrics, CheckpointSyncer, ContractSyncMetrics, ContractSyncer,
HyperlaneAgentCore, MetricsUpdater, SequencedDataContractSync, CoreMetrics, HyperlaneAgentCore, MetricsUpdater, SequencedDataContractSync,
}; };
use hyperlane_core::{ use hyperlane_core::{
@ -50,6 +50,7 @@ pub struct Validator {
core_metrics: Arc<CoreMetrics>, core_metrics: Arc<CoreMetrics>,
agent_metrics: AgentMetrics, agent_metrics: AgentMetrics,
chain_metrics: ChainMetrics, chain_metrics: ChainMetrics,
agent_metadata: AgentMetadata,
} }
#[async_trait] #[async_trait]
@ -59,6 +60,7 @@ impl BaseAgent for Validator {
type Settings = ValidatorSettings; type Settings = ValidatorSettings;
async fn from_settings( async fn from_settings(
agent_metadata: AgentMetadata,
settings: Self::Settings, settings: Self::Settings,
metrics: Arc<CoreMetrics>, metrics: Arc<CoreMetrics>,
agent_metrics: AgentMetrics, agent_metrics: AgentMetrics,
@ -123,6 +125,7 @@ impl BaseAgent for Validator {
agent_metrics, agent_metrics,
chain_metrics, chain_metrics,
core_metrics: metrics, core_metrics: metrics,
agent_metadata,
}) })
} }
@ -169,6 +172,11 @@ impl BaseAgent for Validator {
.instrument(info_span!("MetricsUpdater")), .instrument(info_span!("MetricsUpdater")),
); );
// report agent metadata
self.metadata()
.await
.expect("Failed to report agent metadata");
// announce the validator after spawning the signer task // announce the validator after spawning the signer task
self.announce().await.expect("Failed to announce validator"); self.announce().await.expect("Failed to announce validator");
@ -290,6 +298,14 @@ impl Validator {
} }
} }
/// Report agent metadata (currently the git commit hash baked in at
/// build time) by writing it to the configured checkpoint syncer storage.
async fn metadata(&self) -> Result<()> {
self.checkpoint_syncer
.write_metadata(&self.agent_metadata)
.await?;
Ok(())
}
async fn announce(&self) -> Result<()> { async fn announce(&self) -> Result<()> {
let address = self.signer.eth_address(); let address = self.signer.eth_address();
let announcement_location = self.checkpoint_syncer.announcement_location(); let announcement_location = self.checkpoint_syncer.announcement_location();

@ -70,6 +70,10 @@ tempfile.workspace = true
tracing-test.workspace = true tracing-test.workspace = true
walkdir.workspace = true walkdir.workspace = true
[build-dependencies]
anyhow = { workspace = true }
vergen = { version = "8.3.2", features = ["build", "git", "gitcl"] }
[features] [features]
default = ["oneline-errors", "color-eyre"] default = ["oneline-errors", "color-eyre"]
oneline-eyre = ["backtrace-oneline", "backtrace"] oneline-eyre = ["backtrace-oneline", "backtrace"]

@ -0,0 +1,8 @@
use anyhow::Result;
use vergen::EmitBuilder;
fn main() -> Result<()> {
    // Export the git commit hash as the VERGEN_GIT_SHA env var at build
    // time; `git_sha(false)` requests the full (non-shortened) SHA.
    EmitBuilder::builder().git_sha(false).emit()
}

@ -1,3 +1,5 @@
pub use crate::metadata::AgentMetadata;
use std::{env, fmt::Debug, sync::Arc}; use std::{env, fmt::Debug, sync::Arc};
use async_trait::async_trait; use async_trait::async_trait;
@ -40,6 +42,7 @@ pub trait BaseAgent: Send + Sync + Debug {
/// Instantiate the agent from the standard settings object /// Instantiate the agent from the standard settings object
async fn from_settings( async fn from_settings(
agent_metadata: AgentMetadata,
settings: Self::Settings, settings: Self::Settings,
metrics: Arc<CoreMetrics>, metrics: Arc<CoreMetrics>,
agent_metrics: AgentMetrics, agent_metrics: AgentMetrics,
@ -72,6 +75,13 @@ pub async fn agent_main<A: BaseAgent>() -> Result<()> {
color_eyre::install()?; color_eyre::install()?;
} }
// Latest git commit hash at the time when agent was built.
// If .git was not present at the time of build,
// the variable defaults to "VERGEN_IDEMPOTENT_OUTPUT".
let git_sha = env!("VERGEN_GIT_SHA").to_owned();
let agent_metadata = AgentMetadata::new(git_sha);
let settings = A::Settings::load()?; let settings = A::Settings::load()?;
let core_settings: &Settings = settings.as_ref(); let core_settings: &Settings = settings.as_ref();
@ -80,6 +90,7 @@ pub async fn agent_main<A: BaseAgent>() -> Result<()> {
let agent_metrics = create_agent_metrics(&metrics)?; let agent_metrics = create_agent_metrics(&metrics)?;
let chain_metrics = create_chain_metrics(&metrics)?; let chain_metrics = create_chain_metrics(&metrics)?;
let agent = A::from_settings( let agent = A::from_settings(
agent_metadata,
settings, settings,
metrics.clone(), metrics.clone(),
agent_metrics, agent_metrics,

@ -15,6 +15,8 @@ pub use agent::*;
/// The local database used by agents /// The local database used by agents
pub mod db; pub mod db;
mod metadata;
pub mod metrics; pub mod metrics;
pub use metrics::*; pub use metrics::*;

@ -0,0 +1,9 @@
use derive_new::new;
use serde::{Deserialize, Serialize};
/// Metadata about an agent, serialized to JSON when reported
/// (e.g. the validator writes it as `metadata_latest.json`).
#[derive(Debug, Deserialize, Serialize, new)]
pub struct AgentMetadata {
/// Git commit hash of the sources the agent binary was built from.
/// NOTE(review): per the build setup, this falls back to vergen's
/// idempotent placeholder when `.git` was absent at build time.
pub git_sha: String,
}

@ -3,6 +3,7 @@ use std::fmt::Debug;
use async_trait::async_trait; use async_trait::async_trait;
use eyre::Result; use eyre::Result;
use crate::AgentMetadata;
use hyperlane_core::{SignedAnnouncement, SignedCheckpointWithMessageId}; use hyperlane_core::{SignedAnnouncement, SignedCheckpointWithMessageId};
/// A generic trait to read/write Checkpoints offchain /// A generic trait to read/write Checkpoints offchain
@ -27,6 +28,8 @@ pub trait CheckpointSyncer: Debug + Send + Sync {
&self, &self,
signed_checkpoint: &SignedCheckpointWithMessageId, signed_checkpoint: &SignedCheckpointWithMessageId,
) -> Result<()>; ) -> Result<()>;
/// Write the agent metadata to this syncer
async fn write_metadata(&self, metadata: &AgentMetadata) -> Result<()>;
/// Write the signed announcement to this syncer /// Write the signed announcement to this syncer
async fn write_announcement(&self, signed_announcement: &SignedAnnouncement) -> Result<()>; async fn write_announcement(&self, signed_announcement: &SignedAnnouncement) -> Result<()>;
/// Return the announcement storage location for this syncer /// Return the announcement storage location for this syncer

@ -1,4 +1,4 @@
use crate::CheckpointSyncer; use crate::{AgentMetadata, CheckpointSyncer};
use async_trait::async_trait; use async_trait::async_trait;
use derive_new::new; use derive_new::new;
use eyre::{bail, Result}; use eyre::{bail, Result};
@ -7,6 +7,7 @@ use std::fmt;
use ya_gcp::{storage::StorageClient, AuthFlow, ClientBuilder, ClientBuilderConfig}; use ya_gcp::{storage::StorageClient, AuthFlow, ClientBuilder, ClientBuilderConfig};
const LATEST_INDEX_KEY: &str = "gcsLatestIndexKey"; const LATEST_INDEX_KEY: &str = "gcsLatestIndexKey";
const METADATA_KEY: &str = "gcsMetadataKey";
const ANNOUNCEMENT_KEY: &str = "gcsAnnouncementKey"; const ANNOUNCEMENT_KEY: &str = "gcsAnnouncementKey";
/// Path to GCS users_secret file /// Path to GCS users_secret file
pub const GCS_USER_SECRET: &str = "GCS_USER_SECRET"; pub const GCS_USER_SECRET: &str = "GCS_USER_SECRET";
@ -174,6 +175,15 @@ impl CheckpointSyncer for GcsStorageClient {
Ok(()) Ok(())
} }
/// Write the agent metadata to this syncer as pretty-printed JSON,
/// stored under the fixed GCS object key `METADATA_KEY`.
async fn write_metadata(&self, metadata: &AgentMetadata) -> Result<()> {
let serialized_metadata = serde_json::to_string_pretty(metadata)?;
self.inner
.insert_object(&self.bucket, METADATA_KEY, serialized_metadata)
.await?;
Ok(())
}
/// Write the signed announcement to this syncer /// Write the signed announcement to this syncer
async fn write_announcement(&self, signed_announcement: &SignedAnnouncement) -> Result<()> { async fn write_announcement(&self, signed_announcement: &SignedAnnouncement) -> Result<()> {
self.inner self.inner

@ -1,12 +1,12 @@
use std::path::PathBuf; use std::path::PathBuf;
use crate::traits::CheckpointSyncer;
use crate::AgentMetadata;
use async_trait::async_trait; use async_trait::async_trait;
use eyre::{Context, Result}; use eyre::{Context, Result};
use hyperlane_core::{SignedAnnouncement, SignedCheckpointWithMessageId}; use hyperlane_core::{SignedAnnouncement, SignedCheckpointWithMessageId};
use prometheus::IntGauge; use prometheus::IntGauge;
use crate::traits::CheckpointSyncer;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
/// Type for reading/write to LocalStorage /// Type for reading/write to LocalStorage
pub struct LocalStorage { pub struct LocalStorage {
@ -40,6 +40,10 @@ impl LocalStorage {
fn announcement_file_path(&self) -> PathBuf { fn announcement_file_path(&self) -> PathBuf {
self.path.join("announcement.json") self.path.join("announcement.json")
} }
/// Path of the JSON file holding the agent metadata within this
/// local storage directory.
fn metadata_file_path(&self) -> PathBuf {
self.path.join("metadata_latest.json")
}
} }
#[async_trait] #[async_trait]
@ -91,6 +95,15 @@ impl CheckpointSyncer for LocalStorage {
Ok(()) Ok(())
} }
/// Write the agent metadata to `metadata_latest.json` as pretty-printed
/// JSON, replacing any previous contents of the file.
async fn write_metadata(&self, metadata: &AgentMetadata) -> Result<()> {
let serialized_metadata = serde_json::to_string_pretty(metadata)?;
let path = self.metadata_file_path();
tokio::fs::write(&path, &serialized_metadata)
.await
.with_context(|| format!("Writing agent metadata to {path:?}"))?;
Ok(())
}
async fn write_announcement(&self, signed_announcement: &SignedAnnouncement) -> Result<()> { async fn write_announcement(&self, signed_announcement: &SignedAnnouncement) -> Result<()> {
let serialized_announcement = serde_json::to_string_pretty(signed_announcement)?; let serialized_announcement = serde_json::to_string_pretty(signed_announcement)?;
let path = self.announcement_file_path(); let path = self.announcement_file_path();

@ -14,7 +14,9 @@ use rusoto_s3::{GetObjectError, GetObjectRequest, PutObjectRequest, S3Client, S3
use tokio::time::timeout; use tokio::time::timeout;
use crate::types::utils; use crate::types::utils;
use crate::{settings::aws_credentials::AwsChainCredentialsProvider, CheckpointSyncer}; use crate::{
settings::aws_credentials::AwsChainCredentialsProvider, AgentMetadata, CheckpointSyncer,
};
/// The timeout for S3 requests. Rusoto doesn't offer timeout configuration /// The timeout for S3 requests. Rusoto doesn't offer timeout configuration
/// out of the box, so S3 requests must be wrapped with a timeout. /// out of the box, so S3 requests must be wrapped with a timeout.
@ -136,6 +138,10 @@ impl S3Storage {
"checkpoint_latest_index.json".to_owned() "checkpoint_latest_index.json".to_owned()
} }
/// Object key under which the agent metadata is stored in the bucket.
fn metadata_key() -> String {
"metadata_latest.json".to_owned()
}
fn announcement_key() -> String { fn announcement_key() -> String {
"announcement.json".to_owned() "announcement.json".to_owned()
} }
@ -188,6 +194,13 @@ impl CheckpointSyncer for S3Storage {
Ok(()) Ok(())
} }
/// Write the agent metadata to the S3 bucket as pretty-printed JSON
/// under the `metadata_latest.json` key.
async fn write_metadata(&self, metadata: &AgentMetadata) -> Result<()> {
let serialized_metadata = serde_json::to_string_pretty(metadata)?;
self.write_to_bucket(S3Storage::metadata_key(), &serialized_metadata)
.await?;
Ok(())
}
async fn write_announcement(&self, signed_announcement: &SignedAnnouncement) -> Result<()> { async fn write_announcement(&self, signed_announcement: &SignedAnnouncement) -> Result<()> {
let serialized_announcement = serde_json::to_string_pretty(signed_announcement)?; let serialized_announcement = serde_json::to_string_pretty(signed_announcement)?;
self.write_to_bucket(S3Storage::announcement_key(), &serialized_announcement) self.write_to_bucket(S3Storage::announcement_key(), &serialized_announcement)

@ -10,6 +10,7 @@ publish.workspace = true
version.workspace = true version.workspace = true
[dependencies] [dependencies]
hyperlane-base = { path = "../../hyperlane-base" }
hyperlane-core = { path = "../../hyperlane-core", features = ["float"]} hyperlane-core = { path = "../../hyperlane-core", features = ["float"]}
hyperlane-cosmos = { path = "../../chains/hyperlane-cosmos"} hyperlane-cosmos = { path = "../../chains/hyperlane-cosmos"}
toml_edit.workspace = true toml_edit.workspace = true
@ -38,5 +39,9 @@ relayer = { path = "../../agents/relayer"}
hyperlane-cosmwasm-interface.workspace = true hyperlane-cosmwasm-interface.workspace = true
cosmwasm-schema.workspace = true cosmwasm-schema.workspace = true
[build-dependencies]
anyhow = { workspace = true }
vergen = { version = "8.3.2", features = ["build", "git", "gitcl"] }
[features] [features]
cosmos = [] cosmos = []

@ -0,0 +1,8 @@
use anyhow::Result;
use vergen::EmitBuilder;
fn main() -> Result<()> {
    // Bake the full git commit SHA into the test binary as the
    // VERGEN_GIT_SHA env var so invariants can compare against it.
    EmitBuilder::builder().git_sha(false).emit()
}

@ -1,213 +1,7 @@
use std::fs::File; pub use common::SOL_MESSAGES_EXPECTED;
use std::path::Path; pub use post_startup_invariants::post_startup_invariants;
pub use termination_invariants::termination_invariants_met;
use crate::config::Config; mod common;
use crate::metrics::agent_balance_sum; mod post_startup_invariants;
use crate::utils::get_matching_lines; mod termination_invariants;
use maplit::hashmap;
use relayer::GAS_EXPENDITURE_LOG_MESSAGE;
use crate::logging::log;
use crate::solana::solana_termination_invariants_met;
use crate::{fetch_metric, AGENT_LOGGING_DIR, ZERO_MERKLE_INSERTION_KATHY_MESSAGES};
// This number should be even, so the messages can be split into two equal halves
// sent before and after the relayer spins up, to avoid rounding errors.
pub const SOL_MESSAGES_EXPECTED: u32 = 20;
/// Use the metrics to check if the relayer queues are empty and the expected
/// number of messages have been sent.
pub fn termination_invariants_met(
config: &Config,
starting_relayer_balance: f64,
solana_cli_tools_path: Option<&Path>,
solana_config_path: Option<&Path>,
) -> eyre::Result<bool> {
let eth_messages_expected = (config.kathy_messages / 2) as u32 * 2;
let sol_messages_expected = if config.sealevel_enabled {
SOL_MESSAGES_EXPECTED
} else {
0
};
let total_messages_expected = eth_messages_expected + sol_messages_expected;
let lengths = fetch_metric("9092", "hyperlane_submitter_queue_length", &hashmap! {})?;
assert!(!lengths.is_empty(), "Could not find queue length metric");
if lengths.iter().sum::<u32>() != ZERO_MERKLE_INSERTION_KATHY_MESSAGES {
log!("Relayer queues not empty. Lengths: {:?}", lengths);
return Ok(false);
};
// Also ensure the counter is as expected (total number of messages), summed
// across all mailboxes.
let msg_processed_count =
fetch_metric("9092", "hyperlane_messages_processed_count", &hashmap! {})?
.iter()
.sum::<u32>();
if msg_processed_count != total_messages_expected {
log!(
"Relayer has {} processed messages, expected {}",
msg_processed_count,
total_messages_expected
);
return Ok(false);
}
let gas_payment_events_count = fetch_metric(
"9092",
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "gas_payments"},
)?
.iter()
.sum::<u32>();
let log_file_path = AGENT_LOGGING_DIR.join("RLY-output.log");
const STORING_NEW_MESSAGE_LOG_MESSAGE: &str = "Storing new message in db";
const LOOKING_FOR_EVENTS_LOG_MESSAGE: &str = "Looking for events in index range";
const HYPER_INCOMING_BODY_LOG_MESSAGE: &str = "incoming body completed";
const TX_ID_INDEXING_LOG_MESSAGE: &str = "Found log(s) for tx id";
let relayer_logfile = File::open(log_file_path)?;
let invariant_logs = &[
STORING_NEW_MESSAGE_LOG_MESSAGE,
LOOKING_FOR_EVENTS_LOG_MESSAGE,
GAS_EXPENDITURE_LOG_MESSAGE,
HYPER_INCOMING_BODY_LOG_MESSAGE,
TX_ID_INDEXING_LOG_MESSAGE,
];
let log_counts = get_matching_lines(&relayer_logfile, invariant_logs);
// Zero insertion messages don't reach `submit` stage where gas is spent, so we only expect these logs for the other messages.
// TODO: Sometimes we find more logs than expected. This may either mean that gas is deducted twice for the same message due to a bug,
// or that submitting the message transaction fails for some messages. Figure out which is the case and convert this check to
// strict equality.
// EDIT: Having had a quick look, it seems like there are some legitimate reverts happening in the confirm step
// (`Transaction attempting to process message either reverted or was reorged`)
// in which case more gas expenditure logs than messages are expected.
assert!(
log_counts.get(GAS_EXPENDITURE_LOG_MESSAGE).unwrap() >= &total_messages_expected,
"Didn't record gas payment for all delivered messages"
);
// These tests check that we fixed https://github.com/hyperlane-xyz/hyperlane-monorepo/issues/3915, where some logs would not show up
assert!(
log_counts.get(STORING_NEW_MESSAGE_LOG_MESSAGE).unwrap() > &0,
"Didn't find any logs about storing messages in db"
);
assert!(
log_counts.get(LOOKING_FOR_EVENTS_LOG_MESSAGE).unwrap() > &0,
"Didn't find any logs about looking for events in index range"
);
let total_tx_id_log_count = log_counts.get(TX_ID_INDEXING_LOG_MESSAGE).unwrap();
assert!(
// there are 3 txid-indexed events:
// - relayer: merkle insertion and gas payment
// - scraper: gas payment
// some logs are emitted for multiple events, so requiring there to be at least
// `config.kathy_messages` logs is a reasonable approximation, since all three of these events
// are expected to be logged for each message.
*total_tx_id_log_count as u64 >= config.kathy_messages,
"Didn't find as many tx id logs as expected. Found {} and expected {}",
total_tx_id_log_count,
config.kathy_messages
);
assert!(
log_counts.get(HYPER_INCOMING_BODY_LOG_MESSAGE).is_none(),
"Verbose logs not expected at the log level set in e2e"
);
let gas_payment_sealevel_events_count = fetch_metric(
"9092",
"hyperlane_contract_sync_stored_events",
&hashmap! {
"data_type" => "gas_payments",
"chain" => "sealeveltest",
},
)?
.iter()
.sum::<u32>();
// TestSendReceiver randomly breaks gas payments up into
// two. So we expect at least as many gas payments as messages.
if gas_payment_events_count < total_messages_expected {
log!(
"Relayer has {} gas payment events, expected at least {}",
gas_payment_events_count,
total_messages_expected
);
return Ok(false);
}
if let Some((solana_cli_tools_path, solana_config_path)) =
solana_cli_tools_path.zip(solana_config_path)
{
if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) {
log!("Solana termination invariants not met");
return Ok(false);
}
}
let dispatched_messages_scraped = fetch_metric(
"9093",
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "message_dispatch"},
)?
.iter()
.sum::<u32>();
if dispatched_messages_scraped != eth_messages_expected + ZERO_MERKLE_INSERTION_KATHY_MESSAGES {
log!(
"Scraper has scraped {} dispatched messages, expected {}",
dispatched_messages_scraped,
eth_messages_expected
);
return Ok(false);
}
let gas_payments_scraped = fetch_metric(
"9093",
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "gas_payment"},
)?
.iter()
.sum::<u32>();
// The relayer and scraper should have the same number of gas payments.
// TODO: Sealevel gas payments are not yet included in the event count.
// For now, treat as an exception in the invariants.
let expected_gas_payments = gas_payment_events_count - gas_payment_sealevel_events_count;
if gas_payments_scraped != expected_gas_payments {
log!(
"Scraper has scraped {} gas payments, expected {}",
gas_payments_scraped,
expected_gas_payments
);
return Ok(false);
}
let delivered_messages_scraped = fetch_metric(
"9093",
"hyperlane_contract_sync_stored_events",
&hashmap! {"data_type" => "message_delivery"},
)?
.iter()
.sum::<u32>();
if delivered_messages_scraped != eth_messages_expected {
log!(
"Scraper has scraped {} delivered messages, expected {}",
delivered_messages_scraped,
eth_messages_expected
);
return Ok(false);
}
let ending_relayer_balance: f64 = agent_balance_sum(9092).unwrap();
// Make sure the balance was correctly updated in the metrics.
if starting_relayer_balance <= ending_relayer_balance {
log!(
"Expected starting relayer balance to be greater than ending relayer balance, but got {} <= {}",
starting_relayer_balance,
ending_relayer_balance
);
return Ok(false);
}
log!("Termination invariants have been meet");
Ok(true)
}

@ -0,0 +1,3 @@
/// Expected number of Sealevel (Solana) messages in an E2E run.
/// This number should be even, so the messages can be split into two equal
/// halves sent before and after the relayer spins up, to avoid rounding errors.
pub const SOL_MESSAGES_EXPECTED: u32 = 20;

@ -0,0 +1,54 @@
use std::fs::File;
use std::io::BufReader;
use hyperlane_base::AgentMetadata;
use crate::DynPath;
/// Invariants checked right after the agents start up; currently only that
/// every validator has written its metadata file to its checkpoints dir.
pub fn post_startup_invariants(checkpoints_dirs: &[DynPath]) -> bool {
post_startup_validator_metadata_written(checkpoints_dirs)
}
/// Returns true iff every checkpoints directory contains a
/// `metadata_latest.json` whose `git_sha` matches this build's git SHA.
fn post_startup_validator_metadata_written(checkpoints_dirs: &[DynPath]) -> bool {
    // Git commit hash baked in at build time by vergen (see build.rs).
    let expected_git_sha = env!("VERGEN_GIT_SHA");
    // `all` short-circuits on the first failing directory; this replaces
    // the original double-negation (`!…any(|b| !b)`) with the direct form.
    checkpoints_dirs
        .iter()
        .all(|path| metadata_file_check(expected_git_sha, path))
}
/// Checks that `path` contains a readable `metadata_latest.json` whose
/// `git_sha` equals `expected_git_sha`. Any missing directory/file, I/O
/// error, or deserialization failure yields `false` instead of panicking,
/// so the invariant check reports cleanly.
fn metadata_file_check(expected_git_sha: &str, path: &DynPath) -> bool {
    let path = (*path).as_ref().as_ref();
    if !path.exists() {
        return false;
    }
    let file = path.join("metadata_latest.json");
    if !file.exists() {
        return false;
    }
    // Collapse the original unwrap-or-return ladder into Option combinators:
    // open -> deserialize -> compare, mapping every failure to `false`.
    File::open(&file)
        .ok()
        .and_then(|f| serde_json::from_reader::<_, AgentMetadata>(BufReader::new(f)).ok())
        .map_or(false, |metadata| metadata.git_sha == expected_git_sha)
}

@ -0,0 +1,210 @@
use std::fs::File;
use std::path::Path;
use crate::config::Config;
use crate::metrics::agent_balance_sum;
use crate::utils::get_matching_lines;
use maplit::hashmap;
use relayer::GAS_EXPENDITURE_LOG_MESSAGE;
use crate::invariants::SOL_MESSAGES_EXPECTED;
use crate::logging::log;
use crate::solana::solana_termination_invariants_met;
use crate::{fetch_metric, AGENT_LOGGING_DIR, ZERO_MERKLE_INSERTION_KATHY_MESSAGES};
/// Use the metrics to check if the relayer queues are empty and the expected
/// number of messages have been sent.
/// Use the metrics to check if the relayer queues are empty and the expected
/// number of messages have been sent.
///
/// Returns `Ok(false)` when an invariant is not *yet* satisfied (the caller is
/// expected to poll again later), `Ok(true)` when all termination invariants
/// hold, and panics via `assert!` for conditions that can never self-heal
/// (e.g. missing metrics or missing log lines).
///
/// * `config` - e2e run configuration (message counts, sealevel toggle).
/// * `starting_relayer_balance` - relayer balance captured at startup; must
///   strictly decrease by the end of the run (gas was actually spent).
/// * `solana_cli_tools_path` / `solana_config_path` - both must be `Some` for
///   the Solana-specific invariants to be checked; they are `zip`ped together.
pub fn termination_invariants_met(
    config: &Config,
    starting_relayer_balance: f64,
    solana_cli_tools_path: Option<&Path>,
    solana_config_path: Option<&Path>,
) -> eyre::Result<bool> {
    // Kathy rounds its message count down to an even number across the two
    // EVM chains; mirror that rounding here.
    let eth_messages_expected = (config.kathy_messages / 2) as u32 * 2;
    let sol_messages_expected = if config.sealevel_enabled {
        SOL_MESSAGES_EXPECTED
    } else {
        0
    };
    let total_messages_expected = eth_messages_expected + sol_messages_expected;

    // Port 9092 serves the relayer's metrics; 9093 serves the scraper's.
    let lengths = fetch_metric("9092", "hyperlane_submitter_queue_length", &hashmap! {})?;
    assert!(!lengths.is_empty(), "Could not find queue length metric");
    // Zero-merkle-insertion messages stay in the queues, so the queues are
    // "empty" when only those remain.
    if lengths.iter().sum::<u32>() != ZERO_MERKLE_INSERTION_KATHY_MESSAGES {
        log!("Relayer queues not empty. Lengths: {:?}", lengths);
        return Ok(false);
    };

    // Also ensure the counter is as expected (total number of messages), summed
    // across all mailboxes.
    let msg_processed_count =
        fetch_metric("9092", "hyperlane_messages_processed_count", &hashmap! {})?
            .iter()
            .sum::<u32>();
    if msg_processed_count != total_messages_expected {
        log!(
            "Relayer has {} processed messages, expected {}",
            msg_processed_count,
            total_messages_expected
        );
        return Ok(false);
    }

    let gas_payment_events_count = fetch_metric(
        "9092",
        "hyperlane_contract_sync_stored_events",
        &hashmap! {"data_type" => "gas_payments"},
    )?
    .iter()
    .sum::<u32>();

    // Check the relayer's log file for lines that must (or must not) appear.
    let log_file_path = AGENT_LOGGING_DIR.join("RLY-output.log");
    const STORING_NEW_MESSAGE_LOG_MESSAGE: &str = "Storing new message in db";
    const LOOKING_FOR_EVENTS_LOG_MESSAGE: &str = "Looking for events in index range";
    const HYPER_INCOMING_BODY_LOG_MESSAGE: &str = "incoming body completed";
    const TX_ID_INDEXING_LOG_MESSAGE: &str = "Found log(s) for tx id";
    let relayer_logfile = File::open(log_file_path)?;
    let invariant_logs = &[
        STORING_NEW_MESSAGE_LOG_MESSAGE,
        LOOKING_FOR_EVENTS_LOG_MESSAGE,
        GAS_EXPENDITURE_LOG_MESSAGE,
        HYPER_INCOMING_BODY_LOG_MESSAGE,
        TX_ID_INDEXING_LOG_MESSAGE,
    ];
    let log_counts = get_matching_lines(&relayer_logfile, invariant_logs);

    // Zero insertion messages don't reach `submit` stage where gas is spent, so we only expect these logs for the other messages.
    // TODO: Sometimes we find more logs than expected. This may either mean that gas is deducted twice for the same message due to a bug,
    // or that submitting the message transaction fails for some messages. Figure out which is the case and convert this check to
    // strict equality.
    // EDIT: Having had a quick look, it seems like there are some legitimate reverts happening in the confirm step
    // (`Transaction attempting to process message either reverted or was reorged`)
    // in which case more gas expenditure logs than messages are expected.
    assert!(
        log_counts.get(GAS_EXPENDITURE_LOG_MESSAGE).unwrap() >= &total_messages_expected,
        "Didn't record gas payment for all delivered messages"
    );
    // These tests check that we fixed https://github.com/hyperlane-xyz/hyperlane-monorepo/issues/3915, where some logs would not show up
    assert!(
        log_counts.get(STORING_NEW_MESSAGE_LOG_MESSAGE).unwrap() > &0,
        "Didn't find any logs about storing messages in db"
    );
    assert!(
        log_counts.get(LOOKING_FOR_EVENTS_LOG_MESSAGE).unwrap() > &0,
        "Didn't find any logs about looking for events in index range"
    );
    let total_tx_id_log_count = log_counts.get(TX_ID_INDEXING_LOG_MESSAGE).unwrap();
    assert!(
        // there are 3 txid-indexed events:
        // - relayer: merkle insertion and gas payment
        // - scraper: gas payment
        // some logs are emitted for multiple events, so requiring there to be at least
        // `config.kathy_messages` logs is a reasonable approximation, since all three of these events
        // are expected to be logged for each message.
        *total_tx_id_log_count as u64 >= config.kathy_messages,
        "Didn't find as many tx id logs as expected. Found {} and expected {}",
        total_tx_id_log_count,
        config.kathy_messages
    );
    // Verbose hyper logs would indicate the e2e log level is misconfigured.
    assert!(
        log_counts.get(HYPER_INCOMING_BODY_LOG_MESSAGE).is_none(),
        "Verbose logs not expected at the log level set in e2e"
    );

    let gas_payment_sealevel_events_count = fetch_metric(
        "9092",
        "hyperlane_contract_sync_stored_events",
        &hashmap! {
            "data_type" => "gas_payments",
            "chain" => "sealeveltest",
        },
    )?
    .iter()
    .sum::<u32>();
    // TestSendReceiver randomly breaks gas payments up into
    // two. So we expect at least as many gas payments as messages.
    if gas_payment_events_count < total_messages_expected {
        log!(
            "Relayer has {} gas payment events, expected at least {}",
            gas_payment_events_count,
            total_messages_expected
        );
        return Ok(false);
    }

    // Solana invariants are only checked when both tool paths are provided.
    if let Some((solana_cli_tools_path, solana_config_path)) =
        solana_cli_tools_path.zip(solana_config_path)
    {
        if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) {
            log!("Solana termination invariants not met");
            return Ok(false);
        }
    }

    // The scraper also sees the zero-merkle-insertion dispatches, so include
    // them in the expected count (and report that same value on failure).
    let dispatched_messages_scraped = fetch_metric(
        "9093",
        "hyperlane_contract_sync_stored_events",
        &hashmap! {"data_type" => "message_dispatch"},
    )?
    .iter()
    .sum::<u32>();
    if dispatched_messages_scraped != eth_messages_expected + ZERO_MERKLE_INSERTION_KATHY_MESSAGES {
        log!(
            "Scraper has scraped {} dispatched messages, expected {}",
            dispatched_messages_scraped,
            // Report the value the check actually uses, not just the eth count.
            eth_messages_expected + ZERO_MERKLE_INSERTION_KATHY_MESSAGES
        );
        return Ok(false);
    }

    let gas_payments_scraped = fetch_metric(
        "9093",
        "hyperlane_contract_sync_stored_events",
        &hashmap! {"data_type" => "gas_payment"},
    )?
    .iter()
    .sum::<u32>();
    // The relayer and scraper should have the same number of gas payments.
    // TODO: Sealevel gas payments are not yet included in the event count.
    // For now, treat as an exception in the invariants.
    let expected_gas_payments = gas_payment_events_count - gas_payment_sealevel_events_count;
    if gas_payments_scraped != expected_gas_payments {
        log!(
            "Scraper has scraped {} gas payments, expected {}",
            gas_payments_scraped,
            expected_gas_payments
        );
        return Ok(false);
    }

    let delivered_messages_scraped = fetch_metric(
        "9093",
        "hyperlane_contract_sync_stored_events",
        &hashmap! {"data_type" => "message_delivery"},
    )?
    .iter()
    .sum::<u32>();
    if delivered_messages_scraped != eth_messages_expected {
        log!(
            "Scraper has scraped {} delivered messages, expected {}",
            delivered_messages_scraped,
            eth_messages_expected
        );
        return Ok(false);
    }

    // Make sure the balance was correctly updated in the metrics: relaying
    // costs gas, so the ending balance must be strictly lower.
    let ending_relayer_balance: f64 = agent_balance_sum(9092).unwrap();
    if starting_relayer_balance <= ending_relayer_balance {
        log!(
            "Expected starting relayer balance to be greater than ending relayer balance, but got {} <= {}",
            starting_relayer_balance,
            ending_relayer_balance
        );
        return Ok(false);
    }

    log!("Termination invariants have been met");
    Ok(true)
}

@ -36,7 +36,7 @@ use tempfile::tempdir;
use crate::{ use crate::{
config::Config, config::Config,
ethereum::start_anvil, ethereum::start_anvil,
invariants::{termination_invariants_met, SOL_MESSAGES_EXPECTED}, invariants::{post_startup_invariants, termination_invariants_met, SOL_MESSAGES_EXPECTED},
metrics::agent_balance_sum, metrics::agent_balance_sum,
solana::*, solana::*,
utils::{concat_path, make_static, stop_child, AgentHandles, ArbitraryData, TaskHandle}, utils::{concat_path, make_static, stop_child, AgentHandles, ArbitraryData, TaskHandle},
@ -454,6 +454,14 @@ fn main() -> ExitCode {
let loop_start = Instant::now(); let loop_start = Instant::now();
// give things a chance to fully start. // give things a chance to fully start.
sleep(Duration::from_secs(10)); sleep(Duration::from_secs(10));
if !post_startup_invariants(&checkpoints_dirs) {
log!("Failure: Post startup invariants are not met");
return report_test_result(true);
} else {
log!("Success: Post startup invariants are met");
}
let mut failure_occurred = false; let mut failure_occurred = false;
let starting_relayer_balance: f64 = agent_balance_sum(9092).unwrap(); let starting_relayer_balance: f64 = agent_balance_sum(9092).unwrap();
while !SHUTDOWN.load(Ordering::Relaxed) { while !SHUTDOWN.load(Ordering::Relaxed) {
@ -499,6 +507,10 @@ fn main() -> ExitCode {
sleep(Duration::from_secs(5)); sleep(Duration::from_secs(5));
} }
report_test_result(failure_occurred)
}
fn report_test_result(failure_occurred: bool) -> ExitCode {
if failure_occurred { if failure_occurred {
log!("E2E tests failed"); log!("E2E tests failed");
ExitCode::FAILURE ExitCode::FAILURE

Loading…
Cancel
Save