Merge branch 'main' of github.com:abacus-network/abacus-monorepo into trevor/read-txs

Branch: trevor/read-txs-nov-8
Trevor Porter · 3 weeks ago · commit 9eaa1559cc
11 changed files:

  1. rust/main/agents/scraper/migration/bin/generate_entities.rs (4 changes)
  2. rust/main/agents/scraper/migration/src/m20230309_000003_create_table_transaction.rs (9 changes)
  3. rust/main/agents/scraper/src/db/generated/transaction.rs (3 changes)
  4. rust/main/agents/scraper/src/db/txn.rs (1 change)
  5. rust/main/chains/hyperlane-cosmos/src/providers/cosmos/provider.rs (1 change)
  6. rust/main/chains/hyperlane-ethereum/src/rpc_clients/provider.rs (7 changes)
  7. rust/main/chains/hyperlane-fuel/src/provider.rs (1 change)
  8. rust/main/chains/hyperlane-sealevel/src/provider.rs (1 change)
  9. rust/main/hyperlane-base/src/types/gcs_storage.rs (154 changes)
  10. rust/main/hyperlane-core/src/types/chain_data.rs (2 changes)
  11. typescript/infra/config/environments/mainnet3/agent.ts (2 changes)

@@ -57,8 +57,8 @@ impl Drop for PostgresDockerContainer {
 async fn main() -> Result<(), DbErr> {
     assert_eq!(
         std::env::current_dir().unwrap().file_name().unwrap(),
-        "rust",
-        "Must run from the rust dir"
+        "main",
+        "Must run from the rust/main dir"
     );
     let postgres = PostgresDockerContainer::start();

@@ -52,6 +52,13 @@ impl MigrationTrait for Migration {
             .col(ColumnDef::new_with_type(Transaction::Recipient, Address).borrow_mut())
             .col(ColumnDef::new_with_type(Transaction::GasUsed, Wei).not_null())
             .col(ColumnDef::new_with_type(Transaction::CumulativeGasUsed, Wei).not_null())
+            .col(
+                ColumnDef::new_with_type(
+                    Transaction::RawInputData,
+                    ColumnType::Binary(BlobSize::Blob(None)),
+                )
+                .borrow_mut(),
+            )
             .foreign_key(
                 ForeignKey::create()
                     .from_col(Transaction::BlockId)
@@ -128,4 +135,6 @@ pub enum Transaction {
     GasUsed,
     /// Cumulative gas used within the block after this was executed
     CumulativeGasUsed,
+    /// Raw input data from Ethereum transaction
+    RawInputData,
 }

@@ -27,6 +27,7 @@ pub struct Model {
     pub recipient: Option<Vec<u8>>,
     pub gas_used: BigDecimal,
     pub cumulative_gas_used: BigDecimal,
+    pub raw_input_data: Option<Vec<u8>>,
 }
 #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
@@ -45,6 +46,7 @@ pub enum Column {
     Recipient,
     GasUsed,
     CumulativeGasUsed,
+    RawInputData,
 }
 #[derive(Copy, Clone, Debug, EnumIter, DerivePrimaryKey)]
@@ -85,6 +87,7 @@ impl ColumnTrait for Column {
             Self::Recipient => ColumnType::Binary(BlobSize::Blob(None)).def().null(),
             Self::GasUsed => ColumnType::Decimal(Some((78u32, 0u32))).def(),
             Self::CumulativeGasUsed => ColumnType::Decimal(Some((78u32, 0u32))).def(),
+            Self::RawInputData => ColumnType::Binary(BlobSize::Blob(None)).def().null(),
         }
     }
 }

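Aside: the regenerated SeaORM entity above exposes the new column as an ordinary nullable field. A minimal sketch of reading it back (hypothetical helper, not part of this diff; assumes the generated `transaction` module path and an `i64` primary key):

    use sea_orm::{DatabaseConnection, DbErr, EntityTrait};

    use crate::db::generated::transaction;

    // Hypothetical helper: fetch a transaction row and return its raw input
    // data, which is NULL for chains that don't populate the column.
    async fn raw_input_for(db: &DatabaseConnection, id: i64) -> Result<Option<Vec<u8>>, DbErr> {
        Ok(transaction::Entity::find_by_id(id)
            .one(db)
            .await?
            .and_then(|model| model.raw_input_data))
    }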
@@ -94,6 +94,7 @@ impl ScraperDb {
                 recipient: Set(txn.recipient.as_ref().map(address_to_bytes)),
                 max_fee_per_gas: Set(txn.max_fee_per_gas.map(u256_to_decimal)),
                 cumulative_gas_used: Set(u256_to_decimal(receipt.cumulative_gas_used)),
+                raw_input_data: Set(txn.raw_input_data.clone()),
             })
         })
         .collect::<Result<Vec<_>>>()?;

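Once stored, the bytes follow the standard EVM calldata layout: a 4-byte function selector followed by ABI-encoded arguments. A minimal sketch of splitting them (hypothetical, not part of this diff):

    // Sketch: split stored calldata into (selector, ABI-encoded args).
    // Plain value transfers have empty input data, so this returns None.
    fn split_calldata(raw_input_data: &[u8]) -> Option<(&[u8], &[u8])> {
        if raw_input_data.len() < 4 {
            return None;
        }
        Some(raw_input_data.split_at(4))
    }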
@@ -441,6 +441,7 @@ impl HyperlaneProvider for CosmosProvider {
                 cumulative_gas_used: U256::from(response.tx_result.gas_used),
                 effective_gas_price: Some(gas_price),
             }),
+            raw_input_data: None,
         };
         Ok(tx_info)

@@ -106,7 +106,7 @@ where
             })
             .transpose()?;
-        Ok(TxnInfo {
+        let txn_info = TxnInfo {
             hash: *hash,
             max_fee_per_gas: txn.max_fee_per_gas.map(Into::into),
             max_priority_fee_per_gas: txn.max_priority_fee_per_gas.map(Into::into),
@@ -116,7 +116,10 @@ where
             sender: txn.from.into(),
             recipient: txn.to.map(Into::into),
             receipt,
-        })
+            raw_input_data: Some(txn.input.to_vec()),
+        };
+        Ok(txn_info)
     }
     #[instrument(err, skip(self))]

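The EVM provider is the only one that populates the field: `txn.input` is the calldata on the ethers `Transaction` type, and `to_vec()` copies it into an owned buffer. The same conversion in isolation (a sketch; the `ethers` import is the only assumption beyond this diff):

    use ethers::types::Transaction;

    // Sketch: `input` is `ethers::types::Bytes`, which derefs to `[u8]`,
    // so `to_vec()` yields the owned `Vec<u8>` that `TxnInfo` stores.
    fn extract_input(txn: &Transaction) -> Option<Vec<u8>> {
        Some(txn.input.to_vec())
    }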
@@ -381,6 +381,7 @@ impl HyperlaneProvider for FuelProvider {
                 gas_price: Some(gas_price.into()),
                 recipient,
                 receipt: None,
+                raw_input_data: None,
             })
         }
         None => Err(ChainCommunicationError::CustomError(format!(

@@ -116,6 +116,7 @@ impl HyperlaneProvider for SealevelProvider {
             sender: Default::default(),
             recipient: None,
             receipt: Some(receipt),
+            raw_input_data: None,
         })
     }

@@ -4,6 +4,7 @@ use derive_new::new;
 use eyre::{bail, Result};
 use hyperlane_core::{ReorgEvent, SignedAnnouncement, SignedCheckpointWithMessageId};
 use std::fmt;
+use tracing::{error, info, instrument};
 use ya_gcp::{
     storage::{
         api::{error::HttpStatusError, http::StatusCode, Error},
@@ -16,6 +17,7 @@ const LATEST_INDEX_KEY: &str = "gcsLatestIndexKey";
 const METADATA_KEY: &str = "gcsMetadataKey";
 const ANNOUNCEMENT_KEY: &str = "gcsAnnouncementKey";
 const REORG_FLAG_KEY: &str = "gcsReorgFlagKey";
 /// Path to GCS users_secret file
 pub const GCS_USER_SECRET: &str = "GCS_USER_SECRET";
 /// Path to GCS Service account key
@@ -80,12 +82,14 @@ pub struct GcsStorageClient {
     inner: StorageClient,
     // bucket name of this client's storage
     bucket: String,
+    // folder name of this client's storage
+    folder: Option<String>,
 }
 impl GcsStorageClientBuilder {
     /// Instantiates `ya_gcp:StorageClient` based on provided auth method
     /// # Param
-    /// * `baucket_name` - String name of target bucket to work with, will be used by all store and get ops
+    /// * `bucket_name` - String name of target bucket to work with, will be used by all store and get ops
     pub async fn build(
         self,
         bucket_name: impl Into<String>,
@@ -94,21 +98,71 @@ impl GcsStorageClientBuilder {
         let inner = ClientBuilder::new(ClientBuilderConfig::new().auth_flow(self.auth))
             .await?
             .build_storage_client();
-        let bucket = if let Some(folder) = folder {
-            format! {"{}/{}", bucket_name.into(), folder}
-        } else {
-            bucket_name.into()
-        };
-        Ok(GcsStorageClient { inner, bucket })
+        let bucket = bucket_name.into();
+        let mut processed_folder = folder;
+        if let Some(ref mut folder_str) = processed_folder {
+            if folder_str.ends_with('/') {
+                folder_str.truncate(folder_str.trim_end_matches('/').len());
+                info!(
+                    "Trimmed trailing '/' from folder name. New folder: '{}'",
+                    folder_str
+                );
+            }
+        }
+        GcsStorageClient::validate_bucket_name(&bucket)?;
+        Ok(GcsStorageClient {
+            inner,
+            bucket,
+            folder: processed_folder,
+        })
     }
 }
 impl GcsStorageClient {
-    // convenience formatter
+    // Convenience formatter
     fn get_checkpoint_key(index: u32) -> String {
         format!("checkpoint_{index}_with_id.json")
     }
+    fn object_path(&self, object_name: &str) -> String {
+        if let Some(folder) = &self.folder {
+            format!("{}/{}", folder, object_name)
+        } else {
+            object_name.to_string()
+        }
+    }
+    fn validate_bucket_name(bucket: &str) -> Result<()> {
+        if bucket.contains('/') {
+            error!("Bucket name '{}' has an invalid symbol '/'", bucket);
+            bail!("Bucket name '{}' has an invalid symbol '/'", bucket)
+        } else {
+            Ok(())
+        }
+    }
+    /// Uploads data to GCS and logs the result.
+    #[instrument(skip(self, data))]
+    async fn upload_and_log(&self, object_name: &str, data: Vec<u8>) -> Result<()> {
+        match self
+            .inner
+            .insert_object(&self.bucket, object_name, data)
+            .await
+        {
+            Ok(_) => {
+                info!("Successfully uploaded to '{}'", object_name);
+                Ok(())
+            }
+            Err(e) => {
+                error!("Failed to upload to '{}': {:?}", object_name, e);
+                Err(e.into())
+            }
+        }
+    }
     // #test only method[s]
     #[cfg(test)]
     pub(crate) async fn get_by_path(&self, path: impl AsRef<str>) -> Result<()> {
@@ -117,11 +171,12 @@ impl GcsStorageClient {
     }
 }
-// required by `CheckpointSyncer`
+// Required by `CheckpointSyncer`
 impl fmt::Debug for GcsStorageClient {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("S3Storage")
+        f.debug_struct("GcsStorageClient")
             .field("bucket", &self.bucket)
+            .field("folder", &self.folder)
             .finish()
     }
 }
@@ -129,6 +184,7 @@ impl fmt::Debug for GcsStorageClient {
 #[async_trait]
 impl CheckpointSyncer for GcsStorageClient {
     /// Read the highest index of this Syncer
+    #[instrument(skip(self))]
     async fn latest_index(&self) -> Result<Option<u32>> {
         match self.inner.get_object(&self.bucket, LATEST_INDEX_KEY).await {
             Ok(data) => Ok(Some(serde_json::from_slice(data.as_ref())?)),
@@ -144,15 +200,14 @@ impl CheckpointSyncer for GcsStorageClient {
     }
     /// Writes the highest index of this Syncer
+    #[instrument(skip(self, index))]
     async fn write_latest_index(&self, index: u32) -> Result<()> {
-        let d = serde_json::to_vec(&index)?;
-        self.inner
-            .insert_object(&self.bucket, LATEST_INDEX_KEY, d)
-            .await?;
-        Ok(())
+        let data = serde_json::to_vec(&index)?;
+        self.upload_and_log(LATEST_INDEX_KEY, data).await
     }
     /// Update the latest index of this syncer if necessary
+    #[instrument(skip(self, index))]
     async fn update_latest_index(&self, index: u32) -> Result<()> {
         let curr = self.latest_index().await?.unwrap_or(0);
         if index > curr {
@@ -162,6 +217,7 @@ impl CheckpointSyncer for GcsStorageClient {
     }
     /// Attempt to fetch the signed (checkpoint, messageId) tuple at this index
+    #[instrument(skip(self, index))]
     async fn fetch_checkpoint(&self, index: u32) -> Result<Option<SignedCheckpointWithMessageId>> {
         match self
             .inner
@@ -179,56 +235,64 @@ impl CheckpointSyncer for GcsStorageClient {
     }
     /// Write the signed (checkpoint, messageId) tuple to this syncer
+    #[instrument(skip(self, signed_checkpoint))]
     async fn write_checkpoint(
         &self,
         signed_checkpoint: &SignedCheckpointWithMessageId,
     ) -> Result<()> {
-        self.inner
-            .insert_object(
-                &self.bucket,
-                GcsStorageClient::get_checkpoint_key(signed_checkpoint.value.index),
-                serde_json::to_vec(signed_checkpoint)?,
-            )
-            .await?;
-        Ok(())
+        let object_name = Self::get_checkpoint_key(signed_checkpoint.value.index);
+        let data = serde_json::to_vec(signed_checkpoint)?;
+        self.upload_and_log(&object_name, data).await
     }
     /// Write the agent metadata to this syncer
+    #[instrument(skip(self, metadata))]
     async fn write_metadata(&self, metadata: &AgentMetadata) -> Result<()> {
-        let serialized_metadata = serde_json::to_string_pretty(metadata)?;
-        self.inner
-            .insert_object(&self.bucket, METADATA_KEY, serialized_metadata)
-            .await?;
-        Ok(())
+        let object_name = self.object_path(METADATA_KEY);
+        let data = serde_json::to_string_pretty(metadata)?.into_bytes();
+        self.upload_and_log(&object_name, data).await
     }
     /// Write the signed announcement to this syncer
-    async fn write_announcement(&self, signed_announcement: &SignedAnnouncement) -> Result<()> {
-        self.inner
-            .insert_object(
-                &self.bucket,
-                ANNOUNCEMENT_KEY,
-                serde_json::to_string(signed_announcement)?,
-            )
-            .await?;
-        Ok(())
+    #[instrument(skip(self, announcement))]
+    async fn write_announcement(&self, announcement: &SignedAnnouncement) -> Result<()> {
+        let object_name = self.object_path(ANNOUNCEMENT_KEY);
+        let data = serde_json::to_string(announcement)?.into_bytes();
+        self.upload_and_log(&object_name, data).await
     }
     /// Return the announcement storage location for this syncer
+    #[instrument(skip(self))]
     fn announcement_location(&self) -> String {
-        format!("gs://{}/{}", &self.bucket, ANNOUNCEMENT_KEY)
+        let location = format!(
+            "gs://{}/{}",
+            &self.bucket,
+            self.object_path(ANNOUNCEMENT_KEY)
+        );
+        info!("Announcement storage location: '{}'", location);
+        location
     }
-    async fn write_reorg_status(&self, reorged_event: &ReorgEvent) -> Result<()> {
-        let serialized_metadata = serde_json::to_string_pretty(reorged_event)?;
-        self.inner
-            .insert_object(&self.bucket, REORG_FLAG_KEY, serialized_metadata)
-            .await?;
-        Ok(())
+    /// Write the reorg status to this syncer
+    #[instrument(skip(self, reorg_event))]
+    async fn write_reorg_status(&self, reorg_event: &ReorgEvent) -> Result<()> {
+        let object_name = REORG_FLAG_KEY;
+        let data = serde_json::to_string_pretty(reorg_event)?.into_bytes();
+        self.upload_and_log(object_name, data).await
     }
+    /// Read the reorg status from this syncer
+    #[instrument(skip(self))]
     async fn reorg_status(&self) -> Result<Option<ReorgEvent>> {
-        Ok(None)
+        match self.inner.get_object(&self.bucket, REORG_FLAG_KEY).await {
+            Ok(data) => Ok(Some(serde_json::from_slice(data.as_ref())?)),
+            Err(e) => match e {
+                ObjectError::Failure(Error::HttpStatus(HttpStatusError(StatusCode::NOT_FOUND))) => {
+                    Ok(None)
+                }
+                _ => bail!(e),
+            },
+        }
     }
 }

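The net effect of the GCS changes: bucket names may no longer contain '/', and an optional folder is kept as a separate field and joined into each object path, instead of being glued onto the bucket name (which previously produced invalid bucket names). A standalone sketch mirroring the `object_path` logic above:

    // Mirrors GcsStorageClient::object_path: prefix the object with the
    // optional folder; the bucket name itself stays slash-free.
    fn object_path(folder: Option<&str>, object_name: &str) -> String {
        match folder {
            Some(folder) => format!("{}/{}", folder, object_name),
            None => object_name.to_string(),
        }
    }

    fn main() {
        // With a folder: objects land at gs://my-bucket/mainnet/gcsAnnouncementKey
        assert_eq!(
            object_path(Some("mainnet"), "gcsAnnouncementKey"),
            "mainnet/gcsAnnouncementKey"
        );
        // Without one, objects sit at the bucket root.
        assert_eq!(object_path(None, "gcsAnnouncementKey"), "gcsAnnouncementKey");
    }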
@@ -49,6 +49,8 @@ pub struct TxnInfo {
     /// If the txn has been processed, we can also report some additional
     /// information.
     pub receipt: Option<TxnReceiptInfo>,
+    /// Raw input data of a transaction
+    pub raw_input_data: Option<Vec<u8>>,
 }
 /// Information about the execution of a transaction.

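Note that `None` here means "not recorded for this chain" (the Cosmos, Fuel, and Sealevel hunks above all set it to `None`), not "empty calldata". A hypothetical consumer sketch that keeps the two cases distinct:

    use hyperlane_core::TxnInfo;

    // Hypothetical: distinguish absent input data from empty input data.
    fn describe_input(txn: &TxnInfo) -> &'static str {
        match txn.raw_input_data.as_deref() {
            Some([]) => "empty calldata (plain transfer)",
            Some(_) => "calldata present",
            None => "input data not recorded for this chain",
        }
    }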
@@ -484,7 +484,7 @@ const hyperlane: RootAgentConfig = {
       rpcConsensusType: RpcConsensusType.Fallback,
       docker: {
         repo,
-        tag: '45399a3-20241025-210128',
+        tag: '38bd1ae-20241031-125333',
       },
       resources: scraperResources,
     },
