fix: review refactor

pull/9168/head
Kirill Fedoseev 10 months ago
parent b24b20cc97
commit 15e827e69c
  1. 2
      .dialyzer-ignore
  2. 1
      CHANGELOG.md
  3. 7
      apps/block_scout_web/lib/block_scout_web/controllers/api/v2/blob_controller.ex
  4. 11
      apps/block_scout_web/lib/block_scout_web/views/api/v2/blob_view.ex
  5. 5
      apps/block_scout_web/lib/block_scout_web/views/api/v2/transaction_view.ex
  6. 1
      apps/explorer/lib/explorer/chain.ex
  7. 14
      apps/explorer/lib/explorer/chain/beacon/blob.ex
  8. 82
      apps/explorer/lib/explorer/chain/beacon/reader.ex
  9. 2
      apps/explorer/lib/explorer/chain/import/runner/beacon/blob_transactions.ex
  10. 4
      apps/explorer/test/support/factory.ex
  11. 22
      apps/indexer/lib/indexer/fetcher/beacon/blob.ex
  12. 3
      apps/indexer/lib/indexer/fetcher/beacon/client.ex
  13. 12
      config/runtime.exs
  14. 7
      docker-compose/envs/common-blockscout.env

@@ -23,4 +23,4 @@ lib/indexer/fetcher/zkevm/transaction_batch.ex:156
lib/indexer/fetcher/zkevm/transaction_batch.ex:252
lib/block_scout_web/views/api/v2/transaction_view.ex:431
lib/block_scout_web/views/api/v2/transaction_view.ex:472
lib/explorer/chain/transaction.ex:170
lib/explorer/chain/transaction.ex:171

@@ -4,6 +4,7 @@
### Features
- [#9168](https://github.com/blockscout/blockscout/pull/9168) - Support EIP4844 blobs indexing & API
- [#9155](https://github.com/blockscout/blockscout/pull/9155) - Allow bypassing avg block time in proxy implementation re-fetch ttl calculation
- [#9131](https://github.com/blockscout/blockscout/pull/9131) - Merge addresses stage with address referencing
- [#9072](https://github.com/blockscout/blockscout/pull/9072) - Add tracing by block logic for geth

@@ -1,13 +1,6 @@
defmodule BlockScoutWeb.API.V2.BlobController do
use BlockScoutWeb, :controller
import BlockScoutWeb.Chain,
only: [
next_page_params: 3,
paging_options: 1,
split_list_by_page: 1
]
alias Explorer.Chain
alias Explorer.Chain.Beacon.Reader

@@ -1,7 +1,6 @@
defmodule BlockScoutWeb.API.V2.BlobView do
use BlockScoutWeb, :view
alias BlockScoutWeb.API.V2.Helper
alias Explorer.Chain.Beacon.Blob
def render("blob.json", %{blob: blob, transaction_hashes: transaction_hashes}) do
@@ -20,13 +19,9 @@ defmodule BlockScoutWeb.API.V2.BlobView do
def prepare_blob(blob) do
%{
"hash" => blob.hash,
"blob_data" => encode_binary(blob.blob_data),
"kzg_commitment" => encode_binary(blob.kzg_commitment),
"kzg_proof" => encode_binary(blob.kzg_proof)
"blob_data" => blob.blob_data,
"kzg_commitment" => blob.kzg_commitment,
"kzg_proof" => blob.kzg_proof
}
end
defp encode_binary(binary) do
"0x" <> Base.encode16(binary, case: :lower)
end
end

@@ -510,7 +510,7 @@ defmodule BlockScoutWeb.API.V2.TransactionView do
|> Map.put(
"execution_node",
Helper.address_with_info(
single_tx? && conn,
conn,
transaction.execution_node,
transaction.execution_node_hash,
single_tx?,
@@ -522,7 +522,7 @@ defmodule BlockScoutWeb.API.V2.TransactionView do
"nonce" => transaction.wrapped_nonce,
"to" =>
Helper.address_with_info(
single_tx? && conn,
conn,
transaction.wrapped_to_address,
transaction.wrapped_to_address_hash,
single_tx?,
@@ -783,6 +783,7 @@ defmodule BlockScoutWeb.API.V2.TransactionView do
def tx_types(tx, types \\ [], stage \\ :blob_transaction)
def tx_types(%Transaction{type: type} = tx, types, :blob_transaction) do
# EIP-2718 blob transaction type
types =
if type == 3 do
[:blob_transaction | types]

@@ -5067,6 +5067,7 @@ defmodule Explorer.Chain do
end
def filter_blob_transaction_dynamic(dynamic) do
# EIP-2718 blob transaction type
dynamic([tx], ^dynamic or tx.type == 3)
end

@@ -3,22 +3,22 @@ defmodule Explorer.Chain.Beacon.Blob do
use Explorer.Schema
alias Explorer.Chain.Hash
alias Explorer.Chain.{Data, Hash}
@required_attrs ~w(hash blob_data kzg_commitment kzg_proof)a
@type t :: %__MODULE__{
hash: Hash.t(),
blob_data: binary(),
kzg_commitment: binary(),
kzg_proof: binary()
blob_data: Data.t(),
kzg_commitment: Data.t(),
kzg_proof: Data.t()
}
@primary_key {:hash, Hash.Full, autogenerate: false}
schema "beacon_blobs" do
field(:blob_data, :binary)
field(:kzg_commitment, :binary)
field(:kzg_proof, :binary)
field(:blob_data, Data)
field(:kzg_commitment, Data)
field(:kzg_proof, Data)
timestamps(updated_at: false)
end

@@ -4,7 +4,7 @@ defmodule Explorer.Chain.Beacon.Reader do
import Ecto.Query,
only: [
subquery: 1,
preload: 2,
distinct: 3,
from: 2,
limit: 2,
order_by: 3,
@@ -16,10 +16,11 @@ defmodule Explorer.Chain.Beacon.Reader do
import Explorer.Chain, only: [select_repo: 1]
alias Explorer.{Chain, Repo}
alias Explorer.Chain.{DenormalizationHelper, Hash, Transaction}
alias Explorer.Chain.Beacon.{Blob, BlobTransaction}
alias Explorer.{Chain, PagingOptions, Repo}
alias Explorer.Chain.{Hash, Transaction}
@spec blob(Hash.Full.t(), [Chain.api?()]) :: {:error, :not_found} | {:ok, Blob.t()}
def blob(hash, options) when is_list(options) do
Blob
|> where(hash: ^hash)
@@ -30,20 +31,48 @@ defmodule Explorer.Chain.Beacon.Reader do
end
end
@spec blob_hash_to_transactions(Hash.Full.t(), [Chain.api?()]) :: [
%{
block_consensus: boolean(),
transaction_hash: Hash.Full.t()
}
]
def blob_hash_to_transactions(hash, options) when is_list(options) do
BlobTransaction
|> where(type(^hash, Hash.Full) == fragment("any(blob_versioned_hashes)"))
|> join(:inner, [bt], transaction in Transaction, on: bt.hash == transaction.hash)
|> order_by([bt, transaction], desc: transaction.block_consensus, desc: transaction.block_number)
|> limit(10)
|> select([bt, transaction], %{
block_consensus: transaction.block_consensus,
transaction_hash: transaction.hash
})
|> select_repo(options).all()
query =
BlobTransaction
|> where(type(^hash, Hash.Full) == fragment("any(blob_versioned_hashes)"))
|> join(:inner, [bt], transaction in Transaction, on: bt.hash == transaction.hash)
|> order_by([bt, transaction], desc: transaction.block_consensus, desc: transaction.block_number)
|> limit(10)
query_with_denormalization =
if DenormalizationHelper.denormalization_finished?() do
query
|> select([bt, transaction], %{
block_consensus: transaction.block_consensus,
transaction_hash: transaction.hash
})
else
query
|> join(:inner, [bt, transaction], block in Block, on: block.hash == transaction.block_hash)
|> select([bt, transaction, block], %{
block_consensus: block.consensus,
transaction_hash: transaction.hash
})
end
query_with_denormalization |> select_repo(options).all()
end
def stream_missed_blob_transactions_timestamps(min_block, max_block, initial, reducer, options \\ [])
@spec stream_missed_blob_transactions_timestamps(
initial :: accumulator,
reducer :: (entry :: Hash.Address.t(), accumulator -> accumulator),
min_block :: integer() | nil,
max_block :: integer() | nil,
options :: []
) :: {:ok, accumulator}
when accumulator: term()
def stream_missed_blob_transactions_timestamps(initial, reducer, min_block, max_block, options \\ [])
when is_list(options) do
query =
from(
@@ -58,23 +87,34 @@ defmodule Explorer.Chain.Beacon.Reader do
),
inner_join: transaction in Transaction,
on: transaction_blob.transaction_hash == transaction.hash,
# EIP-2718 blob transaction type
where: transaction.type == 3,
left_join: blob in Blob,
on: blob.hash == transaction_blob.blob_hash,
where: is_nil(blob.hash),
distinct: transaction.block_timestamp,
select: transaction.block_timestamp
where: is_nil(blob.hash)
)
query
query_with_denormalization =
if DenormalizationHelper.denormalization_finished?() do
query
|> distinct([transaction_blob, transaction, blob], transaction.block_timestamp)
|> select([transaction_blob, transaction, blob], transaction.block_timestamp)
else
query
|> join(:inner, [transaction_blob, transaction, blob], block in Block, on: block.hash == transaction.block_hash)
|> distinct([transaction_blob, transaction, blob, block], block.timestamp)
|> select([transaction_blob, transaction, blob, block], block.timestamp)
end
query_with_denormalization
|> add_min_block_filter(min_block)
|> add_max_block_filter(min_block)
|> add_max_block_filter(max_block)
|> Repo.stream_reduce(initial, reducer)
end
defp add_min_block_filter(query, block_number) do
if is_integer(block_number) do
query |> where([_, transaction], transaction.block_number <= ^block_number)
query |> where([_, transaction], transaction.block_number >= ^block_number)
else
query
end
@@ -82,7 +122,7 @@ defmodule Explorer.Chain.Beacon.Reader do
defp add_max_block_filter(query, block_number) do
if is_integer(block_number) and block_number > 0 do
query |> where([_, transaction], transaction.block_number >= ^block_number)
query |> where([_, transaction], transaction.block_number <= ^block_number)
else
query
end

@@ -9,7 +9,7 @@ defmodule Explorer.Chain.Import.Runner.Beacon.BlobTransactions do
alias Explorer.Chain.Beacon.BlobTransaction
alias Ecto.{Multi, Repo}
alias Explorer.Chain.{Block, Hash, Import}
alias Explorer.Chain.{Hash, Import}
alias Explorer.Prometheus.Instrumenter
@behaviour Import.Runner

@@ -65,8 +65,8 @@ defmodule Explorer.Factory do
end
def auth_factory do
%{
info: %{
%Auth{
info: %Info{
birthday: nil,
description: nil,
email: sequence(:email, &"test_user-#{&1}@blockscout.com"),

@@ -61,14 +61,14 @@ defmodule Indexer.Fetcher.Beacon.Blob do
def init(initial, reducer, state) do
{:ok, final} =
Reader.stream_missed_blob_transactions_timestamps(
state.start_block,
state.end_block,
initial,
fn fields, acc ->
fields
|> entry()
|> reducer.(acc)
end
end,
state.start_block,
state.end_block
)
final
@@ -91,13 +91,13 @@ defmodule Indexer.Fetcher.Beacon.Blob do
|> Enum.map(&timestamp_to_slot(&1, state))
|> Client.get_blob_sidecars()
|> case do
{:ok, fetched_blobs, retries} ->
{:ok, fetched_blobs, retry_indices} ->
run_fetched_blobs(fetched_blobs)
if Enum.empty?(retries) do
if Enum.empty?(retry_indices) do
:ok
else
{:retry, retries |> Enum.map(&Enum.at(entries, &1))}
{:retry, retry_indices |> Enum.map(&Enum.at(entries, &1))}
end
end
end
@@ -123,7 +123,7 @@ defmodule Indexer.Fetcher.Beacon.Blob do
Repo.insert_all(Blob, blobs, on_conflict: :nothing, conflict_target: [:hash])
end
def blob_entry(%{
defp blob_entry(%{
"blob" => blob,
"kzg_commitment" => kzg_commitment,
"kzg_proof" => kzg_proof
@@ -134,13 +134,13 @@ defmodule Indexer.Fetcher.Beacon.Blob do
%{
hash: blob_hash(kzg_commitment.bytes),
blob_data: blob.bytes,
kzg_commitment: kzg_commitment.bytes,
kzg_proof: kzg_proof.bytes
blob_data: blob,
kzg_commitment: kzg_commitment,
kzg_proof: kzg_proof
}
end
def blob_hash(kzg_commitment) do
defp blob_hash(kzg_commitment) do
raw_hash = :crypto.hash(:sha256, kzg_commitment)
<<_::size(8), rest::binary>> = raw_hash
{:ok, hash} = Hash.Full.cast(<<1>> <> rest)

@@ -31,6 +31,7 @@ defmodule Indexer.Fetcher.Beacon.Client do
end
end
@spec get_blob_sidecars([integer()]) :: {:ok, list(), [integer()]}
def get_blob_sidecars(slots) when is_list(slots) do
{oks, errors_with_retries} =
slots
@@ -53,6 +54,7 @@ defmodule Indexer.Fetcher.Beacon.Client do
{:ok, oks |> Enum.map(fn {_, blob} -> blob end), retries}
end
@spec get_blob_sidecars(integer()) :: {:error, any()} | {:ok, any()}
def get_blob_sidecars(slot) do
http_get_request(blob_sidecars_url(slot))
end
@@ -63,6 +65,7 @@ defmodule Indexer.Fetcher.Beacon.Client do
defp successful?({:ok, _}), do: true
defp successful?(_), do: false
@spec get_header(integer()) :: {:error, any()} | {:ok, any()}
def get_header(slot) do
http_get_request(header_url(slot))
end

@@ -684,15 +684,15 @@ config :indexer, Indexer.Fetcher.Beacon, beacon_rpc: System.get_env("INDEXER_BEA
config :indexer, Indexer.Fetcher.Beacon.Blob.Supervisor,
disabled?:
ConfigHelper.chain_type() != "ethereum" ||
ConfigHelper.parse_bool_env_var("INDEXER_DISABLE_BEACON_BLOB_SANITIZE_FETCHER")
ConfigHelper.parse_bool_env_var("INDEXER_DISABLE_BEACON_BLOB_FETCHER")
config :indexer, Indexer.Fetcher.Beacon.Blob,
slot_duration: ConfigHelper.parse_integer_env_var("INDEXER_BEACON_BLOB_SANITIZE_FETCHER_SLOT_DURATION", 12),
reference_slot: ConfigHelper.parse_integer_env_var("INDEXER_BEACON_BLOB_SANITIZE_FETCHER_REFERENCE_SLOT", 8_206_822),
slot_duration: ConfigHelper.parse_integer_env_var("INDEXER_BEACON_BLOB_FETCHER_SLOT_DURATION", 12),
reference_slot: ConfigHelper.parse_integer_env_var("INDEXER_BEACON_BLOB_FETCHER_REFERENCE_SLOT", 8_206_822),
reference_timestamp:
ConfigHelper.parse_integer_env_var("INDEXER_BEACON_BLOB_SANITIZE_FETCHER_REFERENCE_TIMESTAMP", 1_705_305_887),
start_block: ConfigHelper.parse_integer_env_var("INDEXER_BEACON_BLOB_SANITIZE_FETCHER_START_BLOCK", 8_206_822),
end_block: ConfigHelper.parse_integer_env_var("INDEXER_BEACON_BLOB_SANITIZE_FETCHER_END_BLOCK", 0)
ConfigHelper.parse_integer_env_var("INDEXER_BEACON_BLOB_FETCHER_REFERENCE_TIMESTAMP", 1_705_305_887),
start_block: ConfigHelper.parse_integer_env_var("INDEXER_BEACON_BLOB_FETCHER_START_BLOCK", 8_206_822),
end_block: ConfigHelper.parse_integer_env_var("INDEXER_BEACON_BLOB_FETCHER_END_BLOCK", 0)
Code.require_file("#{config_env()}.exs", "config/runtime")

@@ -181,6 +181,13 @@ INDEXER_DISABLE_INTERNAL_TRANSACTIONS_FETCHER=false
# INDEXER_ROOTSTOCK_DATA_FETCHER_BATCH_SIZE=
# INDEXER_ROOTSTOCK_DATA_FETCHER_CONCURRENCY=
# INDEXER_ROOTSTOCK_DATA_FETCHER_DB_BATCH_SIZE=
# INDEXER_BEACON_RPC_URL=
# INDEXER_DISABLE_BEACON_BLOB_FETCHER=
# INDEXER_BEACON_BLOB_FETCHER_SLOT_DURATION=12
# INDEXER_BEACON_BLOB_FETCHER_REFERENCE_SLOT=8206822
# INDEXER_BEACON_BLOB_FETCHER_REFERENCE_TIMESTAMP=1705305887
# INDEXER_BEACON_BLOB_FETCHER_START_BLOCK=8206822
# INDEXER_BEACON_BLOB_FETCHER_END_BLOCK=0
# TOKEN_ID_MIGRATION_FIRST_BLOCK=
# TOKEN_ID_MIGRATION_CONCURRENCY=
# TOKEN_ID_MIGRATION_BATCH_SIZE=

Loading…
Cancel
Save