diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/polygon_zkevm_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/polygon_zkevm_view.ex
index 051851bf0e..ccf6e33573 100644
--- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/polygon_zkevm_view.ex
+++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/polygon_zkevm_view.ex
@@ -1,6 +1,7 @@
 defmodule BlockScoutWeb.API.V2.PolygonZkevmView do
   use BlockScoutWeb, :view
 
+  alias Explorer.Chain.PolygonZkevm.Reader
   alias Explorer.Chain.Transaction
 
   @doc """
@@ -87,8 +88,8 @@ defmodule BlockScoutWeb.API.V2.PolygonZkevmView do
 
     decimals =
       cond do
-        not is_nil(Map.get(l1_token, :decimals)) -> Map.get(l1_token, :decimals)
-        not is_nil(Map.get(l2_token, :decimals)) -> Map.get(l2_token, :decimals)
+        not is_nil(Map.get(l1_token, :decimals)) -> Reader.sanitize_decimals(Map.get(l1_token, :decimals))
+        not is_nil(Map.get(l2_token, :decimals)) -> Reader.sanitize_decimals(Map.get(l2_token, :decimals))
         true -> env[:native_decimals]
       end
 
diff --git a/apps/explorer/lib/explorer/chain/polygon_zkevm/reader.ex b/apps/explorer/lib/explorer/chain/polygon_zkevm/reader.ex
index 60a8d2b8ed..209fb6719e 100644
--- a/apps/explorer/lib/explorer/chain/polygon_zkevm/reader.ex
+++ b/apps/explorer/lib/explorer/chain/polygon_zkevm/reader.ex
@@ -307,6 +307,18 @@ defmodule Explorer.Chain.PolygonZkevm.Reader do
     select_repo(options).aggregate(query, :count, timeout: :infinity)
   end
 
+  @doc """
+  Sanitizes the token decimals value: a value greater than 0xFF is replaced with 0.
+  """
+  @spec sanitize_decimals(non_neg_integer()) :: non_neg_integer()
+  def sanitize_decimals(decimals) do
+    if decimals > 0xFF do
+      0
+    else
+      decimals
+    end
+  end
+
   defp page_batches(query, %PagingOptions{key: nil}), do: query
 
   defp page_batches(query, %PagingOptions{key: {number}}) do
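For reference, the new Reader.sanitize_decimals/1 helper clamps oversized decimals so they fit storage and display constraints. A minimal sketch of the expected behavior (illustrative only, not part of the patch):

    alias Explorer.Chain.PolygonZkevm.Reader

    18 = Reader.sanitize_decimals(18)
    255 = Reader.sanitize_decimals(0xFF)
    0 = Reader.sanitize_decimals(0x100)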
diff --git a/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge.ex b/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge.ex
index 2b24ffc3ee..17c77dc98b 100644
--- a/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge.ex
+++ b/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge.ex
@@ -30,8 +30,12 @@ defmodule Indexer.Fetcher.PolygonZkevm.Bridge do
   @bridge_event_params [{:uint, 8}, {:uint, 32}, :address, {:uint, 32}, :address, {:uint, 256}, :bytes, {:uint, 32}]
 
   # 32-byte signature of the event ClaimEvent(uint32 index, uint32 originNetwork, address originAddress, address destinationAddress, uint256 amount)
-  @claim_event "0x25308c93ceeed162da955b3f7ce3e3f93606579e40fb92029faa9efe27545983"
-  @claim_event_params [{:uint, 32}, {:uint, 32}, :address, :address, {:uint, 256}]
+  @claim_event_v1 "0x25308c93ceeed162da955b3f7ce3e3f93606579e40fb92029faa9efe27545983"
+  @claim_event_v1_params [{:uint, 32}, {:uint, 32}, :address, :address, {:uint, 256}]
+
+  # 32-byte signature of the event ClaimEvent(uint256 globalIndex, uint32 originNetwork, address originAddress, address destinationAddress, uint256 amount)
+  @claim_event_v2 "0x1df3f2a973a00d6635911755c260704e95e8a5876997546798770f76396fda4d"
+  @claim_event_v2_params [{:uint, 256}, {:uint, 32}, :address, :address, {:uint, 256}]
 
   @symbol_method_selector "95d89b41"
   @decimals_method_selector "313ce567"
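The v2 topic constant can be cross-checked against the event signature in the comment above. A hedged sketch, assuming the ex_keccak dependency already used in this repo (ExKeccak.hash_256/1 returning a binary):

    topic_v2 =
      "ClaimEvent(uint256,uint32,address,address,uint256)"
      |> ExKeccak.hash_256()
      |> Base.encode16(case: :lower)

    # expected to match the @claim_event_v2 constant without the "0x" prefix
    topic_v2 == "1df3f2a973a00d6635911755c260704e95e8a5876997546798770f76396fda4d"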
@@ -65,7 +69,7 @@ defmodule Indexer.Fetcher.PolygonZkevm.Bridge do
   def filter_bridge_events(events, bridge_contract) do
     Enum.filter(events, fn event ->
       Helper.address_hash_to_string(event.address_hash, true) == bridge_contract and
-        Enum.member?([@bridge_event, @claim_event], Helper.log_topic_to_string(event.first_topic))
+        Enum.member?([@bridge_event, @claim_event_v1, @claim_event_v2], Helper.log_topic_to_string(event.first_topic))
     end)
   end
 
@@ -80,7 +84,7 @@ defmodule Indexer.Fetcher.PolygonZkevm.Bridge do
         chunk_start,
         chunk_end,
         bridge_contract,
-        [[@bridge_event, @claim_event]],
+        [[@bridge_event, @claim_event_v1, @claim_event_v2]],
         json_rpc_named_arguments
       )
 
@@ -134,60 +138,82 @@ defmodule Indexer.Fetcher.PolygonZkevm.Bridge do
   Converts the list of zkEVM bridge events to the list of operations preparing them for importing to the database.
   """
-  @spec prepare_operations(list(), list() | nil, list(), map() | nil) :: list()
-  def prepare_operations(events, json_rpc_named_arguments, json_rpc_named_arguments_l1, block_to_timestamp \\ nil) do
+  @spec prepare_operations(
+          list(),
+          non_neg_integer(),
+          non_neg_integer(),
+          non_neg_integer() | nil,
+          non_neg_integer(),
+          list() | nil,
+          list(),
+          map() | nil
+        ) ::
+          list()
+  def prepare_operations(
+        events,
+        rollup_network_id_l1,
+        rollup_network_id_l2,
+        rollup_index_l1,
+        rollup_index_l2,
+        json_rpc_named_arguments,
+        json_rpc_named_arguments_l1,
+        block_to_timestamp \\ nil
+      ) do
+    is_l1 = json_rpc_named_arguments == json_rpc_named_arguments_l1
+
+    events = filter_events(events, is_l1, rollup_network_id_l1, rollup_network_id_l2, rollup_index_l1, rollup_index_l2)
+
     {block_to_timestamp, token_address_to_id} =
       if is_nil(block_to_timestamp) do
+        # this function is called by the catchup indexer,
+        # so RPC calls are acceptable here as delays are less critical than in realtime
         bridge_events = Enum.filter(events, fn event -> event.first_topic == @bridge_event end)
-
-        l1_token_addresses =
-          bridge_events
-          |> Enum.reduce(%MapSet{}, fn event, acc ->
-            case bridge_event_parse(event) do
-              {{nil, _}, _, _} -> acc
-              {{token_address, nil}, _, _} -> MapSet.put(acc, token_address)
-            end
-          end)
-          |> MapSet.to_list()
+        l1_token_addresses = l1_token_addresses_from_bridge_events(bridge_events, rollup_network_id_l2)
 
         {
           blocks_to_timestamps(bridge_events, json_rpc_named_arguments),
           token_addresses_to_ids(l1_token_addresses, json_rpc_named_arguments_l1)
         }
       else
-        # this is called in realtime
+        # this function is called in realtime by the transformer,
+        # so we don't use RPC calls to avoid delays and fetch token data
+        # in a separate fetcher
        {block_to_timestamp, %{}}
       end
 
-    Enum.map(events, fn event ->
+    events
+    |> Enum.map(fn event ->
       {index, l1_token_id, l1_token_address, l2_token_address, amount, block_number, block_timestamp} =
-        if event.first_topic == @bridge_event do
-          {
-            {l1_token_address, l2_token_address},
-            amount,
-            deposit_count
-          } = bridge_event_parse(event)
-
-          l1_token_id = Map.get(token_address_to_id, l1_token_address)
-          block_number = quantity_to_integer(event.block_number)
-          block_timestamp = Map.get(block_to_timestamp, block_number)
-
-          # credo:disable-for-lines:2 Credo.Check.Refactor.Nesting
-          l1_token_address =
-            if is_nil(l1_token_id) do
-              l1_token_address
-            end
-
-          {deposit_count, l1_token_id, l1_token_address, l2_token_address, amount, block_number, block_timestamp}
-        else
-          [index, _origin_network, _origin_address, _destination_address, amount] =
-            decode_data(event.data, @claim_event_params)
-
-          {index, nil, nil, nil, amount, nil, nil}
+        case event.first_topic do
+          @bridge_event ->
+            {
+              {l1_token_address, l2_token_address},
+              amount,
+              deposit_count,
+              _destination_network
+            } = bridge_event_parse(event, rollup_network_id_l2)
+
+            l1_token_id = Map.get(token_address_to_id, l1_token_address)
+            block_number = quantity_to_integer(event.block_number)
+            block_timestamp = Map.get(block_to_timestamp, block_number)
+
+            # credo:disable-for-lines:2 Credo.Check.Refactor.Nesting
+            l1_token_address =
+              if is_nil(l1_token_id) do
+                l1_token_address
+              end
+
+            {deposit_count, l1_token_id, l1_token_address, l2_token_address, amount, block_number, block_timestamp}
+
+          @claim_event_v1 ->
+            {index, amount} = claim_event_v1_parse(event)
+            {index, nil, nil, nil, amount, nil, nil}
+
+          @claim_event_v2 ->
+            {_mainnet_bit, _rollup_idx, index, _origin_network, amount} = claim_event_v2_parse(event)
+            {index, nil, nil, nil, amount, nil, nil}
         end
 
-      is_l1 = json_rpc_named_arguments == json_rpc_named_arguments_l1
-
       result = %{
         type: operation_type(event.first_topic, is_l1),
         index: index,
@@ -221,19 +247,98 @@ defmodule Indexer.Fetcher.PolygonZkevm.Bridge do
     end)
   end
 
-  defp bridge_event_parse(event) do
+  defp bridge_event_parse(event, rollup_network_id_l2) do
     [
       leaf_type,
       origin_network,
       origin_address,
-      _destination_network,
+      destination_network,
       _destination_address,
       amount,
       _metadata,
       deposit_count
     ] = decode_data(event.data, @bridge_event_params)
 
-    {token_address_by_origin_address(origin_address, origin_network, leaf_type), amount, deposit_count}
+    {token_address_by_origin_address(origin_address, origin_network, leaf_type, rollup_network_id_l2), amount,
+     deposit_count, destination_network}
+  end
+
+  defp claim_event_v1_parse(event) do
+    [index, _origin_network, _origin_address, _destination_address, amount] =
+      decode_data(event.data, @claim_event_v1_params)
+
+    {index, amount}
+  end
+
+  defp claim_event_v2_parse(event) do
+    [global_index, origin_network, _origin_address, _destination_address, amount] =
+      decode_data(event.data, @claim_event_v2_params)
+
+    mainnet_bit = Bitwise.band(Bitwise.bsr(global_index, 64), 1)
+
+    bitmask_4bytes = 0xFFFFFFFF
+
+    rollup_index = Bitwise.band(Bitwise.bsr(global_index, 32), bitmask_4bytes)
+
+    index = Bitwise.band(global_index, bitmask_4bytes)
+
+    {mainnet_bit, rollup_index, index, origin_network, amount}
+  end
+
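The globalIndex decoding above treats bit 64 as the mainnet flag, bits 32..63 as the rollup index, and bits 0..31 as the leaf index. A small worked example with illustrative values only:

    import Bitwise

    # globalIndex for a claim from rollup index 3, leaf index 1234, mainnet bit clear
    global_index = (3 <<< 32) ||| 1234

    0 = global_index |> bsr(64) |> band(1)
    3 = global_index |> bsr(32) |> band(0xFFFFFFFF)
    1234 = band(global_index, 0xFFFFFFFF)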
+  defp filter_events(events, is_l1, rollup_network_id_l1, rollup_network_id_l2, rollup_index_l1, rollup_index_l2) do
+    Enum.filter(events, fn event ->
+      case {event.first_topic, is_l1} do
+        {@bridge_event, true} -> filter_bridge_event_l1(event, rollup_network_id_l2)
+        {@bridge_event, false} -> filter_bridge_event_l2(event, rollup_network_id_l1, rollup_network_id_l2)
+        {@claim_event_v2, true} -> filter_claim_event_l1(event, rollup_index_l2)
+        {@claim_event_v2, false} -> filter_claim_event_l2(event, rollup_network_id_l1, rollup_index_l1)
+        _ -> true
+      end
+    end)
+  end
+
+  defp filter_bridge_event_l1(event, rollup_network_id_l2) do
+    {_, _, _, destination_network} = bridge_event_parse(event, rollup_network_id_l2)
+    # skip the Deposit event if it's for another rollup
+    destination_network == rollup_network_id_l2
+  end
+
+  defp filter_bridge_event_l2(event, rollup_network_id_l1, rollup_network_id_l2) do
+    {_, _, _, destination_network} = bridge_event_parse(event, rollup_network_id_l2)
+    # skip the Withdrawal event if it's for another L1 chain
+    destination_network == rollup_network_id_l1
+  end
+
+  defp filter_claim_event_l1(event, rollup_index_l2) do
+    {mainnet_bit, rollup_idx, _index, _origin_network, _amount} = claim_event_v2_parse(event)
+
+    if mainnet_bit != 0 do
+      Logger.error(
+        "L1 ClaimEvent has non-zero mainnet bit in the transaction #{event.transaction_hash}. This event will be ignored."
+      )
+    end
+
+    # skip the Withdrawal event if it's for another rollup or the source network is Ethereum Mainnet
+    rollup_idx == rollup_index_l2 and mainnet_bit == 0
+  end
+
+  defp filter_claim_event_l2(event, rollup_network_id_l1, rollup_index_l1) do
+    {mainnet_bit, rollup_idx, _index, origin_network, _amount} = claim_event_v2_parse(event)
+
+    # skip the Deposit event if it's from another L1 chain
+    (mainnet_bit == 1 and rollup_network_id_l1 == 0) or
+      (mainnet_bit == 0 and (rollup_idx == rollup_index_l1 or origin_network == rollup_network_id_l1))
+  end
+
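To make the claim filters concrete, here is a worked example with made-up decoded values, mirroring the checks in filter_claim_event_l1/2 and filter_claim_event_l2/3 above:

    # hypothetical decoded ClaimEvent v2 tuples: {mainnet_bit, rollup_idx, index, origin_network, amount}
    rollup_index_l2 = 3
    rollup_network_id_l1 = 0

    # L1 side: keep a claim only when it targets this rollup and the mainnet bit is clear
    {mainnet_bit, rollup_idx, _index, _origin, _amount} = {0, 3, 42, 1, 1_000}
    true = rollup_idx == rollup_index_l2 and mainnet_bit == 0

    # L2 side: a claim with the mainnet bit set is kept when L1 is network 0 (Ethereum Mainnet)
    {mainnet_bit, _rollup_idx, _index, _origin, _amount} = {1, 0, 7, 0, 500}
    true = mainnet_bit == 1 and rollup_network_id_l1 == 0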
+  defp l1_token_addresses_from_bridge_events(events, rollup_network_id_l2) do
+    events
+    |> Enum.reduce(%MapSet{}, fn event, acc ->
+      case bridge_event_parse(event, rollup_network_id_l2) do
+        {{nil, _}, _, _, _} -> acc
+        {{token_address, nil}, _, _, _} -> MapSet.put(acc, token_address)
+      end
+    end)
+    |> MapSet.to_list()
   end
 
   defp operation_type(first_topic, is_l1) do
@@ -291,11 +396,11 @@ defmodule Indexer.Fetcher.PolygonZkevm.Bridge do
     |> Map.merge(tokens_inserted_outside)
   end
 
-  defp token_address_by_origin_address(origin_address, origin_network, leaf_type) do
-    with true <- leaf_type != 1 and origin_network <= 1,
+  defp token_address_by_origin_address(origin_address, origin_network, leaf_type, rollup_network_id_l2) do
+    with true <- leaf_type != 1,
          token_address = "0x" <> Base.encode16(origin_address, case: :lower),
          true <- token_address != burn_address_hash_string() do
-      if origin_network == 0 do
+      if origin_network != rollup_network_id_l2 do
         # this is L1 address
         {token_address, nil}
       else
@@ -392,7 +497,7 @@ defmodule Indexer.Fetcher.PolygonZkevm.Bridge do
       if atomized_key(request.method_id) == :symbol do
         Map.put(data, :symbol, response)
       else
-        Map.put(data, :decimals, response)
+        Map.put(data, :decimals, Reader.sanitize_decimals(response))
       end
     end
 
diff --git a/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge_l1.ex b/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge_l1.ex
index 9899c70991..6e01b28d43 100644
--- a/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge_l1.ex
+++ b/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge_l1.ex
@@ -12,7 +12,7 @@ defmodule Indexer.Fetcher.PolygonZkevm.BridgeL1 do
   import Explorer.Helper, only: [parse_integer: 1]
 
   import Indexer.Fetcher.PolygonZkevm.Bridge,
-    only: [get_logs_all: 3, import_operations: 1, prepare_operations: 3]
+    only: [get_logs_all: 3, import_operations: 1, prepare_operations: 7]
 
   alias Explorer.Chain.PolygonZkevm.{Bridge, Reader}
   alias Explorer.Repo
@@ -53,11 +53,18 @@ defmodule Indexer.Fetcher.PolygonZkevm.BridgeL1 do
   @impl GenServer
   def handle_info(:init_with_delay, _state) do
     env = Application.get_all_env(:indexer)[__MODULE__]
+    env_l2 = Application.get_all_env(:indexer)[Indexer.Fetcher.PolygonZkevm.BridgeL2]
 
     with {:start_block_undefined, false} <- {:start_block_undefined, is_nil(env[:start_block])},
          {:reorg_monitor_started, true} <- {:reorg_monitor_started, !is_nil(Process.whereis(RollupL1ReorgMonitor))},
          rpc = env[:rpc],
          {:rpc_undefined, false} <- {:rpc_undefined, is_nil(rpc)},
+         {:rollup_network_id_l1_is_valid, true} <-
+           {:rollup_network_id_l1_is_valid, !is_nil(env[:rollup_network_id_l1]) and env[:rollup_network_id_l1] >= 0},
+         {:rollup_network_id_l2_is_valid, true} <-
+           {:rollup_network_id_l2_is_valid,
+            !is_nil(env_l2[:rollup_network_id_l2]) and env_l2[:rollup_network_id_l2] > 0},
+         {:rollup_index_l2_undefined, false} <- {:rollup_index_l2_undefined, is_nil(env_l2[:rollup_index_l2])},
         {:bridge_contract_address_is_valid, true} <-
           {:bridge_contract_address_is_valid, Helper.address_correct?(env[:bridge_contract])},
         start_block = parse_integer(env[:start_block]),
@@ -80,7 +87,11 @@ defmodule Indexer.Fetcher.PolygonZkevm.BridgeL1 do
         bridge_contract: env[:bridge_contract],
         json_rpc_named_arguments: json_rpc_named_arguments,
         end_block: safe_block,
-        start_block: max(start_block, last_l1_block_number)
+        start_block: max(start_block, last_l1_block_number),
+        rollup_network_id_l1: env[:rollup_network_id_l1],
+        rollup_network_id_l2: env_l2[:rollup_network_id_l2],
+        rollup_index_l1: env[:rollup_index_l1],
+        rollup_index_l2: env_l2[:rollup_index_l2]
       }}
     else
       {:start_block_undefined, true} ->
@@ -95,6 +106,27 @@ defmodule Indexer.Fetcher.PolygonZkevm.BridgeL1 do
         Logger.error("L1 RPC URL is not defined.")
         {:stop, :normal, %{}}
 
+      {:rollup_network_id_l1_is_valid, false} ->
+        Logger.error(
+          "Invalid network ID for L1. Please check INDEXER_POLYGON_ZKEVM_L1_BRIDGE_NETWORK_ID env variable."
+        )
+
+        {:stop, :normal, %{}}
+
+      {:rollup_network_id_l2_is_valid, false} ->
+        Logger.error(
+          "Invalid network ID for L2. Please check INDEXER_POLYGON_ZKEVM_L2_BRIDGE_NETWORK_ID env variable."
+        )
+
+        {:stop, :normal, %{}}
+
+      {:rollup_index_l2_undefined, true} ->
+        Logger.error(
+          "Rollup index is undefined for L2. Please check INDEXER_POLYGON_ZKEVM_L2_BRIDGE_ROLLUP_INDEX env variable."
+        )
+
+        {:stop, :normal, %{}}
+
       {:bridge_contract_address_is_valid, false} ->
         Logger.error("PolygonZkEVMBridge contract address is invalid or not defined.")
         {:stop, :normal, %{}}
@@ -133,7 +165,11 @@ defmodule Indexer.Fetcher.PolygonZkevm.BridgeL1 do
           block_check_interval: block_check_interval,
           start_block: start_block,
           end_block: end_block,
-          json_rpc_named_arguments: json_rpc_named_arguments
+          json_rpc_named_arguments: json_rpc_named_arguments,
+          rollup_network_id_l1: rollup_network_id_l1,
+          rollup_network_id_l2: rollup_network_id_l2,
+          rollup_index_l1: rollup_index_l1,
+          rollup_index_l2: rollup_index_l2
         } = state
       ) do
     time_before = Timex.now()
@@ -151,7 +187,14 @@ defmodule Indexer.Fetcher.PolygonZkevm.BridgeL1 do
       operations =
         {chunk_start, chunk_end}
         |> get_logs_all(bridge_contract, json_rpc_named_arguments)
-        |> prepare_operations(json_rpc_named_arguments, json_rpc_named_arguments)
+        |> prepare_operations(
+          rollup_network_id_l1,
+          rollup_network_id_l2,
+          rollup_index_l1,
+          rollup_index_l2,
+          json_rpc_named_arguments,
+          json_rpc_named_arguments
+        )
 
       import_operations(operations)
 
diff --git a/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge_l2.ex b/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge_l2.ex
index 2cd121881b..66bdba42d4 100644
--- a/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge_l2.ex
+++ b/apps/indexer/lib/indexer/fetcher/polygon_zkevm/bridge_l2.ex
@@ -12,10 +12,11 @@ defmodule Indexer.Fetcher.PolygonZkevm.BridgeL2 do
   import Explorer.Helper, only: [parse_integer: 1]
 
   import Indexer.Fetcher.PolygonZkevm.Bridge,
-    only: [get_logs_all: 3, import_operations: 1, prepare_operations: 3]
+    only: [get_logs_all: 3, import_operations: 1, prepare_operations: 7]
 
   alias Explorer.Chain.PolygonZkevm.{Bridge, Reader}
   alias Explorer.Repo
+  alias Indexer.Fetcher.PolygonZkevm.BridgeL1
   alias Indexer.Helper
 
   @eth_get_logs_range_size 1000
@@ -53,10 +54,17 @@ defmodule Indexer.Fetcher.PolygonZkevm.BridgeL2 do
   @impl GenServer
   def handle_info(:init_with_delay, %{json_rpc_named_arguments: json_rpc_named_arguments} = state) do
     env = Application.get_all_env(:indexer)[__MODULE__]
+    env_l1 = Application.get_all_env(:indexer)[BridgeL1]
 
     with {:start_block_undefined, false} <- {:start_block_undefined, is_nil(env[:start_block])},
-         rpc_l1 = Application.get_all_env(:indexer)[Indexer.Fetcher.PolygonZkevm.BridgeL1][:rpc],
+         rpc_l1 = env_l1[:rpc],
          {:rpc_l1_undefined, false} <- {:rpc_l1_undefined, is_nil(rpc_l1)},
+         {:rollup_network_id_l1_is_valid, true} <-
+           {:rollup_network_id_l1_is_valid,
+            !is_nil(env_l1[:rollup_network_id_l1]) and env_l1[:rollup_network_id_l1] >= 0},
+         {:rollup_network_id_l2_is_valid, true} <-
+           {:rollup_network_id_l2_is_valid, !is_nil(env[:rollup_network_id_l2]) and env[:rollup_network_id_l2] > 0},
+         {:rollup_index_l2_undefined, false} <- {:rollup_index_l2_undefined, is_nil(env[:rollup_index_l2])},
          {:bridge_contract_address_is_valid, true} <-
            {:bridge_contract_address_is_valid, Helper.address_correct?(env[:bridge_contract])},
          start_block = parse_integer(env[:start_block]),
@@ -77,7 +85,11 @@ defmodule Indexer.Fetcher.PolygonZkevm.BridgeL2 do
         json_rpc_named_arguments: json_rpc_named_arguments,
         json_rpc_named_arguments_l1: Helper.json_rpc_named_arguments(rpc_l1),
         end_block: latest_block,
-        start_block: max(start_block, last_l2_block_number)
+        start_block: max(start_block, last_l2_block_number),
+        rollup_network_id_l1: env_l1[:rollup_network_id_l1],
+        rollup_network_id_l2: env[:rollup_network_id_l2],
+        rollup_index_l1: env_l1[:rollup_index_l1],
+        rollup_index_l2: env[:rollup_index_l2]
       }}
     else
       {:start_block_undefined, true} ->
@@ -88,6 +100,27 @@ defmodule Indexer.Fetcher.PolygonZkevm.BridgeL2 do
         Logger.error("L1 RPC URL is not defined.")
         {:stop, :normal, state}
 
+      {:rollup_network_id_l1_is_valid, false} ->
+        Logger.error(
+          "Invalid network ID for L1. Please check INDEXER_POLYGON_ZKEVM_L1_BRIDGE_NETWORK_ID env variable."
+        )
+
+        {:stop, :normal, %{}}
+
+      {:rollup_network_id_l2_is_valid, false} ->
+        Logger.error(
+          "Invalid network ID for L2. Please check INDEXER_POLYGON_ZKEVM_L2_BRIDGE_NETWORK_ID env variable."
+        )
+
+        {:stop, :normal, %{}}
+
+      {:rollup_index_l2_undefined, true} ->
+        Logger.error(
+          "Rollup index is undefined for L2. Please check INDEXER_POLYGON_ZKEVM_L2_BRIDGE_ROLLUP_INDEX env variable."
+        )
+
+        {:stop, :normal, %{}}
+
       {:bridge_contract_address_is_valid, false} ->
         Logger.error("PolygonZkEVMBridge contract address is invalid or not defined.")
         {:stop, :normal, state}
@@ -124,7 +157,11 @@ defmodule Indexer.Fetcher.PolygonZkevm.BridgeL2 do
           start_block: start_block,
          end_block: end_block,
          json_rpc_named_arguments: json_rpc_named_arguments,
-          json_rpc_named_arguments_l1: json_rpc_named_arguments_l1
+          json_rpc_named_arguments_l1: json_rpc_named_arguments_l1,
+          rollup_network_id_l1: rollup_network_id_l1,
+          rollup_network_id_l2: rollup_network_id_l2,
+          rollup_index_l1: rollup_index_l1,
+          rollup_index_l2: rollup_index_l2
         } = state
       ) do
     start_block..end_block
@@ -139,7 +176,14 @@ defmodule Indexer.Fetcher.PolygonZkevm.BridgeL2 do
       operations =
        {chunk_start, chunk_end}
        |> get_logs_all(bridge_contract, json_rpc_named_arguments)
-        |> prepare_operations(json_rpc_named_arguments, json_rpc_named_arguments_l1)
+        |> prepare_operations(
+          rollup_network_id_l1,
+          rollup_network_id_l2,
+          rollup_index_l1,
+          rollup_index_l2,
+          json_rpc_named_arguments,
+          json_rpc_named_arguments_l1
+        )
 
       import_operations(operations)
 
diff --git a/apps/indexer/lib/indexer/transform/polygon_zkevm/bridge.ex b/apps/indexer/lib/indexer/transform/polygon_zkevm/bridge.ex
index 4ee7fa4126..863b7153cd 100644
--- a/apps/indexer/lib/indexer/transform/polygon_zkevm/bridge.ex
+++ b/apps/indexer/lib/indexer/transform/polygon_zkevm/bridge.ex
@@ -6,7 +6,7 @@ defmodule Indexer.Transform.PolygonZkevm.Bridge do
   require Logger
 
   import Indexer.Fetcher.PolygonZkevm.Bridge,
-    only: [filter_bridge_events: 2, prepare_operations: 4]
+    only: [filter_bridge_events: 2, prepare_operations: 8]
 
   alias Indexer.Fetcher.PolygonZkevm.{BridgeL1, BridgeL2}
   alias Indexer.Helper
@@ -24,6 +24,15 @@ defmodule Indexer.Transform.PolygonZkevm.Bridge do
          false <- Application.get_env(:explorer, :chain_type) != "polygon_zkevm",
          rpc_l1 = Application.get_all_env(:indexer)[BridgeL1][:rpc],
          {:rpc_l1_undefined, false} <- {:rpc_l1_undefined, is_nil(rpc_l1)},
+         rollup_network_id_l1 = Application.get_all_env(:indexer)[BridgeL1][:rollup_network_id_l1],
+         rollup_network_id_l2 = Application.get_all_env(:indexer)[BridgeL2][:rollup_network_id_l2],
+         rollup_index_l1 = Application.get_all_env(:indexer)[BridgeL1][:rollup_index_l1],
+         rollup_index_l2 = Application.get_all_env(:indexer)[BridgeL2][:rollup_index_l2],
+         {:rollup_network_id_l1_is_valid, true} <-
+           {:rollup_network_id_l1_is_valid, !is_nil(rollup_network_id_l1) and rollup_network_id_l1 >= 0},
+         {:rollup_network_id_l2_is_valid, true} <-
+           {:rollup_network_id_l2_is_valid, !is_nil(rollup_network_id_l2) and rollup_network_id_l2 > 0},
+         {:rollup_index_l2_is_valid, true} <- {:rollup_index_l2_is_valid, !is_nil(rollup_index_l2)},
          bridge_contract = Application.get_env(:indexer, BridgeL2)[:bridge_contract],
          {:bridge_contract_address_is_valid, true} <-
            {:bridge_contract_address_is_valid, Helper.address_correct?(bridge_contract)} do
@@ -42,7 +51,15 @@ defmodule Indexer.Transform.PolygonZkevm.Bridge do
       items =
         logs
         |> filter_bridge_events(bridge_contract)
-        |> prepare_operations(nil, json_rpc_named_arguments_l1, block_to_timestamp)
+        |> prepare_operations(
+          rollup_network_id_l1,
+          rollup_network_id_l2,
+          rollup_index_l1,
+          rollup_index_l2,
+          nil,
+          json_rpc_named_arguments_l1,
+          block_to_timestamp
+        )
 
       Helper.log_blocks_chunk_handling(
         start_block,
@@ -62,6 +79,27 @@ defmodule Indexer.Transform.PolygonZkevm.Bridge do
         Logger.error("L1 RPC URL is not defined. Cannot use #{__MODULE__} for parsing logs.")
         []
 
+      {:rollup_network_id_l1_is_valid, false} ->
+        Logger.error(
+          "Invalid network ID for L1. Please check INDEXER_POLYGON_ZKEVM_L1_BRIDGE_NETWORK_ID env variable."
+        )
+
+        []
+
+      {:rollup_network_id_l2_is_valid, false} ->
+        Logger.error(
+          "Invalid network ID for L2. Please check INDEXER_POLYGON_ZKEVM_L2_BRIDGE_NETWORK_ID env variable."
+        )
+
+        []
+
+      {:rollup_index_l2_is_valid, false} ->
+        Logger.error(
+          "Rollup index is undefined for L2. Please check INDEXER_POLYGON_ZKEVM_L2_BRIDGE_ROLLUP_INDEX env variable."
+        )
+
+        []
+
       {:bridge_contract_address_is_valid, false} ->
         Logger.error(
           "PolygonZkEVMBridge contract address is invalid or not defined. Cannot use #{__MODULE__} for parsing logs."
diff --git a/apps/indexer/lib/indexer/transform/transaction_actions.ex b/apps/indexer/lib/indexer/transform/transaction_actions.ex
index eef2cd6c63..44d782e775 100644
--- a/apps/indexer/lib/indexer/transform/transaction_actions.ex
+++ b/apps/indexer/lib/indexer/transform/transaction_actions.ex
@@ -120,6 +120,9 @@ defmodule Indexer.Transform.TransactionActions do
   # 32-byte signature of the event Swap(address indexed sender, address indexed recipient, int256 amount0, int256 amount1, uint160 sqrtPriceX96, uint128 liquidity, int24 tick);
   @uniswap_v3_swap_event "0xc42079f94a6350d7e6235f29174924f928cc2ac818eb64fed8004e115fbcca67"
 
+  # max number of token decimals
+  @decimals_max 0xFF
+
   @doc """
   Returns a list of transaction actions given a list of logs.
   """
@@ -807,7 +810,7 @@ defmodule Indexer.Transform.TransactionActions do
       |> get_token_data_from_rpc()
 
     if Enum.any?(token_data, fn {_, token} ->
-         is_nil(token.symbol) or token.symbol == "" or is_nil(token.decimals)
+         Map.get(token, :symbol, "") == "" or Map.get(token, :decimals) > @decimals_max
        end) do
      false
    else
@@ -862,7 +865,7 @@ defmodule Indexer.Transform.TransactionActions do
 
         new_data = %{symbol: symbol, decimals: decimals}
 
-        TransactionActionTokensData.put_to_cache(contract_address_hash, new_data)
+        put_to_cache(contract_address_hash, new_data)
 
         Map.put(token_data_acc, contract_address_hash, new_data)
       end)
@@ -887,6 +890,12 @@ defmodule Indexer.Transform.TransactionActions do
     end
   end
 
+  defp put_to_cache(contract_address_hash, new_data) do
+    if Map.get(new_data, :decimals, 0) <= @decimals_max do
+      TransactionActionTokensData.put_to_cache(contract_address_hash, new_data)
+    end
+  end
+
   defp get_token_data_from_rpc(token_data) do
     token_addresses =
       token_data
@@ -911,7 +920,7 @@ defmodule Indexer.Transform.TransactionActions do
 
       new_data = get_new_data(data, request, response)
 
-      TransactionActionTokensData.put_to_cache(request.contract_address, new_data)
+      put_to_cache(request.contract_address, new_data)
 
       Map.put(token_data_acc, request.contract_address, new_data)
     else
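The new put_to_cache/2 wrapper only caches token data whose decimals fit into the 0xFF limit. A minimal illustration of the guard it applies (token values are made up):

    decimals_max = 0xFF

    true = Map.get(%{symbol: "USDC", decimals: 6}, :decimals, 0) <= decimals_max
    false = Map.get(%{symbol: "WEIRD", decimals: 1_000_000}, :decimals, 0) <= decimals_max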
diff --git a/config/runtime.exs b/config/runtime.exs
index 8f6c12b549..630aa1ca09 100644
--- a/config/runtime.exs
+++ b/config/runtime.exs
@@ -857,7 +857,9 @@ config :indexer, Indexer.Fetcher.PolygonZkevm.BridgeL1,
   start_block: System.get_env("INDEXER_POLYGON_ZKEVM_L1_BRIDGE_START_BLOCK"),
   bridge_contract: System.get_env("INDEXER_POLYGON_ZKEVM_L1_BRIDGE_CONTRACT"),
   native_symbol: System.get_env("INDEXER_POLYGON_ZKEVM_L1_BRIDGE_NATIVE_SYMBOL", "ETH"),
-  native_decimals: ConfigHelper.parse_integer_env_var("INDEXER_POLYGON_ZKEVM_L1_BRIDGE_NATIVE_DECIMALS", 18)
+  native_decimals: ConfigHelper.parse_integer_env_var("INDEXER_POLYGON_ZKEVM_L1_BRIDGE_NATIVE_DECIMALS", 18),
+  rollup_network_id_l1: ConfigHelper.parse_integer_or_nil_env_var("INDEXER_POLYGON_ZKEVM_L1_BRIDGE_NETWORK_ID"),
+  rollup_index_l1: ConfigHelper.parse_integer_or_nil_env_var("INDEXER_POLYGON_ZKEVM_L1_BRIDGE_ROLLUP_INDEX")
 
 config :indexer, Indexer.Fetcher.PolygonZkevm.BridgeL1.Supervisor, enabled: ConfigHelper.chain_type() == "polygon_zkevm"
 
@@ -866,7 +868,9 @@ config :indexer, Indexer.Fetcher.PolygonZkevm.BridgeL1Tokens.Supervisor,
 
 config :indexer, Indexer.Fetcher.PolygonZkevm.BridgeL2,
   start_block: System.get_env("INDEXER_POLYGON_ZKEVM_L2_BRIDGE_START_BLOCK"),
-  bridge_contract: System.get_env("INDEXER_POLYGON_ZKEVM_L2_BRIDGE_CONTRACT")
+  bridge_contract: System.get_env("INDEXER_POLYGON_ZKEVM_L2_BRIDGE_CONTRACT"),
+  rollup_network_id_l2: ConfigHelper.parse_integer_or_nil_env_var("INDEXER_POLYGON_ZKEVM_L2_BRIDGE_NETWORK_ID"),
+  rollup_index_l2: ConfigHelper.parse_integer_or_nil_env_var("INDEXER_POLYGON_ZKEVM_L2_BRIDGE_ROLLUP_INDEX")
 
 config :indexer, Indexer.Fetcher.PolygonZkevm.BridgeL2.Supervisor, enabled: ConfigHelper.chain_type() == "polygon_zkevm"
 
diff --git a/cspell.json b/cspell.json
index c2dc8885d3..1019f0516b 100644
--- a/cspell.json
+++ b/cspell.json
@@ -18,6 +18,7 @@
     "Asfpp",
     "Autodetection",
     "Autonity",
+    "bitmask",
     "Blockchair",
     "CALLCODE",
     "CBOR",
diff --git a/docker-compose/envs/common-blockscout.env b/docker-compose/envs/common-blockscout.env
index 7d4bf2ed20..9596c89247 100644
--- a/docker-compose/envs/common-blockscout.env
+++ b/docker-compose/envs/common-blockscout.env
@@ -193,8 +193,12 @@ INDEXER_DISABLE_INTERNAL_TRANSACTIONS_FETCHER=false
 # INDEXER_POLYGON_ZKEVM_L1_BRIDGE_CONTRACT=
 # INDEXER_POLYGON_ZKEVM_L1_BRIDGE_NATIVE_SYMBOL=
 # INDEXER_POLYGON_ZKEVM_L1_BRIDGE_NATIVE_DECIMALS=
+# INDEXER_POLYGON_ZKEVM_L1_BRIDGE_NETWORK_ID=
+# INDEXER_POLYGON_ZKEVM_L1_BRIDGE_ROLLUP_INDEX=
 # INDEXER_POLYGON_ZKEVM_L2_BRIDGE_START_BLOCK=
 # INDEXER_POLYGON_ZKEVM_L2_BRIDGE_CONTRACT=
+# INDEXER_POLYGON_ZKEVM_L2_BRIDGE_NETWORK_ID=
+# INDEXER_POLYGON_ZKEVM_L2_BRIDGE_ROLLUP_INDEX=
 # INDEXER_ZKSYNC_BATCHES_ENABLED=
 # INDEXER_ZKSYNC_BATCHES_CHUNK_SIZE=
 # INDEXER_ZKSYNC_NEW_BATCHES_MAX_RANGE=
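As a configuration reference, the resolved values for a hypothetical deployment might look like the sketch below; the numbers are illustrative only, and the correct values are the network IDs and rollup index your rollup is registered with on L1:

    # runtime.exs equivalent of setting the new environment variables
    # (hypothetical values: L1 is network 0, this L2 is network 1, rollup index 0)
    config :indexer, Indexer.Fetcher.PolygonZkevm.BridgeL1,
      rollup_network_id_l1: 0,
      rollup_index_l1: nil

    config :indexer, Indexer.Fetcher.PolygonZkevm.BridgeL2,
      rollup_network_id_l2: 1,
      rollup_index_l2: 0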