@@ -11,7 +11,7 @@ defmodule Indexer.BlockFetcher do
alias EthereumJSONRPC
alias Explorer.Chain
alias Indexer.{BalanceFetcher, AddressExtraction, InternalTransactionFetcher, Sequence}
alias Indexer.{BalanceFetcher, AddressExtraction, BoundInterval, InternalTransactionFetcher, Sequence}
# dialyzer thinks that Logger.debug functions always have no_local_return
@dialyzer {:nowarn_function, import_range: 3}
@@ -60,6 +60,17 @@ defmodule Indexer.BlockFetcher do
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
end
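# Fetcher state; the batch-size and concurrency fields default to the module attributes of the same name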
defstruct json_rpc_named_arguments: [],
          catchup_task: nil,
          catchup_block_number: nil,
          catchup_bound_interval: nil,
          realtime_tasks: [],
          realtime_interval: nil,
          blocks_batch_size: @blocks_batch_size,
          blocks_concurrency: @blocks_concurrency,
          receipts_batch_size: @receipts_batch_size,
          receipts_concurrency: @receipts_concurrency
@impl GenServer
def init(opts) do
opts =
@@ -67,48 +78,81 @@ defmodule Indexer.BlockFetcher do
|> Application.get_all_env()
|> Keyword.merge(opts)
state = %{
interval = div(opts[:block_interval] || @block_interval, 2)
state = %__MODULE__{
  json_rpc_named_arguments: Keyword.fetch!(opts, :json_rpc_named_arguments),
  catchup_task: nil,
  realtime_tasks: [],
  realtime_interval: div(opts[:block_interval] || @block_interval, 2),
  catchup_bound_interval: BoundInterval.within(interval..(interval * 10)),
  realtime_interval: interval,
  blocks_batch_size: Keyword.get(opts, :blocks_batch_size, @blocks_batch_size),
  blocks_concurrency: Keyword.get(opts, :blocks_concurrency, @blocks_concurrency),
  receipts_batch_size: Keyword.get(opts, :receipts_batch_size, @receipts_batch_size),
  receipts_concurrency: Keyword.get(opts, :receipts_concurrency, @receipts_concurrency)
}
send(self(), :catchup_index)
{:ok, _} = :timer.send_interval(state.realtime_interval, :realtime_index)
{:ok, schedule_next_catchup_index(state)}
{:ok, state}
end
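# :catchup_index starts a supervised task that back-fills missing blocks from the latest block toward genesis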
@impl GenServer
def handle_info(:catchup_index, %{} = state) do
def handle_info(:catchup_index, %__MODULE__{} = state) do
catchup_task = Task.Supervisor.async_nolink(Indexer.TaskSupervisor, fn -> catchup_task(state) end)
{:noreply, %{state | catchup_task: catchup_task}}
end
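# :realtime_index fires on the realtime_interval timer and indexes from the current latest block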
def handle_info(:realtime_index, %{realtime_tasks: realtime_tasks} = state) when is_list(realtime_tasks) do
def handle_info(:realtime_index, %__MODULE__{realtime_tasks: realtime_tasks} = state) when is_list(realtime_tasks) do
realtime_task = Task.Supervisor.async_nolink(Indexer.TaskSupervisor, fn -> realtime_task(state) end)
{:noreply, %{state | realtime_tasks: [realtime_task | realtime_tasks]}}
end
def handle_info({:DOWN, ref, :process, pid, :normal}, %{catchup_task: %Task{pid: pid, ref: ref}} = state) do
Logger.info(fn -> "Finished index down to genesis. Transitioning to only realtime index." end)
def handle_info(
      {ref, missing_block_count},
      %__MODULE__{
        catchup_block_number: catchup_block_number,
        catchup_bound_interval: catchup_bound_interval,
        catchup_task: %Task{ref: ref}
      } = state
    )
    when is_integer(missing_block_count) do
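# If nothing was missing, back off the catch-up polling interval (presumably toward the configured upper bound);
# if blocks had to be fetched, poll again sooner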
new_catchup_bound_interval =
  case missing_block_count do
    0 ->
      Logger.info("Index already caught up in #{catchup_block_number}-0")
      BoundInterval.increase(catchup_bound_interval)

    _ ->
      Logger.info("Index had to catch up #{missing_block_count} blocks in #{catchup_block_number}-0")
      BoundInterval.decrease(catchup_bound_interval)
  end
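# The task's reply was already received above, so flush the matching :DOWN message
# rather than letting the :DOWN clauses below treat it as a crash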
Process.demonitor(ref, [:flush])
interval = new_catchup_bound_interval.current
Logger.info(fn ->
  "Checking if index needs to catch up in #{interval}ms"
end)
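# Reschedule the next catch-up pass after the adjusted interval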
Process.send_after(self(), :catchup_index, interval)
{:noreply, %{state | catchup_task: nil}}
{:noreply, %{state | catchup_bound_interval: new_catchup_bound_interval, catchup_task: nil}}
end
def handle_info({:DOWN, ref, :process, pid, reason}, %{catchup_task: %Task{pid: pid, ref: ref}} = state) do
Logger.error(fn -> "catchup index stream exited with reason (#{inspect(reason)}). Restarting" end)
def handle_info({:DOWN, ref, :process, pid, reason}, %__MODULE__{catchup_task: %Task{pid: pid, ref: ref}} = state) do
Logger.error(fn -> "Catchup index stream exited with reason (#{inspect(reason)}). Restarting" end)
{:noreply, schedule_next_catchup_index(%{state | catchup_task: nil})}
send(self(), :catchup_index)
{:noreply, %__MODULE__{state | catchup_task: nil}}
end
def handle_info({:DOWN, ref, :process, pid, reason}, %{realtime_tasks: realtime_tasks} = state)
def handle_info({:DOWN, ref, :process, pid, reason}, %__MODULE__{realtime_tasks: realtime_tasks} = state)
    when is_list(realtime_tasks) do
{down_realtime_tasks, running_realtime_tasks} =
  Enum.split_with(realtime_tasks, fn
@@ -127,7 +171,7 @@ defmodule Indexer.BlockFetcher do
Logger.error(fn -> "Unexpected pid (#{inspect(pid)}) exited with reason (#{inspect(reason)})." end)
end
{:noreply, %{state | realtime_tasks: running_realtime_tasks}}
{:noreply, %__MODULE__{state | realtime_tasks: running_realtime_tasks}}
end
defp cap_seq(seq, next, range) do
@@ -145,9 +189,12 @@ defmodule Indexer.BlockFetcher do
:ok
end
defp fetch_transaction_receipts(_state, []), do: {:ok, %{logs: [], receipts: []}}
defp fetch_transaction_receipts(%__MODULE__{} = _state, []), do: {:ok, %{logs: [], receipts: []}}
defp fetch_transaction_receipts(%{json_rpc_named_arguments: json_rpc_named_arguments} = state, transaction_params) do
defp fetch_transaction_receipts(
       %__MODULE__{json_rpc_named_arguments: json_rpc_named_arguments} = state,
       transaction_params
     ) do
debug(fn -> "fetching #{length(transaction_params)} transaction receipts" end)
stream_opts = [max_concurrency: state.receipts_concurrency, timeout: :infinity]
@@ -166,17 +213,42 @@ defmodule Indexer.BlockFetcher do
end)
end
defp catchup_task(%{json_rpc_named_arguments: json_rpc_named_arguments} = state) do
# Returns number of missing blocks that had to be caught up
defp catchup_task(%__MODULE__{json_rpc_named_arguments: json_rpc_named_arguments} = state) do
{:ok, latest_block_number} = EthereumJSONRPC.fetch_block_number_by_tag("latest", json_rpc_named_arguments)
missing_ranges = Chain.missing_block_number_ranges(latest_block_number..0)
count = Enum.count(missing_ranges)
debug(fn -> "#{count} missed block ranges between #{latest_block_number} and genesis" end)
case latest_block_number do
# let realtime indexer get the genesis block
0 ->
0
_ ->
# realtime indexer gets the current latest block
first = latest_block_number - 1
last = 0
missing_ranges = Chain.missing_block_number_ranges(first..last)
range_count = Enum.count(missing_ranges)
missing_block_count =
  missing_ranges
  |> Stream.map(&Enum.count/1)
  |> Enum.sum()
debug(fn -> "#{missing_block_count} missed blocks in #{range_count} ranges between #{first} and #{last}" end)
{:ok, seq} = Sequence.start_link(ranges: missing_ranges, step: -1 * state.blocks_batch_size)
Sequence.cap(seq)
case missing_block_count do
0 ->
:ok
stream_import(state, seq, max_concurrency: state.blocks_concurrency)
_ ->
{:ok, seq} = Sequence.start_link(ranges: missing_ranges, step: -1 * state.blocks_batch_size)
Sequence.cap(seq)
stream_import(state, seq, max_concurrency: state.blocks_concurrency)
end
missing_block_count
end
end
defp insert(seq, range, options) when is_list(options) do
@@ -253,13 +325,13 @@ defmodule Indexer.BlockFetcher do
|> InternalTransactionFetcher.async_fetch(10_000)
end
defp realtime_task(%{json_rpc_named_arguments: json_rpc_named_arguments} = state) do
defp realtime_task(%__MODULE__{json_rpc_named_arguments: json_rpc_named_arguments} = state) do
{:ok, latest_block_number} = EthereumJSONRPC.fetch_block_number_by_tag("latest", json_rpc_named_arguments)
{:ok, seq} = Sequence.start_link(first: latest_block_number, step: 2)
stream_import(state, seq, max_concurrency: 1)
end
defp stream_import(state, seq, task_opts) do
defp stream_import(%__MODULE__{} = state, seq, task_opts) do
seq
|> Sequence.build_stream()
|> Task.async_stream(
@@ -272,7 +344,7 @@ defmodule Indexer.BlockFetcher do
# Run at state.blocks_concurrency max_concurrency when called by `stream_import/3`
# Only public for testing
@doc false
def import_range(range, %{json_rpc_named_arguments: json_rpc_named_arguments} = state, seq) do
def import_range(range, %__MODULE__{json_rpc_named_arguments: json_rpc_named_arguments} = state, seq) do
with {:blocks, {:ok, next, result}} <-
       {:blocks, EthereumJSONRPC.fetch_blocks_by_range(range, json_rpc_named_arguments)},
     %{blocks: blocks, transactions: transactions_without_receipts} = result,
@@ -321,9 +393,4 @@ defmodule Indexer.BlockFetcher do
Map.merge(transaction_params, Map.fetch!(transaction_hash_to_receipt_params, transaction_hash))
end)
end
defp schedule_next_catchup_index(state) do
send(self(), :catchup_index)
state
end
end