|
|
|
@@ -9,7 +9,7 @@ defmodule Indexer.Fetcher.TokenBalance do
|
|
|
|
It behaves as a `BufferedTask`, so we can configure the `max_batch_size` and the `max_concurrency` to control how many |
|
|
|
|
token balances will be fetched at the same time. |
|
|
|
|
|
|
|
|
|
Also, this module set a `retries_count` for each token balance and increment this number to avoid fetching the ones |
|
|
|
|
Also, this module sets a `refetch_after` for each token balance in case of failure, to avoid repeatedly fetching the ones
|
|
|
|
that always raise errors when interacting with the Smart Contract.
|
|
|
|
""" |
|
|
|
|
|
|
|
|
@@ -32,8 +32,6 @@ defmodule Indexer.Fetcher.TokenBalance do
|
|
|
|
|
|
|
|
|
@timeout :timer.minutes(10) |
|
|
|
|
|
|
|
|
|
@max_retries 3 |
|
|
|
|
|
|
|
|
|
@spec async_fetch( |
|
|
|
|
[ |
|
|
|
|
%{ |
|
|
|
@@ -93,7 +91,7 @@ defmodule Indexer.Fetcher.TokenBalance do
|
|
|
|
@doc """ |
|
|
|
|
Fetches the given entries (token_balances) from the Smart Contract and imports them into our database.
|
|
|
|
|
|
|
|
|
It also increments the `retries_count` to avoid fetching token balances that always raise errors |
|
|
|
|
It also sets the `refetch_after` in case of failure, to avoid repeatedly fetching token balances that always raise errors
|
|
|
|
when reading their balance in the Smart Contract. |
|
|
|
|
""" |
|
|
|
|
@impl BufferedTask |
|
|
|
@@ -110,7 +108,6 @@ defmodule Indexer.Fetcher.TokenBalance do
|
|
|
|
result = |
|
|
|
|
params |
|
|
|
|
|> MissingBalanceOfToken.filter_token_balances_params(true, missing_balance_of_tokens) |
|
|
|
|
|> increase_retries_count() |
|
|
|
|
|> fetch_from_blockchain(missing_balance_of_tokens) |
|
|
|
|
|> import_token_balances() |
|
|
|
|
|
|
|
|
@@ -122,45 +119,22 @@ defmodule Indexer.Fetcher.TokenBalance do
|
|
|
|
end |
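To make the `run/2` pipeline above concrete, here is a rough sketch of the shape of a single token-balance entry as it flows into `fetch_from_blockchain/2`. The field names are taken from `entry/1` and `handle_other_errors/1` elsewhere in this diff; the values are invented placeholders (in the real pipeline the `*_hash` fields are hash structs, since `entry/1` calls `.bytes` on them):

# Hypothetical entry, for illustration only.
example_params = %{
  address_hash: "0x0000000000000000000000000000000000000001",
  token_contract_address_hash: "0x0000000000000000000000000000000000000002",
  block_number: 19_000_000,
  token_type: "ERC-20",
  token_id: nil,
  retries_count: 1,
  # set by handle_other_errors/1 after a failed fetch
  refetch_after: ~U[2024-01-01 00:00:00Z]
}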
|
|
|
|
|
|
|
|
|
def fetch_from_blockchain(params_list, missing_balance_of_tokens) do |
|
|
|
|
retryable_params_list = |
|
|
|
|
params_list |
|
|
|
|
|> Enum.filter(&(&1.retries_count <= @max_retries)) |
|
|
|
|
|> Enum.uniq_by(&Map.take(&1, [:token_contract_address_hash, :token_id, :address_hash, :block_number])) |
|
|
|
|
|
|
|
|
|
Logger.metadata(count: Enum.count(retryable_params_list)) |
|
|
|
|
|
|
|
|
|
%{fetched_token_balances: fetched_token_balances, failed_token_balances: _failed_token_balances} = |
|
|
|
|
1..@max_retries |
|
|
|
|
|> Enum.reduce_while(%{fetched_token_balances: [], failed_token_balances: retryable_params_list}, fn _x, acc -> |
|
|
|
|
{:ok, %{fetched_token_balances: fetched_token_balances, failed_token_balances: failed_token_balances}} = |
|
|
|
|
TokenBalances.fetch_token_balances_from_blockchain(acc.failed_token_balances) |
|
|
|
|
|
|
|
|
|
all_token_balances = %{ |
|
|
|
|
fetched_token_balances: acc.fetched_token_balances ++ fetched_token_balances, |
|
|
|
|
failed_token_balances: failed_token_balances |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
handle_success_balances(fetched_token_balances, missing_balance_of_tokens) |
|
|
|
|
|
|
|
|
|
if Enum.empty?(failed_token_balances) do |
|
|
|
|
{:halt, all_token_balances} |
|
|
|
|
else |
|
|
|
|
failed_token_balances = |
|
|
|
|
failed_token_balances |
|
|
|
|
|> handle_failed_balances() |
|
|
|
|
|> increase_retries_count() |
|
|
|
|
|
|
|
|
|
token_balances_updated_retries_count = |
|
|
|
|
all_token_balances |
|
|
|
|
|> Map.put(:failed_token_balances, failed_token_balances) |
|
|
|
|
|
|
|
|
|
{:cont, token_balances_updated_retries_count} |
|
|
|
|
end |
|
|
|
|
end) |
|
|
|
|
params_list = |
|
|
|
|
Enum.uniq_by(params_list, &Map.take(&1, [:token_contract_address_hash, :token_id, :address_hash, :block_number])) |
|
|
|
|
|
|
|
|
|
Logger.metadata(count: Enum.count(params_list)) |
|
|
|
|
|
|
|
|
|
{:ok, %{fetched_token_balances: fetched_token_balances, failed_token_balances: failed_token_balances}} = |
|
|
|
|
TokenBalances.fetch_token_balances_from_blockchain(params_list) |
|
|
|
|
|
|
|
|
|
handle_success_balances(fetched_token_balances, missing_balance_of_tokens) |
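# keep the failed balances as well, so they are imported with an updated refetch_after (see handle_other_errors/1)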
|
|
|
|
failed_balances_to_keep = handle_failed_balances(failed_token_balances) |
|
|
|
|
|
|
|
|
|
fetched_token_balances |
|
|
|
|
fetched_token_balances ++ failed_balances_to_keep |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
defp handle_success_balances([], _missing_balance_of_tokens), do: :ok |
|
|
|
|
|
|
|
|
|
defp handle_success_balances(fetched_token_balances, missing_balance_of_tokens) do |
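# collect the token contract hashes that did return a balance so they can be marked as implementing balanceOf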
|
|
|
|
successful_token_hashes = |
|
|
|
|
fetched_token_balances |
|
|
|
@@ -178,7 +152,15 @@ defmodule Indexer.Fetcher.TokenBalance do
|
|
|
|
|> MissingBalanceOfToken.mark_as_implemented() |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
defp handle_failed_balances([]), do: [] |
|
|
|
|
|
|
|
|
|
defp handle_failed_balances(failed_token_balances) do |
|
|
|
|
failed_token_balances |
|
|
|
|
|> handle_missing_balance_of_tokens() |
|
|
|
|
|> handle_other_errors() |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
defp handle_missing_balance_of_tokens(failed_token_balances) do |
|
|
|
|
{missing_balance_of_balances, other_failed_balances} = |
|
|
|
|
Enum.split_with(failed_token_balances, fn |
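# a balanceOf call whose result cannot be decoded is treated as the token not implementing balanceOf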
|
|
|
|
%{error: :unable_to_decode} -> true |
|
|
|
@@ -201,9 +183,27 @@ defmodule Indexer.Fetcher.TokenBalance do
|
|
|
|
other_failed_balances |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
defp increase_retries_count(params_list) do |
|
|
|
|
params_list |
|
|
|
|
|> Enum.map(&Map.put(&1, :retries_count, &1.retries_count + 1)) |
|
|
|
|
defp handle_other_errors(failed_token_balances) do |
|
|
|
|
Enum.map(failed_token_balances, fn token_balance_params -> |
|
|
|
|
new_retries_count = token_balance_params.retries_count + 1 |
|
|
|
|
|
|
|
|
|
Map.merge(token_balance_params, %{ |
|
|
|
|
retries_count: new_retries_count, |
|
|
|
|
refetch_after: define_refetch_after(new_retries_count) |
|
|
|
|
}) |
|
|
|
|
end) |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
defp define_refetch_after(retries_count) do |
|
|
|
|
config = Application.get_env(:indexer, __MODULE__) |
|
|
|
|
|
|
|
|
|
coef = config[:exp_timeout_coeff] |
|
|
|
|
max_refetch_interval = config[:max_refetch_interval] |
|
|
|
|
max_retries_count = :math.log(max_refetch_interval / 1000 / coef) |
|
|
|
|
|
|
|
|
|
value = floor(coef * :math.exp(min(retries_count, max_retries_count))) |
|
|
|
|
|
|
|
|
|
Timex.shift(Timex.now(), seconds: value) |
|
|
|
|
end |
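To see the backoff curve that `define_refetch_after/1` produces, here is a standalone sketch of the same formula with assumed settings (`exp_timeout_coeff: 2`, `max_refetch_interval: :timer.hours(1)`); the real values come from the application config and may differ:

# Standalone sketch of the backoff in define_refetch_after/1 (assumed settings, not project defaults).
coef = 2
max_refetch_interval = :timer.hours(1)

# cap the exponent so the delay never exceeds max_refetch_interval
max_retries_count = :math.log(max_refetch_interval / 1000 / coef)

for retries_count <- 1..10 do
  delay_s = floor(coef * :math.exp(min(retries_count, max_retries_count)))
  IO.puts("retry #{retries_count}: refetch after ~#{delay_s}s")
end
# => roughly 5s, 14s, 40s, 109s, 296s, 806s, 2193s, then capped near 3600s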
|
|
|
|
|
|
|
|
|
def import_token_balances(token_balances_params) do |
|
|
|
@@ -259,17 +259,14 @@ defmodule Indexer.Fetcher.TokenBalance do
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
defp entry( |
|
|
|
|
%{ |
|
|
|
|
token_contract_address_hash: token_contract_address_hash, |
|
|
|
|
address_hash: address_hash, |
|
|
|
|
block_number: block_number, |
|
|
|
|
token_type: token_type, |
|
|
|
|
token_id: token_id |
|
|
|
|
} = token_balance |
|
|
|
|
) do |
|
|
|
|
retries_count = Map.get(token_balance, :retries_count, 0) |
|
|
|
|
|
|
|
|
|
defp entry(%{ |
|
|
|
|
token_contract_address_hash: token_contract_address_hash, |
|
|
|
|
address_hash: address_hash, |
|
|
|
|
block_number: block_number, |
|
|
|
|
token_type: token_type, |
|
|
|
|
token_id: token_id, |
|
|
|
|
retries_count: retries_count |
|
|
|
|
}) do |
|
|
|
|
token_id_int = |
|
|
|
|
case token_id do |
|
|
|
|
%Decimal{} -> Decimal.to_integer(token_id) |
|
|
|
@@ -277,7 +274,7 @@ defmodule Indexer.Fetcher.TokenBalance do
|
|
|
|
_ -> token_id |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
{address_hash.bytes, token_contract_address_hash.bytes, block_number, token_type, token_id_int, retries_count} |
|
|
|
|
{address_hash.bytes, token_contract_address_hash.bytes, block_number, token_type, token_id_int, retries_count || 0} |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
defp format_params( |
|
|
|
|