diff --git a/README.md b/README.md
index 2d10d4ce0f..9b08f7c246 100644
--- a/README.md
+++ b/README.md
@@ -245,6 +245,22 @@ To view Modules and API Reference documentation:
2. View the generated docs.
`open doc/index.html`
+## Front-end
+
+### JavaScript
+
+All JavaScript files are under [apps/block_scout_web/assets/js](https://github.com/poanetwork/blockscout/tree/master/apps/block_scout_web/assets/js) and the main file is [app.js](https://github.com/poanetwork/blockscout/blob/master/apps/block_scout_web/assets/js/app.js). This file imports all JavaScript used in the application. If you want to create a new JS file, consider creating it in [/js/pages](https://github.com/poanetwork/blockscout/tree/master/apps/block_scout_web/assets/js/pages) or [/js/lib](https://github.com/poanetwork/blockscout/tree/master/apps/block_scout_web/assets/js/lib), as follows:
+
+#### js/lib
+This folder contains all scripts that can be reused in any page or can be used as a helper to some component.
+
+#### js/pages
+This folder contains the scripts that are specific for some page.
+
+#### Redux
+This project uses Redux to control the state in some pages. There are pages that have things happening in real-time thanks to the Phoenix channels, e.g. the Address page, so the page state changes a lot depending on which events it is listening to. Redux is also used to load some content asynchronously; see [async_listing_load.js](https://github.com/poanetwork/blockscout/blob/master/apps/block_scout_web/assets/js/lib/async_listing_load.js).
+
+To understand how to build new pages that need Redux in this project, see [redux_helpers.js](https://github.com/poanetwork/blockscout/blob/master/apps/block_scout_web/assets/js/lib/redux_helpers.js).
## Internationalization
diff --git a/apps/block_scout_web/assets/js/lib/redux_helpers.js b/apps/block_scout_web/assets/js/lib/redux_helpers.js
index e45fa7d516..fdf7c659b3 100644
--- a/apps/block_scout_web/assets/js/lib/redux_helpers.js
+++ b/apps/block_scout_web/assets/js/lib/redux_helpers.js
@@ -2,10 +2,99 @@ import $ from 'jquery'
import _ from 'lodash'
import { createStore as reduxCreateStore } from 'redux'
+/**
+ * Create a redux store given the reducer. It also enables the Redux dev tools.
+ */
export function createStore (reducer) {
return reduxCreateStore(reducer, window.__REDUX_DEVTOOLS_EXTENSION__ && window.__REDUX_DEVTOOLS_EXTENSION__())
}
+/**
+ * Connect elements with the redux store. It must receive an object with the following attributes:
+ *
+ * elements: It is an object with elements that are going to react to the redux state or add something
+ * to the initial state.
+ *
+ * ```javascript
+ * const elements = {
+ * // The JQuery selector for finding elements in the page.
+ * '[data-counter]': {
+ * // Useful to put things from the page to the redux state.
+ *     load ($el) {...},
+ * // Check for state changes and manipulates the DOM accordingly.
+ * render ($el, state, oldState) {...}
+ * }
+ * }
+ * ```
+ *
+ * The load and render functions are optional; you can have both or just one of them. It depends
+ * on whether you want to load something into the state on the first render and/or have the element
+ * react to the redux state. Notice that you can include more elements if you want to, since
+ * elements is an object.
+ *
+ * store: It is the redux store that the elements should be connected with.
+ * ```javascript
+ * const store = createStore(reducer)
+ * ```
+ *
+ * action: The first action that the store is going to dispatch. Optional, by default 'ELEMENTS_LOAD'
+ * is going to be dispatched.
+ *
+ * ### Examples
+ *
+ * Given the markup:
+ * ```HTML
+ *
+ * 1
+ *
+ * ```
+ *
+ * The reducer:
+ * ```javascript
+ * function reducer (state = { number: null }, action) {
+ * switch (action.type) {
+ * case 'ELEMENTS_LOAD': {
+ * return Object.assign({}, state, { number: action.number })
+ * }
+ * case 'INCREMENT': {
+ * return Object.assign({}, state, { number: state.number + 1 })
+ * }
+ * default:
+ * return state
+ * }
+ * }
+ * ```
+ *
+ * The elements:
+ * ```javascript
+ * const elements = {
+ * // '[data-counter]' is the element that will be connected to the redux store.
+ * '[data-counter]': {
+ * // Find the number within data-counter and add to the state.
+ * load ($el) {
+ * return { number: $el.find('.number').val() }
+ * },
+ *     // React to redux state. In case the number in the state changes, it is going to render the
+ * // new number.
+ * render ($el, state, oldState) {
+ * if (state.number === oldState.number) return
+ *
+ * $el.html(state.number)
+ * }
+ * }
+ * }
+ * ```
+ * All we need to do is connect the store and the elements using this function.
+ * ```javascript
+ * connectElements({store, elements})
+ * ```
+ *
+ * Now, if we dispatch the `INCREMENT` action, the state is going to change and the [data-counter]
+ * element is going to re-render since they are connected.
+ * ```javascript
+ * store.dispatch({type: 'INCREMENT'})
+ * ```
+ */
export function connectElements ({ elements, store, action = 'ELEMENTS_LOAD' }) {
function loadElements () {
return _.reduce(elements, (pageLoadParams, { load }, selector) => {
@@ -16,6 +105,7 @@ export function connectElements ({ elements, store, action = 'ELEMENTS_LOAD' })
return _.isObject(morePageLoadParams) ? Object.assign(pageLoadParams, morePageLoadParams) : pageLoadParams
}, {})
}
+
function renderElements (state, oldState) {
_.forIn(elements, ({ render }, selector) => {
if (!render) return
@@ -24,11 +114,13 @@ export function connectElements ({ elements, store, action = 'ELEMENTS_LOAD' })
render($el, state, oldState)
})
}
+
let oldState = store.getState()
store.subscribe(() => {
const state = store.getState()
renderElements(state, oldState)
oldState = state
})
+
store.dispatch(Object.assign(loadElements(), { type: action }))
}
diff --git a/apps/block_scout_web/config/config.exs b/apps/block_scout_web/config/config.exs
index b49733662d..38d5b3cd1f 100644
--- a/apps/block_scout_web/config/config.exs
+++ b/apps/block_scout_web/config/config.exs
@@ -48,7 +48,9 @@ config :ex_cldr,
config :logger, :block_scout_web,
# keep synced with `config/config.exs`
format: "$dateT$time $metadata[$level] $message\n",
- metadata: ~w(application fetcher request_id)a,
+ metadata:
+ ~w(application fetcher request_id first_block_number last_block_number missing_block_range_count missing_block_count
+ block_number step count error_count shrunk)a,
metadata_filter: [application: :block_scout_web]
config :spandex_phoenix, tracer: BlockScoutWeb.Tracer
diff --git a/apps/ethereum_jsonrpc/config/config.exs b/apps/ethereum_jsonrpc/config/config.exs
index 7f234ef7b3..42e20c2b7b 100644
--- a/apps/ethereum_jsonrpc/config/config.exs
+++ b/apps/ethereum_jsonrpc/config/config.exs
@@ -17,7 +17,9 @@ config :ethereum_jsonrpc, EthereumJSONRPC.Tracer,
config :logger, :ethereum_jsonrpc,
# keep synced with `config/config.exs`
format: "$dateT$time $metadata[$level] $message\n",
- metadata: ~w(application fetcher request_id)a,
+ metadata:
+ ~w(application fetcher request_id first_block_number last_block_number missing_block_range_count missing_block_count
+ block_number step count error_count shrunk)a,
metadata_filter: [application: :ethereum_jsonrpc]
# Import environment specific config. This must remain at the bottom
diff --git a/apps/explorer/config/config.exs b/apps/explorer/config/config.exs
index 439fdad4cd..61c197f876 100644
--- a/apps/explorer/config/config.exs
+++ b/apps/explorer/config/config.exs
@@ -50,7 +50,9 @@ config :explorer,
config :logger, :explorer,
# keep synced with `config/config.exs`
format: "$dateT$time $metadata[$level] $message\n",
- metadata: ~w(application fetcher request_id)a,
+ metadata:
+ ~w(application fetcher request_id first_block_number last_block_number missing_block_range_count missing_block_count
+ block_number step count error_count shrunk)a,
metadata_filter: [application: :explorer]
config :spandex_ecto, SpandexEcto.EctoLogger,
diff --git a/apps/explorer/lib/explorer/chain/address/current_token_balance.ex b/apps/explorer/lib/explorer/chain/address/current_token_balance.ex
index 3091d3056d..2a89d327a7 100644
--- a/apps/explorer/lib/explorer/chain/address/current_token_balance.ex
+++ b/apps/explorer/lib/explorer/chain/address/current_token_balance.ex
@@ -1,6 +1,9 @@
defmodule Explorer.Chain.Address.CurrentTokenBalance do
@moduledoc """
Represents the current token balance from addresses according to the last block.
+
+ In this table we can see only the last balance from addresses. If you want to see the history of
+ token balances, look at `Address.TokenBalance` instead.
"""
use Ecto.Schema
diff --git a/apps/explorer/lib/explorer/chain/address/token_balance.ex b/apps/explorer/lib/explorer/chain/address/token_balance.ex
index 100b9a158d..d15c2161ac 100644
--- a/apps/explorer/lib/explorer/chain/address/token_balance.ex
+++ b/apps/explorer/lib/explorer/chain/address/token_balance.ex
@@ -1,6 +1,10 @@
defmodule Explorer.Chain.Address.TokenBalance do
@moduledoc """
Represents a token balance from an address.
+
+ In this table we can see all token balances that a specific address had according to the block
+ numbers. If you want to show only the last balance from an address, consider querying against
+ `Address.CurrentTokenBalance` instead.
"""
use Ecto.Schema
diff --git a/apps/indexer/config/config.exs b/apps/indexer/config/config.exs
index b08cb0ef23..619d66f561 100644
--- a/apps/indexer/config/config.exs
+++ b/apps/indexer/config/config.exs
@@ -19,7 +19,9 @@ config :indexer, Indexer.Tracer,
config :logger, :indexer,
# keep synced with `config/config.exs`
format: "$dateT$time $metadata[$level] $message\n",
- metadata: ~w(application fetcher request_id)a,
+ metadata:
+ ~w(application fetcher request_id first_block_number last_block_number missing_block_range_count missing_block_count
+ block_number step count error_count shrunk)a,
metadata_filter: [application: :indexer]
# Import environment specific config. This must remain at the bottom
diff --git a/apps/indexer/lib/indexer/block/catchup/bound_interval_supervisor.ex b/apps/indexer/lib/indexer/block/catchup/bound_interval_supervisor.ex
index 4dc800c812..f1b216aa2b 100644
--- a/apps/indexer/lib/indexer/block/catchup/bound_interval_supervisor.ex
+++ b/apps/indexer/lib/indexer/block/catchup/bound_interval_supervisor.ex
@@ -184,7 +184,8 @@ defmodule Indexer.Block.Catchup.BoundIntervalSupervisor do
end
def handle_info(
- {ref, %{first_block_number: first_block_number, missing_block_count: missing_block_count, shrunk: false}},
+ {ref,
+ %{first_block_number: first_block_number, missing_block_count: missing_block_count, shrunk: false = shrunk}},
%__MODULE__{
bound_interval: bound_interval,
task: %Task{ref: ref}
@@ -194,20 +195,23 @@ defmodule Indexer.Block.Catchup.BoundIntervalSupervisor do
new_bound_interval =
case missing_block_count do
0 ->
- Logger.info(fn -> ["Index already caught up in ", to_string(first_block_number), "-0."] end)
+ Logger.info("Index already caught up.",
+ first_block_number: first_block_number,
+ last_block_number: 0,
+ missing_block_count: 0,
+ shrunk: shrunk
+ )
BoundInterval.increase(bound_interval)
_ ->
- Logger.info(fn ->
- [
- "Index had to catch up ",
- to_string(missing_block_count),
- " blocks in ",
- to_string(first_block_number),
- "-0."
- ]
- end)
+ Logger.info(
+ "Index had to catch up.",
+ first_block_number: first_block_number,
+ last_block_number: 0,
+ missing_block_count: missing_block_count,
+ shrunk: shrunk
+ )
BoundInterval.decrease(bound_interval)
end
@@ -226,7 +230,8 @@ defmodule Indexer.Block.Catchup.BoundIntervalSupervisor do
end
def handle_info(
- {ref, %{first_block_number: first_block_number, missing_block_count: missing_block_count, shrunk: true}},
+ {ref,
+ %{first_block_number: first_block_number, missing_block_count: missing_block_count, shrunk: true = shrunk}},
%__MODULE__{
task: %Task{ref: ref}
} = state
@@ -234,15 +239,13 @@ defmodule Indexer.Block.Catchup.BoundIntervalSupervisor do
when is_integer(missing_block_count) do
Process.demonitor(ref, [:flush])
- Logger.info(fn ->
- [
- "Index had to catch up ",
- to_string(missing_block_count),
- " blocks in ",
- to_string(first_block_number),
- "-0, but the sequence was shrunk to save memory, so retrying immediately."
- ]
- end)
+ Logger.info(
+ "Index had to catch up, but the sequence was shrunk to save memory, so retrying immediately.",
+ first_block_number: first_block_number,
+ last_block_number: 0,
+ missing_block_count: missing_block_count,
+ shrunk: shrunk
+ )
send(self(), :catchup_index)
diff --git a/apps/indexer/lib/indexer/block/catchup/fetcher.ex b/apps/indexer/lib/indexer/block/catchup/fetcher.ex
index 3e4b88a308..f7206dc094 100644
--- a/apps/indexer/lib/indexer/block/catchup/fetcher.ex
+++ b/apps/indexer/lib/indexer/block/catchup/fetcher.ex
@@ -71,6 +71,9 @@ defmodule Indexer.Block.Catchup.Fetcher do
# realtime indexer gets the current latest block
first = latest_block_number - 1
last = 0
+
+ Logger.metadata(first_block_number: first, last_block_number: last)
+
missing_ranges = Chain.missing_block_number_ranges(first..last)
range_count = Enum.count(missing_ranges)
@@ -79,9 +82,10 @@ defmodule Indexer.Block.Catchup.Fetcher do
|> Stream.map(&Enum.count/1)
|> Enum.sum()
- Logger.debug(fn ->
- "#{missing_block_count} missed blocks in #{range_count} ranges between #{first} and #{last}"
- end)
+ Logger.debug(fn -> "Missed blocks in ranges." end,
+ missing_block_range_count: range_count,
+ missing_block_count: missing_block_count
+ )
shrunk =
case missing_block_count do
@@ -171,23 +175,25 @@ defmodule Indexer.Block.Catchup.Fetcher do
)
defp fetch_and_import_range_from_sequence(
%__MODULE__{block_fetcher: %Block.Fetcher{} = block_fetcher},
- _.._ = range,
+ first..last = range,
sequence
) do
- Logger.metadata(fetcher: :block_catchup)
+ Logger.metadata(fetcher: :block_catchup, first_block_number: first, last_block_number: last)
case fetch_and_import_range(block_fetcher, range) do
{:ok, %{inserted: inserted, errors: errors}} ->
- errors = cap_seq(sequence, errors, range)
+ errors = cap_seq(sequence, errors)
retry(sequence, errors)
{:ok, inserted: inserted}
{:error, {step, reason}} = error ->
- Logger.error(fn ->
- first..last = range
- "failed to fetch #{step} for blocks #{first} - #{last}: #{inspect(reason)}. Retrying block range."
- end)
+ Logger.error(
+ fn ->
+ ["failed to fetch: ", inspect(reason), ". Retrying."]
+ end,
+ step: step
+ )
push_back(sequence, range)
@@ -195,7 +201,7 @@ defmodule Indexer.Block.Catchup.Fetcher do
{:error, changesets} = error when is_list(changesets) ->
Logger.error(fn ->
- "failed to validate blocks #{inspect(range)}: #{inspect(changesets)}. Retrying"
+ ["failed to validate: ", inspect(changesets), ". Retrying."]
end)
push_back(sequence, range)
@@ -203,9 +209,12 @@ defmodule Indexer.Block.Catchup.Fetcher do
error
{:error, {step, failed_value, _changes_so_far}} = error ->
- Logger.error(fn ->
- "failed to insert blocks during #{step} #{inspect(range)}: #{inspect(failed_value)}. Retrying"
- end)
+ Logger.error(
+ fn ->
+ ["failed to insert: ", inspect(failed_value), ". Retrying."]
+ end,
+ step: step
+ )
push_back(sequence, range)
@@ -213,7 +222,7 @@ defmodule Indexer.Block.Catchup.Fetcher do
end
end
- defp cap_seq(seq, errors, range) do
+ defp cap_seq(seq, errors) do
{not_founds, other_errors} =
Enum.split_with(errors, fn
%{code: 404, data: %{number: _}} -> true
@@ -222,10 +231,7 @@ defmodule Indexer.Block.Catchup.Fetcher do
case not_founds do
[] ->
- Logger.debug(fn ->
- first_block_number..last_block_number = range
- "got blocks #{first_block_number} - #{last_block_number}"
- end)
+ Logger.debug("got blocks")
other_errors
@@ -239,7 +245,7 @@ defmodule Indexer.Block.Catchup.Fetcher do
defp push_back(sequence, range) do
case Sequence.push_back(sequence, range) do
:ok -> :ok
- {:error, reason} -> Logger.error(fn -> ["Could not push block range to back to Sequence: ", inspect(reason)] end)
+ {:error, reason} -> Logger.error(fn -> ["Could not push back to Sequence: ", inspect(reason)] end)
end
end
diff --git a/apps/indexer/lib/indexer/block/fetcher/receipts.ex b/apps/indexer/lib/indexer/block/fetcher/receipts.ex
index 4095fa763a..56ec86ecc6 100644
--- a/apps/indexer/lib/indexer/block/fetcher/receipts.ex
+++ b/apps/indexer/lib/indexer/block/fetcher/receipts.ex
@@ -13,7 +13,7 @@ defmodule Indexer.Block.Fetcher.Receipts do
%Block.Fetcher{json_rpc_named_arguments: json_rpc_named_arguments} = state,
transaction_params
) do
- Logger.debug(fn -> "fetching #{length(transaction_params)} transaction receipts" end)
+ Logger.debug("fetching transaction receipts", count: Enum.count(transaction_params))
stream_opts = [max_concurrency: state.receipts_concurrency, timeout: :infinity]
transaction_params
diff --git a/apps/indexer/lib/indexer/block/realtime/fetcher.ex b/apps/indexer/lib/indexer/block/realtime/fetcher.ex
index a1381871d4..0380c7504d 100644
--- a/apps/indexer/lib/indexer/block/realtime/fetcher.ex
+++ b/apps/indexer/lib/indexer/block/realtime/fetcher.ex
@@ -159,15 +159,19 @@ defmodule Indexer.Block.Realtime.Fetcher do
@decorate trace(name: "fetch", resource: "Indexer.Block.Realtime.Fetcher.fetch_and_import_block/3", tracer: Tracer)
def fetch_and_import_block(block_number_to_fetch, block_fetcher, reorg?, retry \\ 3) do
- Logger.metadata(fetcher: :block_realtime)
-
- if reorg? do
- # give previous fetch attempt (for same block number) a chance to finish
- # before fetching again, to reduce block consensus mistakes
- :timer.sleep(@reorg_delay)
- end
+ Indexer.Logger.metadata(
+ fn ->
+ if reorg? do
+ # give previous fetch attempt (for same block number) a chance to finish
+ # before fetching again, to reduce block consensus mistakes
+ :timer.sleep(@reorg_delay)
+ end
- do_fetch_and_import_block(block_number_to_fetch, block_fetcher, retry)
+ do_fetch_and_import_block(block_number_to_fetch, block_fetcher, retry)
+ end,
+ fetcher: :block_realtime,
+ block_number: block_number_to_fetch
+ )
end
@decorate span(tracer: Tracer)
@@ -179,33 +183,28 @@ defmodule Indexer.Block.Realtime.Fetcher do
Task.Supervisor.start_child(TaskSupervisor, ConsensusEnsurer, :perform, args)
end
- Logger.debug(fn ->
- ["fetched and imported block ", to_string(block_number_to_fetch)]
- end)
+ Logger.debug("Fetched and imported.")
{:ok, %{inserted: _, errors: [_ | _] = errors}} ->
Logger.error(fn ->
[
- "failed to fetch block ",
- to_string(block_number_to_fetch),
- ": ",
+ "failed to fetch block: ",
inspect(errors),
". Block will be retried by catchup indexer."
]
end)
{:error, {step, reason}} ->
- Logger.error(fn ->
- [
- "failed to fetch ",
- to_string(step),
- " for block ",
- to_string(block_number_to_fetch),
- ": ",
- inspect(reason),
- ". Block will be retried by catchup indexer."
- ]
- end)
+ Logger.error(
+ fn ->
+ [
+ "failed to fetch: ",
+ inspect(reason),
+ ". Block will be retried by catchup indexer."
+ ]
+ end,
+ step: step
+ )
{:error, [%Changeset{} | _] = changesets} ->
params = %{
@@ -228,17 +227,16 @@ defmodule Indexer.Block.Realtime.Fetcher do
end
{:error, {step, failed_value, _changes_so_far}} ->
- Logger.error(fn ->
- [
- "failed to insert ",
- to_string(step),
- " for block ",
- to_string(block_number_to_fetch),
- ": ",
- inspect(failed_value),
- ". Block will be retried by catchup indexer."
- ]
- end)
+ Logger.error(
+ fn ->
+ [
+ "failed to insert: ",
+ inspect(failed_value),
+ ". Block will be retried by catchup indexer."
+ ]
+ end,
+ step: step
+ )
end
end
diff --git a/apps/indexer/lib/indexer/block/uncle/fetcher.ex b/apps/indexer/lib/indexer/block/uncle/fetcher.ex
index 9f0d6f269f..efa94c1d20 100644
--- a/apps/indexer/lib/indexer/block/uncle/fetcher.ex
+++ b/apps/indexer/lib/indexer/block/uncle/fetcher.ex
@@ -74,16 +74,22 @@ defmodule Indexer.Block.Uncle.Fetcher do
# the same block could be included as an uncle on multiple blocks, but we only want to fetch it once
unique_hashes = Enum.uniq(hashes)
- Logger.debug(fn -> "fetching #{length(unique_hashes)}" end)
+ unique_hash_count = Enum.count(unique_hashes)
+ Logger.metadata(count: unique_hash_count)
+
+ Logger.debug("fetching")
case EthereumJSONRPC.fetch_blocks_by_hash(unique_hashes, json_rpc_named_arguments) do
{:ok, blocks} ->
run_blocks(blocks, block_fetcher, unique_hashes)
{:error, reason} ->
- Logger.error(fn ->
- ["failed to fetch ", unique_hashes |> length |> to_string(), ": ", inspect(reason)]
- end)
+ Logger.error(
+ fn ->
+ ["failed to fetch: ", inspect(reason)]
+ end,
+ error_count: unique_hash_count
+ )
{:retry, unique_hashes}
end
@@ -110,19 +116,13 @@ defmodule Indexer.Block.Uncle.Fetcher do
transactions: %{params: transactions_params, on_conflict: :nothing}
}) do
{:ok, _} ->
- retry(errors, original_entries)
+ retry(errors)
{:error, step, failed_value, _changes_so_far} ->
- Logger.error(fn ->
- [
- "failed to import ",
- original_entries |> length() |> to_string(),
- " in step ",
- inspect(step),
- ": ",
- inspect(failed_value)
- ]
- end)
+ Logger.error(fn -> ["failed to import: ", inspect(failed_value)] end,
+ step: step,
+ error_count: Enum.count(original_entries)
+ )
{:retry, original_entries}
end
@@ -185,21 +185,20 @@ defmodule Indexer.Block.Uncle.Fetcher do
end)
end
- defp retry([], _), do: :ok
+ defp retry([]), do: :ok
- defp retry(errors, original_entries) when is_list(errors) do
+ defp retry(errors) when is_list(errors) do
retried_entries = errors_to_entries(errors)
- Logger.error(fn ->
- [
- "failed to fetch ",
- retried_entries |> length() |> to_string(),
- "/",
- original_entries |> length() |> to_string(),
- ": ",
- errors_to_iodata(errors)
- ]
- end)
+ Logger.error(
+ fn ->
+ [
+ "failed to fetch: ",
+ errors_to_iodata(errors)
+ ]
+ end,
+ error_count: Enum.count(retried_entries)
+ )
end
defp errors_to_entries(errors) when is_list(errors) do
diff --git a/apps/indexer/lib/indexer/coin_balance/fetcher.ex b/apps/indexer/lib/indexer/coin_balance/fetcher.ex
index be3e778e8a..9fda896ee6 100644
--- a/apps/indexer/lib/indexer/coin_balance/fetcher.ex
+++ b/apps/indexer/lib/indexer/coin_balance/fetcher.ex
@@ -74,7 +74,10 @@ defmodule Indexer.CoinBalance.Fetcher do
# `{address, block}`, so take unique params only
unique_entries = Enum.uniq(entries)
- Logger.debug(fn -> ["fetching ", unique_entries |> length() |> to_string()] end)
+ unique_entry_count = Enum.count(unique_entries)
+ Logger.metadata(count: unique_entry_count)
+
+ Logger.debug(fn -> "fetching" end)
unique_entries
|> Enum.map(&entry_to_params/1)
@@ -84,9 +87,12 @@ defmodule Indexer.CoinBalance.Fetcher do
run_fetched_balances(fetched_balances, unique_entries)
{:error, reason} ->
- Logger.error(fn ->
- ["failed to fetch ", unique_entries |> length() |> to_string(), ": ", inspect(reason)]
- end)
+ Logger.error(
+ fn ->
+ ["failed to fetch: ", inspect(reason)]
+ end,
+ error_count: unique_entry_count
+ )
{:retry, unique_entries}
end
@@ -115,7 +121,7 @@ defmodule Indexer.CoinBalance.Fetcher do
defp run_fetched_balances(%FetchedBalances{params_list: []}, original_entries), do: {:retry, original_entries}
- defp run_fetched_balances(%FetchedBalances{params_list: params_list, errors: errors}, original_entries) do
+ defp run_fetched_balances(%FetchedBalances{params_list: params_list, errors: errors}, _) do
value_fetched_at = DateTime.utc_now()
importable_balances_params = Enum.map(params_list, &Map.put(&1, :value_fetched_at, value_fetched_at))
@@ -128,24 +134,23 @@ defmodule Indexer.CoinBalance.Fetcher do
address_coin_balances: %{params: importable_balances_params}
})
- retry(errors, original_entries)
+ retry(errors)
end
- defp retry([], _), do: :ok
+ defp retry([]), do: :ok
- defp retry(errors, original_entries) when is_list(errors) do
+ defp retry(errors) when is_list(errors) do
retried_entries = fetched_balances_errors_to_entries(errors)
- Logger.error(fn ->
- [
- "failed to fetch ",
- retried_entries |> length() |> to_string(),
- "/",
- original_entries |> length() |> to_string(),
- ": ",
- fetched_balance_errors_to_iodata(errors)
- ]
- end)
+ Logger.error(
+ fn ->
+ [
+ "failed to fetch: ",
+ fetched_balance_errors_to_iodata(errors)
+ ]
+ end,
+ error_count: Enum.count(retried_entries)
+ )
{:retry, retried_entries}
end
diff --git a/apps/indexer/lib/indexer/internal_transaction/fetcher.ex b/apps/indexer/lib/indexer/internal_transaction/fetcher.ex
index 76b659e2da..5cf6df501e 100644
--- a/apps/indexer/lib/indexer/internal_transaction/fetcher.ex
+++ b/apps/indexer/lib/indexer/internal_transaction/fetcher.ex
@@ -103,7 +103,10 @@ defmodule Indexer.InternalTransaction.Fetcher do
def run(entries, json_rpc_named_arguments) do
unique_entries = unique_entries(entries)
- Logger.debug(fn -> "fetching internal transactions for #{length(unique_entries)} transactions" end)
+ unique_entries_count = Enum.count(unique_entries)
+ Logger.metadata(count: unique_entries_count)
+
+ Logger.debug("fetching internal transactions for transactions")
unique_entries
|> Enum.map(¶ms/1)
@@ -128,25 +131,25 @@ defmodule Indexer.InternalTransaction.Fetcher do
})
else
{:error, step, reason, _changes_so_far} ->
- Logger.error(fn ->
- [
- "failed to import internal transactions for ",
- to_string(length(entries)),
- " transactions at ",
- to_string(step),
- ": ",
- inspect(reason)
- ]
- end)
+ Logger.error(
+ fn ->
+ [
+ "failed to import internal transactions for transactions: ",
+ inspect(reason)
+ ]
+ end,
+ step: step,
+ error_count: unique_entries_count
+ )
# re-queue the de-duped entries
{:retry, unique_entries}
end
{:error, reason} ->
- Logger.error(fn ->
- "failed to fetch internal transactions for #{length(entries)} transactions: #{inspect(reason)}"
- end)
+ Logger.error(fn -> ["failed to fetch internal transactions for transactions: ", inspect(reason)] end,
+ error_count: unique_entries_count
+ )
# re-queue the de-duped entries
{:retry, unique_entries}
diff --git a/apps/indexer/lib/indexer/logger.ex b/apps/indexer/lib/indexer/logger.ex
index 7d344e4f88..a34bcd6e47 100644
--- a/apps/indexer/lib/indexer/logger.ex
+++ b/apps/indexer/lib/indexer/logger.ex
@@ -1,8 +1,22 @@
defmodule Indexer.Logger do
@moduledoc """
- Helpers for formatting `Logger` data as `t:iodata/0`.
+ Helpers for `Logger`.
"""
+ @doc """
+ Sets `keyword` in `Logger.metadata/1` around `fun`.
+ """
+ def metadata(fun, keyword) when is_function(fun, 0) and is_list(keyword) do
+ metadata_before = Logger.metadata()
+
+ try do
+ Logger.metadata(keyword)
+ fun.()
+ after
+ Logger.reset_metadata(metadata_before)
+ end
+ end
+
@doc """
The PID and its registered name (if it has one) as `t:iodata/0`.
"""
diff --git a/apps/indexer/lib/indexer/token_balance/fetcher.ex b/apps/indexer/lib/indexer/token_balance/fetcher.ex
index a57d1a235b..69a53d8a30 100644
--- a/apps/indexer/lib/indexer/token_balance/fetcher.ex
+++ b/apps/indexer/lib/indexer/token_balance/fetcher.ex
@@ -93,10 +93,11 @@ defmodule Indexer.TokenBalance.Fetcher do
end
def fetch_from_blockchain(params_list) do
- {:ok, token_balances} =
- params_list
- |> Enum.filter(&(&1.retries_count <= @max_retries))
- |> TokenBalances.fetch_token_balances_from_blockchain()
+ retryable_params_list = Enum.filter(params_list, &(&1.retries_count <= @max_retries))
+
+ Logger.metadata(count: Enum.count(retryable_params_list))
+
+ {:ok, token_balances} = TokenBalances.fetch_token_balances_from_blockchain(retryable_params_list)
token_balances
end
@@ -116,7 +117,9 @@ defmodule Indexer.TokenBalance.Fetcher do
:ok
{:error, reason} ->
- Logger.debug(fn -> "failed to import #{length(token_balances_params)} token balances, #{inspect(reason)}" end)
+ Logger.debug(fn -> ["failed to import token balances: ", inspect(reason)] end,
+ error_count: Enum.count(token_balances_params)
+ )
:error
end
diff --git a/apps/indexer/lib/indexer/token_balances.ex b/apps/indexer/lib/indexer/token_balances.ex
index a00345138f..5fa843761f 100644
--- a/apps/indexer/lib/indexer/token_balances.ex
+++ b/apps/indexer/lib/indexer/token_balances.ex
@@ -34,7 +34,7 @@ defmodule Indexer.TokenBalances do
@decorate span(tracer: Tracer)
def fetch_token_balances_from_blockchain(token_balances, opts \\ []) do
- Logger.debug(fn -> "fetching #{Enum.count(token_balances)} token balances" end)
+ Logger.debug("fetching token balances", count: Enum.count(token_balances))
task_timeout = Keyword.get(opts, :timeout, @task_timeout)
diff --git a/config/config.exs b/config/config.exs
index 5a4e9ec4f6..a2784b5e86 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -32,19 +32,25 @@ config :logger,
config :logger, :console,
# Use same format for all loggers, even though the level should only ever be `:error` for `:error` backend
format: "$dateT$time $metadata[$level] $message\n",
- metadata: ~w(application fetcher request_id)a
+ metadata:
+ ~w(application fetcher request_id first_block_number last_block_number missing_block_range_count missing_block_count
+ block_number step count error_count shrunk)a
config :logger, :ecto,
# Use same format for all loggers, even though the level should only ever be `:error` for `:error` backend
format: "$dateT$time $metadata[$level] $message\n",
- metadata: ~w(application fetcher request_id)a,
+ metadata:
+ ~w(application fetcher request_id first_block_number last_block_number missing_block_range_count missing_block_count
+ block_number step count error_count shrunk)a,
metadata_filter: [application: :ecto]
config :logger, :error,
# Use same format for all loggers, even though the level should only ever be `:error` for `:error` backend
format: "$dateT$time $metadata[$level] $message\n",
level: :error,
- metadata: ~w(application fetcher request_id)a
+ metadata:
+ ~w(application fetcher request_id first_block_number last_block_number missing_block_range_count missing_block_count
+ block_number step count error_count shrunk)a
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.