Merge branch 'master' into fix-displayed-address-length

pull/1206/head
Andrew Cravenho authored 6 years ago · committed by GitHub
commit a919502d67
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
19 changed files (changed-line counts in parentheses):

  1. README.md (16)
  2. apps/block_scout_web/assets/js/lib/redux_helpers.js (92)
  3. apps/block_scout_web/config/config.exs (4)
  4. apps/ethereum_jsonrpc/config/config.exs (4)
  5. apps/explorer/config/config.exs (4)
  6. apps/explorer/lib/explorer/chain/address/current_token_balance.ex (3)
  7. apps/explorer/lib/explorer/chain/address/token_balance.ex (4)
  8. apps/indexer/config/config.exs (4)
  9. apps/indexer/lib/indexer/block/catchup/bound_interval_supervisor.ex (45)
  10. apps/indexer/lib/indexer/block/catchup/fetcher.ex (46)
  11. apps/indexer/lib/indexer/block/fetcher/receipts.ex (2)
  12. apps/indexer/lib/indexer/block/realtime/fetcher.ex (70)
  13. apps/indexer/lib/indexer/block/uncle/fetcher.ex (53)
  14. apps/indexer/lib/indexer/coin_balance/fetcher.ex (41)
  15. apps/indexer/lib/indexer/internal_transaction/fetcher.ex (31)
  16. apps/indexer/lib/indexer/logger.ex (16)
  17. apps/indexer/lib/indexer/token_balance/fetcher.ex (13)
  18. apps/indexer/lib/indexer/token_balances.ex (2)
  19. config/config.exs (12)

@@ -245,6 +245,22 @@ To view Modules and API Reference documentation:
 2. View the generated docs.
    `open doc/index.html`
+
+## Front-end
+
+### JavaScript
+
+All JavaScript files are under [apps/block_scout_web/assets/js](https://github.com/poanetwork/blockscout/tree/master/apps/block_scout_web/assets/js) and the main file is [app.js](https://github.com/poanetwork/blockscout/blob/master/apps/block_scout_web/assets/js/app.js). This file imports all JavaScript used in the application. If you want to create a new JS file, consider creating it in [/js/pages](https://github.com/poanetwork/blockscout/tree/master/apps/block_scout_web/assets/js/pages) or [/js/lib](https://github.com/poanetwork/blockscout/tree/master/apps/block_scout_web/assets/js/lib), as follows:
+
+#### js/lib
+This folder contains scripts that can be reused on any page or used as helpers for a component.
+
+#### js/pages
+This folder contains scripts that are specific to a single page.
+
+#### Redux
+This project uses Redux to manage state on some pages. Some pages update in real time through Phoenix channels (e.g. the Address page), so the page state changes depending on which events it is listening to. Redux is also used to load some content asynchronously; see [async_listing_load.js](https://github.com/poanetwork/blockscout/blob/master/apps/block_scout_web/assets/js/lib/async_listing_load.js).
+
+To understand how to build new pages that need Redux in this project, see [redux_helpers.js](https://github.com/poanetwork/blockscout/blob/master/apps/block_scout_web/assets/js/lib/redux_helpers.js).
 ## Internationalization

@@ -2,10 +2,99 @@ import $ from 'jquery'
 import _ from 'lodash'
 import { createStore as reduxCreateStore } from 'redux'

+/**
+ * Create a redux store given the reducer. It also enables the Redux dev tools.
+ */
 export function createStore (reducer) {
   return reduxCreateStore(reducer, window.__REDUX_DEVTOOLS_EXTENSION__ && window.__REDUX_DEVTOOLS_EXTENSION__())
 }

+/**
+ * Connect elements to the redux store. It must receive an object with the following attributes:
+ *
+ * elements: an object with the elements that are going to react to the redux state or add something
+ * to the initial state.
+ *
+ * ```javascript
+ * const elements = {
+ *   // The jQuery selector for finding elements on the page.
+ *   '[data-counter]': {
+ *     // Useful for putting things from the page into the redux state.
+ *     load ($element) {...},
+ *     // Checks for state changes and manipulates the DOM accordingly.
+ *     render ($el, state, oldState) {...}
+ *   }
+ * }
+ * ```
+ *
+ * The load and render functions are optional; you can have both or just one of them, depending on
+ * whether you want to load something into the state on the first render and/or have the element
+ * react to the redux state. Note that you can include as many elements as you want, since elements
+ * is an object.
+ *
+ * store: the redux store that the elements should be connected to.
+ * ```javascript
+ * const store = createStore(reducer)
+ * ```
+ *
+ * action: the first action that the store is going to dispatch. Optional; by default 'ELEMENTS_LOAD'
+ * is dispatched.
+ *
+ * ### Examples
+ *
+ * Given the markup:
+ * ```HTML
+ * <div data-counter>
+ *   <span class="number">1</span>
+ * </div>
+ * ```
+ *
+ * The reducer:
+ * ```javascript
+ * function reducer (state = { number: null }, action) {
+ *   switch (action.type) {
+ *     case 'ELEMENTS_LOAD': {
+ *       return Object.assign({}, state, { number: action.number })
+ *     }
+ *     case 'INCREMENT': {
+ *       return Object.assign({}, state, { number: state.number + 1 })
+ *     }
+ *     default:
+ *       return state
+ *   }
+ * }
+ * ```
+ *
+ * The elements:
+ * ```javascript
+ * const elements = {
+ *   // '[data-counter]' is the element that will be connected to the redux store.
+ *   '[data-counter]': {
+ *     // Find the number within data-counter and add it to the state.
+ *     load ($el) {
+ *       return { number: parseInt($el.find('.number').text(), 10) }
+ *     },
+ *     // React to the redux state. If the number in the state changes, render the new number.
+ *     render ($el, state, oldState) {
+ *       if (state.number === oldState.number) return
+ *
+ *       $el.html(state.number)
+ *     }
+ *   }
+ * }
+ * ```
+ *
+ * All we need to do is connect the store and the elements using this function:
+ * ```javascript
+ * connectElements({store, elements})
+ * ```
+ *
+ * Now, if we dispatch the `INCREMENT` action, the state changes and the [data-counter] element
+ * re-renders, since they are connected.
+ * ```javascript
+ * store.dispatch({type: 'INCREMENT'})
+ * ```
+ */
 export function connectElements ({ elements, store, action = 'ELEMENTS_LOAD' }) {
   function loadElements () {
     return _.reduce(elements, (pageLoadParams, { load }, selector) => {
@@ -16,6 +105,7 @@ export function connectElements ({ elements, store, action = 'ELEMENTS_LOAD' })
       return _.isObject(morePageLoadParams) ? Object.assign(pageLoadParams, morePageLoadParams) : pageLoadParams
     }, {})
   }
+
   function renderElements (state, oldState) {
     _.forIn(elements, ({ render }, selector) => {
       if (!render) return
@@ -24,11 +114,13 @@ export function connectElements ({ elements, store, action = 'ELEMENTS_LOAD' })
       render($el, state, oldState)
     })
   }
+
   let oldState = store.getState()
+
   store.subscribe(() => {
     const state = store.getState()
     renderElements(state, oldState)
     oldState = state
   })
   store.dispatch(Object.assign(loadElements(), { type: action }))
 }

@@ -48,7 +48,9 @@ config :ex_cldr,
 config :logger, :block_scout_web,
   # keep synced with `config/config.exs`
   format: "$dateT$time $metadata[$level] $message\n",
-  metadata: ~w(application fetcher request_id)a,
+  metadata:
+    ~w(application fetcher request_id first_block_number last_block_number missing_block_range_count missing_block_count
+       block_number step count error_count shrunk)a,
   metadata_filter: [application: :block_scout_web]

 config :spandex_phoenix, tracer: BlockScoutWeb.Tracer
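
For context on what the extra keys buy: the logger backend renders only the keys listed in `:metadata` into the `$metadata` slot of the format string, as space-separated `key=value` pairs, so structured values set via `Logger.metadata/1` replace string interpolation in messages. A minimal sketch, assuming the backend configuration above (the output line is illustrative, not a captured log):

```elixir
require Logger

# Keys must be listed in the backend's :metadata option to be rendered;
# keys that are not listed are dropped from the output.
Logger.metadata(fetcher: :block_catchup, first_block_number: 100, last_block_number: 0)
Logger.info("Index had to catch up.")
# 2019-02-01T12:00:00.000 fetcher=block_catchup first_block_number=100 last_block_number=0 [info] Index had to catch up.
```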

@@ -17,7 +17,9 @@ config :ethereum_jsonrpc, EthereumJSONRPC.Tracer,
 config :logger, :ethereum_jsonrpc,
   # keep synced with `config/config.exs`
   format: "$dateT$time $metadata[$level] $message\n",
-  metadata: ~w(application fetcher request_id)a,
+  metadata:
+    ~w(application fetcher request_id first_block_number last_block_number missing_block_range_count missing_block_count
+       block_number step count error_count shrunk)a,
   metadata_filter: [application: :ethereum_jsonrpc]

 # Import environment specific config. This must remain at the bottom

@@ -50,7 +50,9 @@ config :explorer,
 config :logger, :explorer,
   # keep synced with `config/config.exs`
   format: "$dateT$time $metadata[$level] $message\n",
-  metadata: ~w(application fetcher request_id)a,
+  metadata:
+    ~w(application fetcher request_id first_block_number last_block_number missing_block_range_count missing_block_count
+       block_number step count error_count shrunk)a,
   metadata_filter: [application: :explorer]

 config :spandex_ecto, SpandexEcto.EctoLogger,

@@ -1,6 +1,9 @@
 defmodule Explorer.Chain.Address.CurrentTokenBalance do
   @moduledoc """
   Represents the current token balance from addresses according to the last block.
+
+  In this table we can see only the last balance from each address. If you want to see the history
+  of token balances, look at `Address.TokenBalance` instead.
   """

   use Ecto.Schema

@@ -1,6 +1,10 @@
 defmodule Explorer.Chain.Address.TokenBalance do
   @moduledoc """
   Represents a token balance from an address.
+
+  In this table we can see all the token balances that a specific address had according to the block
+  numbers. If you want to show only the last balance from an address, consider querying
+  `Address.CurrentTokenBalance` instead.
   """

   use Ecto.Schema
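
To make the two schemas' roles concrete, a hedged query sketch (not part of this commit; the `BalanceQueries` module is hypothetical, and the field names are assumed from the two schemas):

```elixir
defmodule BalanceQueries do
  import Ecto.Query

  alias Explorer.Chain.Address.{CurrentTokenBalance, TokenBalance}

  # Full balance history for one address/token pair, newest block first:
  # one TokenBalance row per block number.
  def history(address_hash, token_contract_address_hash) do
    from(tb in TokenBalance,
      where:
        tb.address_hash == ^address_hash and
          tb.token_contract_address_hash == ^token_contract_address_hash,
      order_by: [desc: tb.block_number]
    )
  end

  # Latest balance only: one CurrentTokenBalance row per address/token pair.
  def current(address_hash, token_contract_address_hash) do
    from(ctb in CurrentTokenBalance,
      where:
        ctb.address_hash == ^address_hash and
          ctb.token_contract_address_hash == ^token_contract_address_hash
    )
  end
end
```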

@@ -19,7 +19,9 @@ config :indexer, Indexer.Tracer,
 config :logger, :indexer,
   # keep synced with `config/config.exs`
   format: "$dateT$time $metadata[$level] $message\n",
-  metadata: ~w(application fetcher request_id)a,
+  metadata:
+    ~w(application fetcher request_id first_block_number last_block_number missing_block_range_count missing_block_count
+       block_number step count error_count shrunk)a,
   metadata_filter: [application: :indexer]

 # Import environment specific config. This must remain at the bottom

@@ -184,7 +184,8 @@ defmodule Indexer.Block.Catchup.BoundIntervalSupervisor do
   end

   def handle_info(
-        {ref, %{first_block_number: first_block_number, missing_block_count: missing_block_count, shrunk: false}},
+        {ref,
+         %{first_block_number: first_block_number, missing_block_count: missing_block_count, shrunk: false = shrunk}},
         %__MODULE__{
           bound_interval: bound_interval,
           task: %Task{ref: ref}
@@ -194,20 +195,23 @@ defmodule Indexer.Block.Catchup.BoundIntervalSupervisor do
     new_bound_interval =
       case missing_block_count do
         0 ->
-          Logger.info(fn -> ["Index already caught up in ", to_string(first_block_number), "-0."] end)
+          Logger.info("Index already caught up.",
+            first_block_number: first_block_number,
+            last_block_number: 0,
+            missing_block_count: 0,
+            shrunk: shrunk
+          )

           BoundInterval.increase(bound_interval)

         _ ->
-          Logger.info(fn ->
-            [
-              "Index had to catch up ",
-              to_string(missing_block_count),
-              " blocks in ",
-              to_string(first_block_number),
-              "-0."
-            ]
-          end)
+          Logger.info("Index had to catch up.",
+            first_block_number: first_block_number,
+            last_block_number: 0,
+            missing_block_count: missing_block_count,
+            shrunk: shrunk
+          )

           BoundInterval.decrease(bound_interval)
       end
@@ -226,7 +230,8 @@ defmodule Indexer.Block.Catchup.BoundIntervalSupervisor do
   end

   def handle_info(
-        {ref, %{first_block_number: first_block_number, missing_block_count: missing_block_count, shrunk: true}},
+        {ref,
+         %{first_block_number: first_block_number, missing_block_count: missing_block_count, shrunk: true = shrunk}},
         %__MODULE__{
           task: %Task{ref: ref}
         } = state
@@ -234,15 +239,13 @@ defmodule Indexer.Block.Catchup.BoundIntervalSupervisor do
       when is_integer(missing_block_count) do
     Process.demonitor(ref, [:flush])

-    Logger.info(fn ->
-      [
-        "Index had to catch up ",
-        to_string(missing_block_count),
-        " blocks in ",
-        to_string(first_block_number),
-        "-0, but the sequence was shrunk to save memory, so retrying immediately."
-      ]
-    end)
+    Logger.info(
+      "Index had to catch up, but the sequence was shrunk to save memory, so retrying immediately.",
+      first_block_number: first_block_number,
+      last_block_number: 0,
+      missing_block_count: missing_block_count,
+      shrunk: shrunk
+    )

     send(self(), :catchup_index)
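
The `shrunk: false = shrunk` and `shrunk: true = shrunk` patterns above use a small Elixir idiom: a nested match both asserts the field's value and binds it to a name, so the same clause can dispatch on `shrunk` and reuse it as log metadata. A standalone sketch:

```elixir
# `false = shrunk` asserts the field is `false` and binds it to `shrunk`.
%{shrunk: false = shrunk} = %{shrunk: false}
shrunk
#=> false

# A non-matching value raises instead of binding:
# %{shrunk: false = shrunk} = %{shrunk: true}
#=> ** (MatchError) no match of right hand side value: %{shrunk: true}
```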

@@ -71,6 +71,9 @@ defmodule Indexer.Block.Catchup.Fetcher do
     # realtime indexer gets the current latest block
     first = latest_block_number - 1
     last = 0
+
+    Logger.metadata(first_block_number: first, last_block_number: last)
+
     missing_ranges = Chain.missing_block_number_ranges(first..last)
     range_count = Enum.count(missing_ranges)
@@ -79,9 +82,10 @@ defmodule Indexer.Block.Catchup.Fetcher do
       |> Stream.map(&Enum.count/1)
       |> Enum.sum()

-    Logger.debug(fn ->
-      "#{missing_block_count} missed blocks in #{range_count} ranges between #{first} and #{last}"
-    end)
+    Logger.debug(fn -> "Missed blocks in ranges." end,
+      missing_block_range_count: range_count,
+      missing_block_count: missing_block_count
+    )

     shrunk =
       case missing_block_count do
@@ -171,23 +175,25 @@ defmodule Indexer.Block.Catchup.Fetcher do
       )

   defp fetch_and_import_range_from_sequence(
          %__MODULE__{block_fetcher: %Block.Fetcher{} = block_fetcher},
-         _.._ = range,
+         first..last = range,
          sequence
        ) do
-    Logger.metadata(fetcher: :block_catchup)
+    Logger.metadata(fetcher: :block_catchup, first_block_number: first, last_block_number: last)

     case fetch_and_import_range(block_fetcher, range) do
       {:ok, %{inserted: inserted, errors: errors}} ->
-        errors = cap_seq(sequence, errors, range)
+        errors = cap_seq(sequence, errors)
         retry(sequence, errors)

         {:ok, inserted: inserted}

       {:error, {step, reason}} = error ->
-        Logger.error(fn ->
-          first..last = range
-          "failed to fetch #{step} for blocks #{first} - #{last}: #{inspect(reason)}. Retrying block range."
-        end)
+        Logger.error(
+          fn ->
+            ["failed to fetch: ", inspect(reason), ". Retrying."]
+          end,
+          step: step
+        )

         push_back(sequence, range)
@@ -195,7 +201,7 @@ defmodule Indexer.Block.Catchup.Fetcher do
       {:error, changesets} = error when is_list(changesets) ->
         Logger.error(fn ->
-          "failed to validate blocks #{inspect(range)}: #{inspect(changesets)}. Retrying"
+          ["failed to validate: ", inspect(changesets), ". Retrying."]
         end)

         push_back(sequence, range)
@@ -203,9 +209,12 @@ defmodule Indexer.Block.Catchup.Fetcher do
         error

       {:error, {step, failed_value, _changes_so_far}} = error ->
-        Logger.error(fn ->
-          "failed to insert blocks during #{step} #{inspect(range)}: #{inspect(failed_value)}. Retrying"
-        end)
+        Logger.error(
+          fn ->
+            ["failed to insert: ", inspect(failed_value), ". Retrying."]
+          end,
+          step: step
+        )

         push_back(sequence, range)
@@ -213,7 +222,7 @@ defmodule Indexer.Block.Catchup.Fetcher do
     end
   end

-  defp cap_seq(seq, errors, range) do
+  defp cap_seq(seq, errors) do
     {not_founds, other_errors} =
       Enum.split_with(errors, fn
         %{code: 404, data: %{number: _}} -> true
@@ -222,10 +231,7 @@ defmodule Indexer.Block.Catchup.Fetcher do
     case not_founds do
       [] ->
-        Logger.debug(fn ->
-          first_block_number..last_block_number = range
-          "got blocks #{first_block_number} - #{last_block_number}"
-        end)
+        Logger.debug("got blocks")

         other_errors
@@ -239,7 +245,7 @@ defmodule Indexer.Block.Catchup.Fetcher do
   defp push_back(sequence, range) do
     case Sequence.push_back(sequence, range) do
       :ok -> :ok
-      {:error, reason} -> Logger.error(fn -> ["Could not push block range to back to Sequence: ", inspect(reason)] end)
+      {:error, reason} -> Logger.error(fn -> ["Could not push back to Sequence: ", inspect(reason)] end)
     end
   end
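
The new function head `first..last = range` works because Elixir ranges are structs that can be pattern matched on their endpoints, which is what lets the bounds go straight into `Logger.metadata`. A standalone sketch (the catchup fetcher's ranges run downward to 0):

```elixir
# Destructure a (decreasing) range into its endpoints.
first..last = 100..0
{first, last}
#=> {100, 0}
```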

@@ -13,7 +13,7 @@ defmodule Indexer.Block.Fetcher.Receipts do
         %Block.Fetcher{json_rpc_named_arguments: json_rpc_named_arguments} = state,
         transaction_params
       ) do
-    Logger.debug(fn -> "fetching #{length(transaction_params)} transaction receipts" end)
+    Logger.debug("fetching transaction receipts", count: Enum.count(transaction_params))
     stream_opts = [max_concurrency: state.receipts_concurrency, timeout: :infinity]

     transaction_params

@@ -159,15 +159,19 @@ defmodule Indexer.Block.Realtime.Fetcher do
   @decorate trace(name: "fetch", resource: "Indexer.Block.Realtime.Fetcher.fetch_and_import_block/3", tracer: Tracer)
   def fetch_and_import_block(block_number_to_fetch, block_fetcher, reorg?, retry \\ 3) do
-    Logger.metadata(fetcher: :block_realtime)
-
-    if reorg? do
-      # give previous fetch attempt (for same block number) a chance to finish
-      # before fetching again, to reduce block consensus mistakes
-      :timer.sleep(@reorg_delay)
-    end
-
-    do_fetch_and_import_block(block_number_to_fetch, block_fetcher, retry)
+    Indexer.Logger.metadata(
+      fn ->
+        if reorg? do
+          # give previous fetch attempt (for same block number) a chance to finish
+          # before fetching again, to reduce block consensus mistakes
+          :timer.sleep(@reorg_delay)
+        end
+
+        do_fetch_and_import_block(block_number_to_fetch, block_fetcher, retry)
+      end,
+      fetcher: :block_realtime,
+      block_number: block_number_to_fetch
+    )
   end

   @decorate span(tracer: Tracer)
@@ -179,33 +183,28 @@ defmodule Indexer.Block.Realtime.Fetcher do
           Task.Supervisor.start_child(TaskSupervisor, ConsensusEnsurer, :perform, args)
         end

-        Logger.debug(fn ->
-          ["fetched and imported block ", to_string(block_number_to_fetch)]
-        end)
+        Logger.debug("Fetched and imported.")

       {:ok, %{inserted: _, errors: [_ | _] = errors}} ->
         Logger.error(fn ->
           [
-            "failed to fetch block ",
-            to_string(block_number_to_fetch),
-            ": ",
+            "failed to fetch block: ",
             inspect(errors),
             ". Block will be retried by catchup indexer."
           ]
         end)

       {:error, {step, reason}} ->
-        Logger.error(fn ->
-          [
-            "failed to fetch ",
-            to_string(step),
-            " for block ",
-            to_string(block_number_to_fetch),
-            ": ",
-            inspect(reason),
-            ". Block will be retried by catchup indexer."
-          ]
-        end)
+        Logger.error(
+          fn ->
+            [
+              "failed to fetch: ",
+              inspect(reason),
+              ". Block will be retried by catchup indexer."
+            ]
+          end,
+          step: step
+        )

       {:error, [%Changeset{} | _] = changesets} ->
         params = %{
@@ -228,17 +227,16 @@ defmodule Indexer.Block.Realtime.Fetcher do
         end

       {:error, {step, failed_value, _changes_so_far}} ->
-        Logger.error(fn ->
-          [
-            "failed to insert ",
-            to_string(step),
-            " for block ",
-            to_string(block_number_to_fetch),
-            ": ",
-            inspect(failed_value),
-            ". Block will be retried by catchup indexer."
-          ]
-        end)
+        Logger.error(
+          fn ->
+            [
+              "failed to insert: ",
+              inspect(failed_value),
+              ". Block will be retried by catchup indexer."
+            ]
+          end,
+          step: step
+        )
     end
   end

@@ -74,16 +74,22 @@ defmodule Indexer.Block.Uncle.Fetcher do
     # the same block could be included as an uncle on multiple blocks, but we only want to fetch it once
     unique_hashes = Enum.uniq(hashes)

-    Logger.debug(fn -> "fetching #{length(unique_hashes)}" end)
+    unique_hash_count = Enum.count(unique_hashes)
+    Logger.metadata(count: unique_hash_count)
+
+    Logger.debug("fetching")

     case EthereumJSONRPC.fetch_blocks_by_hash(unique_hashes, json_rpc_named_arguments) do
       {:ok, blocks} ->
         run_blocks(blocks, block_fetcher, unique_hashes)

       {:error, reason} ->
-        Logger.error(fn ->
-          ["failed to fetch ", unique_hashes |> length |> to_string(), ": ", inspect(reason)]
-        end)
+        Logger.error(
+          fn ->
+            ["failed to fetch: ", inspect(reason)]
+          end,
+          error_count: unique_hash_count
+        )

         {:retry, unique_hashes}
     end
@@ -110,19 +116,13 @@ defmodule Indexer.Block.Uncle.Fetcher do
           transactions: %{params: transactions_params, on_conflict: :nothing}
         }) do
       {:ok, _} ->
-        retry(errors, original_entries)
+        retry(errors)

       {:error, step, failed_value, _changes_so_far} ->
-        Logger.error(fn ->
-          [
-            "failed to import ",
-            original_entries |> length() |> to_string(),
-            " in step ",
-            inspect(step),
-            ": ",
-            inspect(failed_value)
-          ]
-        end)
+        Logger.error(fn -> ["failed to import: ", inspect(failed_value)] end,
+          step: step,
+          error_count: Enum.count(original_entries)
+        )

         {:retry, original_entries}
     end
@@ -185,21 +185,20 @@ defmodule Indexer.Block.Uncle.Fetcher do
     end)
   end

-  defp retry([], _), do: :ok
+  defp retry([]), do: :ok

-  defp retry(errors, original_entries) when is_list(errors) do
+  defp retry(errors) when is_list(errors) do
     retried_entries = errors_to_entries(errors)

-    Logger.error(fn ->
-      [
-        "failed to fetch ",
-        retried_entries |> length() |> to_string(),
-        "/",
-        original_entries |> length() |> to_string(),
-        ": ",
-        errors_to_iodata(errors)
-      ]
-    end)
+    Logger.error(
+      fn ->
+        [
+          "failed to fetch: ",
+          errors_to_iodata(errors)
+        ]
+      end,
+      error_count: Enum.count(retried_entries)
+    )
   end

   defp errors_to_entries(errors) when is_list(errors) do

@@ -74,7 +74,10 @@ defmodule Indexer.CoinBalance.Fetcher do
     # `{address, block}`, so take unique params only
     unique_entries = Enum.uniq(entries)

-    Logger.debug(fn -> ["fetching ", unique_entries |> length() |> to_string()] end)
+    unique_entry_count = Enum.count(unique_entries)
+    Logger.metadata(count: unique_entry_count)
+
+    Logger.debug(fn -> "fetching" end)

     unique_entries
     |> Enum.map(&entry_to_params/1)
@@ -84,9 +87,12 @@ defmodule Indexer.CoinBalance.Fetcher do
         run_fetched_balances(fetched_balances, unique_entries)

       {:error, reason} ->
-        Logger.error(fn ->
-          ["failed to fetch ", unique_entries |> length() |> to_string(), ": ", inspect(reason)]
-        end)
+        Logger.error(
+          fn ->
+            ["failed to fetch: ", inspect(reason)]
+          end,
+          error_count: unique_entry_count
+        )

         {:retry, unique_entries}
     end
@@ -115,7 +121,7 @@ defmodule Indexer.CoinBalance.Fetcher do
   defp run_fetched_balances(%FetchedBalances{params_list: []}, original_entries), do: {:retry, original_entries}

-  defp run_fetched_balances(%FetchedBalances{params_list: params_list, errors: errors}, original_entries) do
+  defp run_fetched_balances(%FetchedBalances{params_list: params_list, errors: errors}, _) do
     value_fetched_at = DateTime.utc_now()

     importable_balances_params = Enum.map(params_list, &Map.put(&1, :value_fetched_at, value_fetched_at))
@@ -128,24 +134,23 @@ defmodule Indexer.CoinBalance.Fetcher do
       address_coin_balances: %{params: importable_balances_params}
     })

-    retry(errors, original_entries)
+    retry(errors)
   end

-  defp retry([], _), do: :ok
+  defp retry([]), do: :ok

-  defp retry(errors, original_entries) when is_list(errors) do
+  defp retry(errors) when is_list(errors) do
     retried_entries = fetched_balances_errors_to_entries(errors)

-    Logger.error(fn ->
-      [
-        "failed to fetch ",
-        retried_entries |> length() |> to_string(),
-        "/",
-        original_entries |> length() |> to_string(),
-        ": ",
-        fetched_balance_errors_to_iodata(errors)
-      ]
-    end)
+    Logger.error(
+      fn ->
+        [
+          "failed to fetch: ",
+          fetched_balance_errors_to_iodata(errors)
+        ]
+      end,
+      error_count: Enum.count(retried_entries)
+    )

     {:retry, retried_entries}
   end

@@ -103,7 +103,10 @@ defmodule Indexer.InternalTransaction.Fetcher do
   def run(entries, json_rpc_named_arguments) do
     unique_entries = unique_entries(entries)

-    Logger.debug(fn -> "fetching internal transactions for #{length(unique_entries)} transactions" end)
+    unique_entries_count = Enum.count(unique_entries)
+    Logger.metadata(count: unique_entries_count)
+
+    Logger.debug("fetching internal transactions for transactions")

     unique_entries
     |> Enum.map(&params/1)
@@ -128,25 +131,25 @@ defmodule Indexer.InternalTransaction.Fetcher do
         })
       else
         {:error, step, reason, _changes_so_far} ->
-          Logger.error(fn ->
-            [
-              "failed to import internal transactions for ",
-              to_string(length(entries)),
-              " transactions at ",
-              to_string(step),
-              ": ",
-              inspect(reason)
-            ]
-          end)
+          Logger.error(
+            fn ->
+              [
+                "failed to import internal transactions for transactions: ",
+                inspect(reason)
+              ]
+            end,
+            step: step,
+            error_count: unique_entries_count
+          )

           # re-queue the de-duped entries
           {:retry, unique_entries}
       end

       {:error, reason} ->
-        Logger.error(fn ->
-          "failed to fetch internal transactions for #{length(entries)} transactions: #{inspect(reason)}"
-        end)
+        Logger.error(fn -> ["failed to fetch internal transactions for transactions: ", inspect(reason)] end,
+          error_count: unique_entries_count
+        )

         # re-queue the de-duped entries
         {:retry, unique_entries}

@@ -1,8 +1,22 @@
 defmodule Indexer.Logger do
   @moduledoc """
-  Helpers for formatting `Logger` data as `t:iodata/0`.
+  Helpers for `Logger`.
   """

+  @doc """
+  Sets `keyword` in `Logger.metadata/1` around `fun`.
+  """
+  def metadata(fun, keyword) when is_function(fun, 0) and is_list(keyword) do
+    metadata_before = Logger.metadata()
+
+    try do
+      Logger.metadata(keyword)
+      fun.()
+    after
+      Logger.reset_metadata(metadata_before)
+    end
+  end
+
   @doc """
   The PID and its registered name (if it has one) as `t:iodata/0`.
   """

@@ -93,10 +93,11 @@ defmodule Indexer.TokenBalance.Fetcher do
   end

   def fetch_from_blockchain(params_list) do
-    {:ok, token_balances} =
-      params_list
-      |> Enum.filter(&(&1.retries_count <= @max_retries))
-      |> TokenBalances.fetch_token_balances_from_blockchain()
+    retryable_params_list = Enum.filter(params_list, &(&1.retries_count <= @max_retries))
+
+    Logger.metadata(count: Enum.count(retryable_params_list))
+
+    {:ok, token_balances} = TokenBalances.fetch_token_balances_from_blockchain(retryable_params_list)

     token_balances
   end
@@ -116,7 +117,9 @@ defmodule Indexer.TokenBalance.Fetcher do
         :ok

       {:error, reason} ->
-        Logger.debug(fn -> "failed to import #{length(token_balances_params)} token balances, #{inspect(reason)}" end)
+        Logger.debug(fn -> ["failed to import token balances: ", inspect(reason)] end,
+          error_count: Enum.count(token_balances_params)
+        )

         :error
     end

@@ -34,7 +34,7 @@ defmodule Indexer.TokenBalances do
   @decorate span(tracer: Tracer)
   def fetch_token_balances_from_blockchain(token_balances, opts \\ []) do
-    Logger.debug(fn -> "fetching #{Enum.count(token_balances)} token balances" end)
+    Logger.debug("fetching token balances", count: Enum.count(token_balances))

     task_timeout = Keyword.get(opts, :timeout, @task_timeout)

@@ -32,19 +32,25 @@ config :logger,
 config :logger, :console,
   # Use same format for all loggers, even though the level should only ever be `:error` for `:error` backend
   format: "$dateT$time $metadata[$level] $message\n",
-  metadata: ~w(application fetcher request_id)a
+  metadata:
+    ~w(application fetcher request_id first_block_number last_block_number missing_block_range_count missing_block_count
+       block_number step count error_count shrunk)a

 config :logger, :ecto,
   # Use same format for all loggers, even though the level should only ever be `:error` for `:error` backend
   format: "$dateT$time $metadata[$level] $message\n",
-  metadata: ~w(application fetcher request_id)a,
+  metadata:
+    ~w(application fetcher request_id first_block_number last_block_number missing_block_range_count missing_block_count
+       block_number step count error_count shrunk)a,
   metadata_filter: [application: :ecto]

 config :logger, :error,
   # Use same format for all loggers, even though the level should only ever be `:error` for `:error` backend
   format: "$dateT$time $metadata[$level] $message\n",
   level: :error,
-  metadata: ~w(application fetcher request_id)a
+  metadata:
+    ~w(application fetcher request_id first_block_number last_block_number missing_block_range_count missing_block_count
+       block_number step count error_count shrunk)a

 # Import environment specific config. This must remain at the bottom
 # of this file so it overrides the configuration defined above.
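
Since the longer metadata lists now wrap across lines inside the sigil, note that `~w(...)a` splits on any whitespace, newlines included, and yields a list of atoms:

```elixir
~w(application fetcher
   request_id)a
#=> [:application, :fetcher, :request_id]
```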
