Dedupe and log duplicate transaction hashes before fetching internal

Fixes #289

I can't reliably reproduce #289 using the transaction hash or the block
number alone, so this commit fixes the proximate cause, duplicate
`transaction_hashes` being passed to `InternalTransactionFetcher.run/2`,
by passing only the unique transaction hashes to
`EthereumJSONRPC.fetch_internal_transactions` while logging the
duplicates as errors.
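
In essence, the params are partitioned by `hash_data` before the fetch. A
minimal standalone sketch of that split (the `transactions_params` below are
made-up examples, not data from #289):

    transactions_params
    |> Enum.group_by(fn %{hash_data: hash_data} -> hash_data end)
    |> Map.values()
    |> Enum.reduce({[], []}, fn
      # a hash that appears once contributes only to the uniques
      [unique], {uniques, duplicates} ->
        {[unique | uniques], duplicates}

      # a duplicated hash contributes one representative to the uniques and
      # its whole group to the duplicates, so every occurrence gets logged
      [unique | _] = group, {uniques, duplicates} ->
        {[unique | uniques], group ++ duplicates}
    end)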
pull/308/head
Luke Imhoff 7 years ago
parent 01b5fb2120
commit 45529f7346
  1. apps/explorer/lib/explorer/indexer/internal_transaction_fetcher.ex (55 lines changed)
  2. apps/explorer/test/explorer/indexer/internal_transaction_fetcher_test.exs (49 lines changed)
  3. coveralls.json (2 lines changed)

--- a/apps/explorer/lib/explorer/indexer/internal_transaction_fetcher.ex
+++ b/apps/explorer/lib/explorer/indexer/internal_transaction_fetcher.ex
@@ -5,6 +5,8 @@ defmodule Explorer.Indexer.InternalTransactionFetcher do
   See `async_fetch/1` for details on configuring limits.
   """

+  require Logger
+
   alias Explorer.{BufferedTask, Chain, Indexer}
   alias Explorer.Indexer.{AddressBalanceFetcher, AddressExtraction}
   alias Explorer.Chain.{Block, Hash}
@@ -72,9 +74,11 @@ defmodule Explorer.Indexer.InternalTransactionFetcher do

   @impl BufferedTask
   def run(transactions_params, _retries) do
-    Indexer.debug(fn -> "fetching internal transactions for #{length(transactions_params)} transactions" end)
+    unique_transactions_params = unique_transactions_params(transactions_params)
+
+    Indexer.debug(fn -> "fetching internal transactions for #{length(unique_transactions_params)} transactions" end)

-    case EthereumJSONRPC.fetch_internal_transactions(transactions_params) do
+    case EthereumJSONRPC.fetch_internal_transactions(unique_transactions_params) do
       {:ok, internal_transactions_params} ->
         addresses_params = AddressExtraction.extract_addresses(%{internal_transactions: internal_transactions_params})

@@ -83,7 +87,7 @@ defmodule Explorer.Indexer.InternalTransactionFetcher do
             {hash, block_number}
           end)

-        transaction_hashes = Enum.map(transactions_params, &Map.fetch!(&1, :hash_data))
+        transaction_hashes = Enum.map(unique_transactions_params, &Map.fetch!(&1, :hash_data))

         with {:ok, %{addresses: address_hashes}} <-
                Chain.import_internal_transactions(
@@ -110,7 +114,8 @@ defmodule Explorer.Indexer.InternalTransactionFetcher do
             ]
           end)

-          :retry
+          # re-queue the de-duped transactions_params
+          {:retry, unique_transactions_params}
        end

      {:error, reason} ->
@@ -118,7 +123,47 @@ defmodule Explorer.Indexer.InternalTransactionFetcher do
          "failed to fetch internal transactions for #{length(transactions_params)} transactions: #{inspect(reason)}"
        end)

-        :retry
+        # re-queue the de-duped transactions_params
+        {:retry, unique_transactions_params}
     end
   end
+
+  # Protection and improved reporting for https://github.com/poanetwork/poa-explorer/issues/289
+  defp unique_transactions_params(transactions_params) do
+    transactions_params_by_hash_data = Enum.group_by(transactions_params, fn %{hash_data: hash_data} -> hash_data end)
+
+    if map_size(transactions_params_by_hash_data) < length(transactions_params) do
+      {unique_transactions_params, duplicate_transactions_params} =
+        transactions_params_by_hash_data
+        |> Map.values()
+        |> uniques_and_duplicates()
+
+      Logger.error(fn ->
+        duplicate_transactions_params
+        |> Stream.with_index()
+        |> Enum.reduce(
+          ["Duplicate transaction params being used to fetch internal transactions:\n"],
+          fn {transaction_params, index}, acc ->
+            [acc, " ", to_string(index + 1), ". ", inspect(transaction_params), "\n"]
+          end
+        )
+      end)
+
+      unique_transactions_params
+    else
+      transactions_params
+    end
+  end
+
+  defp uniques_and_duplicates(groups) do
+    Enum.reduce(groups, {[], []}, fn group, {acc_uniques, acc_duplicates} ->
+      case group do
+        [unique] ->
+          {[unique | acc_uniques], acc_duplicates}
+
+        [unique | _] = duplicates ->
+          {[unique | acc_uniques], duplicates ++ acc_duplicates}
+      end
+    end)
+  end
 end
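
For illustration, given duplicated params the new helper keeps one entry per
`hash_data` and hands every member of a duplicated group to `Logger.error/1`.
A sketch of the behaviour (hypothetical shortened hashes;
`unique_transactions_params/1` is private, so this call only works from inside
the module):

    unique_transactions_params([
      %{block_number: 1, hash_data: "0x0a"},
      %{block_number: 1, hash_data: "0x0a"},
      %{block_number: 2, hash_data: "0x0b"}
    ])
    # => one params map per hash, e.g.
    #    [%{block_number: 2, hash_data: "0x0b"}, %{block_number: 1, hash_data: "0x0a"}]
    #    (group order is not guaranteed, since the groups come from Map.values/1),
    #    while the error log lists both "0x0a" occurrences, numbered 1. and 2.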

--- a/apps/explorer/test/explorer/indexer/internal_transaction_fetcher_test.exs
+++ b/apps/explorer/test/explorer/indexer/internal_transaction_fetcher_test.exs
@@ -1,9 +1,13 @@
 defmodule Explorer.Indexer.InternalTransactionFetcherTest do
   use Explorer.DataCase, async: false

+  import ExUnit.CaptureLog
+
   alias Explorer.Chain.Transaction
   alias Explorer.Indexer.{AddressBalanceFetcherCase, InternalTransactionFetcher, PendingTransactionFetcher}

+  @moduletag :capture_log
+
   test "does not try to fetch pending transactions from Explorer.Indexer.PendingTransactionFetcher" do
     start_supervised!({Task.Supervisor, name: Explorer.Indexer.TaskSupervisor})
     AddressBalanceFetcherCase.start_supervised!()
@@ -50,4 +54,49 @@ defmodule Explorer.Indexer.InternalTransactionFetcherTest do
       assert InternalTransactionFetcher.init([], fn hash_string, acc -> [hash_string | acc] end) == []
     end
   end
+
+  describe "run/2" do
+    test "duplicate transaction hashes are logged" do
+      start_supervised!({Task.Supervisor, name: Explorer.Indexer.TaskSupervisor})
+      AddressBalanceFetcherCase.start_supervised!()
+
+      insert(:transaction, hash: "0x03cd5899a63b6f6222afda8705d059fd5a7d126bcabe962fb654d9736e6bcafa")
+
+      log =
+        capture_log(fn ->
+          InternalTransactionFetcher.run(
+            [
+              %{block_number: 1, hash_data: "0x03cd5899a63b6f6222afda8705d059fd5a7d126bcabe962fb654d9736e6bcafa"},
+              %{block_number: 1, hash_data: "0x03cd5899a63b6f6222afda8705d059fd5a7d126bcabe962fb654d9736e6bcafa"}
+            ],
+            0
+          )
+        end)
+
+      assert log =~
+               """
+               Duplicate transaction params being used to fetch internal transactions:
+                1. %{block_number: 1, hash_data: "0x03cd5899a63b6f6222afda8705d059fd5a7d126bcabe962fb654d9736e6bcafa"}
+                2. %{block_number: 1, hash_data: "0x03cd5899a63b6f6222afda8705d059fd5a7d126bcabe962fb654d9736e6bcafa"}
+               """
+    end
+
+    test "duplicate transaction hashes only retry uniques" do
+      start_supervised!({Task.Supervisor, name: Explorer.Indexer.TaskSupervisor})
+      AddressBalanceFetcherCase.start_supervised!()
+
+      # not a real transaction hash, so that it fails
+      insert(:transaction, hash: "0x0000000000000000000000000000000000000000000000000000000000000001")
+
+      assert InternalTransactionFetcher.run(
+               [
+                 %{block_number: 1, hash_data: "0x0000000000000000000000000000000000000000000000000000000000000001"},
+                 %{block_number: 1, hash_data: "0x0000000000000000000000000000000000000000000000000000000000000001"}
+               ],
+               0
+             ) ==
+               {:retry,
+                [%{block_number: 1, hash_data: "0x0000000000000000000000000000000000000000000000000000000000000001"}]}
+    end
+  end
 end

--- a/coveralls.json
+++ b/coveralls.json
@@ -1,7 +1,7 @@
 {
   "coverage_options": {
     "treat_no_relevant_lines_as_covered": true,
-    "minimum_coverage": 93.0
+    "minimum_coverage": 93.1
   },
   "terminal_options": {
     "file_column_width": 120
