Missed token transfer cataloger (#807)

* Rename token transfer params parser

* Rename token transfer parsing function

* Requeue blocks with incomplete token transfer indexing on start up

* Add additional test for worker

* Add missed token transfers to the front of the catchup queue

* Address code review concerns

* Remove unused alias

* Add process names for uncataloged token transfer processes

* Improve handling of when the sequence isn't available to queue

* Move Uncataloged.Supervisor name out of init and into start_link call

Name is not a valid option to init/1 because it is a GenServer option, which
means it must be passed in the third argument to Supervisor.start_link/3.
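A minimal sketch of the distinction (module name hypothetical):

# :name given here lands in init/1's argument, where it is ignored
Supervisor.start_link(__MODULE__, name: MyApp.Uncataloged.Supervisor)

# GenServer options such as :name belong in the third argument
Supervisor.start_link(__MODULE__, [], name: MyApp.Uncataloged.Supervisor)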
Alex Garibay authored 6 years ago, committed by GitHub
parent b53d846e68
commit d7e38a223e
Changed files:
1. apps/explorer/lib/explorer/chain.ex (26 lines changed)
2. apps/explorer/test/explorer/chain_test.exs (8 lines changed)
3. apps/explorer/test/support/factory.ex (1 line changed)
4. apps/indexer/lib/indexer/application.ex (6 lines changed)
5. apps/indexer/lib/indexer/block/catchup/fetcher.ex (21 lines changed)
6. apps/indexer/lib/indexer/block/fetcher.ex (2 lines changed)
7. apps/indexer/lib/indexer/sequence.ex (79 lines changed)
8. apps/indexer/lib/indexer/token_transfer/parser.ex (126 lines changed)
9. apps/indexer/lib/indexer/token_transfer/uncataloged/supervisor.ex (44 lines changed)
10. apps/indexer/lib/indexer/token_transfer/uncataloged/worker.ex (90 lines changed)
11. apps/indexer/lib/indexer/token_transfers.ex (123 lines changed)
12. apps/indexer/test/indexer/sequence_test.exs (39 lines changed)
13. apps/indexer/test/indexer/token_transfer/parser_test.exs (14 lines changed)
14. apps/indexer/test/indexer/token_transfer/uncataloged/worker_test.exs (76 lines changed)

@@ -1814,6 +1814,32 @@ defmodule Explorer.Chain do
)
end
@doc """
Returns a list of block numbers for token transfer `t:Log.t/0`s that don't
have an associated `t:TokenTransfer.t/0` record.
"""
def uncataloged_token_transfer_block_numbers do
query =
from(l in Log,
join: t in assoc(l, :transaction),
left_join: tf in TokenTransfer,
on: tf.transaction_hash == l.transaction_hash and tf.log_index == l.index,
where: l.first_topic == unquote(TokenTransfer.constant()),
where: is_nil(tf.id),
select: t.block_number,
distinct: t.block_number
)
Repo.transaction(
fn ->
query
|> Repo.stream(timeout: :infinity)
|> Enum.reduce([], &[&1 | &2])
end,
timeout: :infinity
)
end
@doc """
Fetches a `t:Token.t/0` by an address hash.
"""

@@ -2935,4 +2935,12 @@ defmodule Explorer.ChainTest do
assert unique_tokens_ids_paginated == [second_page.token_id]
end
end
describe "uncataloged_token_transfer_block_numbers/0" do
test "returns a list of block numbers" do
log = insert(:token_transfer_log)
block_number = log.transaction.block_number
assert {:ok, [^block_number]} = Chain.uncataloged_token_transfer_block_numbers()
end
end
end

@@ -328,6 +328,7 @@ defmodule Explorer.Factory do
second_topic: zero_padded_address_hash_string(from_address.hash),
third_topic: zero_padded_address_hash_string(to_address.hash),
address_hash: token_contract_address.hash,
address: nil,
data: "0x0000000000000000000000000000000000000000000000000de0b6b3a7640000",
transaction: transaction
}

@@ -11,7 +11,8 @@ defmodule Indexer.Application do
InternalTransaction,
PendingTransaction,
Token,
TokenBalance
TokenBalance,
TokenTransfer
}
@impl Application
@@ -36,7 +37,8 @@ defmodule Indexer.Application do
{Token.Supervisor, [[json_rpc_named_arguments: json_rpc_named_arguments], [name: Token.Supervisor]]},
{TokenBalance.Supervisor,
[[json_rpc_named_arguments: json_rpc_named_arguments], [name: TokenBalance.Supervisor]]},
{Block.Supervisor, [block_fetcher_supervisor_named_arguments, [name: Block.Supervisor]]}
{Block.Supervisor, [block_fetcher_supervisor_named_arguments, [name: Block.Supervisor]]},
{TokenTransfer.Uncataloged.Supervisor, [[], [name: TokenTransfer.Uncataloged.Supervisor]]}
]
opts = [strategy: :one_for_one, name: Indexer.Supervisor]

@@ -25,6 +25,7 @@ defmodule Indexer.Block.Catchup.Fetcher do
@blocks_batch_size 10
@blocks_concurrency 10
@sequence_name :block_catchup_sequencer
defstruct blocks_batch_size: @blocks_batch_size,
blocks_concurrency: @blocks_concurrency,
@@ -88,7 +89,9 @@ defmodule Indexer.Block.Catchup.Fetcher do
:ok
_ ->
{:ok, sequence} = Sequence.start_link(ranges: missing_ranges, step: -1 * blocks_batch_size)
sequence_opts = [ranges: missing_ranges, step: -1 * blocks_batch_size]
gen_server_opts = [name: @sequence_name]
{:ok, sequence} = Sequence.start_link(sequence_opts, gen_server_opts)
Sequence.cap(sequence)
stream_fetch_and_import(state, sequence)
@@ -223,4 +226,20 @@ defmodule Indexer.Block.Catchup.Fetcher do
:ok
end
@doc """
Puts a list of block numbers at the front of the sequencing queue.
"""
@spec enqueue([non_neg_integer()]) :: :ok | {:error, :queue_unavailable}
def enqueue(block_numbers) do
if Process.whereis(@sequence_name) do
for block_number <- block_numbers do
Sequence.queue_front(@sequence_name, block_number..block_number)
end
:ok
else
{:error, :queue_unavailable}
end
end
end
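A caller-side sketch of `enqueue/1` (the retry interval here is hypothetical; the Uncataloged.Worker below uses its own):

case Indexer.Block.Catchup.Fetcher.enqueue(block_numbers) do
  :ok ->
    :ok

  {:error, :queue_unavailable} ->
    # The catchup sequence isn't running yet; try again shortly.
    Process.send_after(self(), :enqueue_blocks, 10_000)
end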

@@ -100,7 +100,7 @@ defmodule Indexer.Block.Fetcher do
{:receipts, {:ok, receipt_params}} <- {:receipts, Receipts.fetch(state, transactions_without_receipts)},
%{logs: logs, receipts: receipts} = receipt_params,
transactions_with_receipts = Receipts.put(transactions_without_receipts, receipts),
%{token_transfers: token_transfers, tokens: tokens} = TokenTransfers.from_log_params(logs),
%{token_transfers: token_transfers, tokens: tokens} = TokenTransfers.parse(logs),
addresses =
AddressExtraction.extract_addresses(%{
blocks: blocks,

@@ -38,12 +38,28 @@ defmodule Indexer.Sequence do
@type options :: [ranges_option | first_option | step_named_argument]
@typep edge :: :front | :back
@typep t :: %__MODULE__{
queue: :queue.queue(Range.t()),
current: nil | integer(),
step: step()
}
def child_spec([init_arguments]) do
child_spec([init_arguments, []])
end
def child_spec([_init_arguments, _gen_server_options] = start_link_arguments) do
spec = %{
id: __MODULE__,
start: {__MODULE__, :start_link, start_link_arguments},
type: :worker
}
Supervisor.child_spec(spec, [])
end
@doc """
Starts a process for managing a block sequence.
@@ -56,16 +72,16 @@ defmodule Indexer.Sequence do
Indexer.Sequence.start_link(ranges: [100..0])
"""
@spec start_link(options) :: GenServer.on_start()
def start_link(options) when is_list(options) do
GenServer.start_link(__MODULE__, options)
@spec start_link(options(), Keyword.t()) :: GenServer.on_start()
def start_link(init_options, gen_server_options \\ []) when is_list(init_options) and is_list(gen_server_options) do
GenServer.start_link(__MODULE__, init_options, gen_server_options)
end
@doc """
Builds an enumerable stream using a sequencer agent.
"""
@spec build_stream(pid()) :: Enumerable.t()
def build_stream(sequencer) when is_pid(sequencer) do
@spec build_stream(GenServer.server()) :: Enumerable.t()
def build_stream(sequencer) do
Stream.resource(
fn -> sequencer end,
fn seq ->
@@ -81,24 +97,32 @@ defmodule Indexer.Sequence do
@doc """
Changes the mode for the sequence to finite.
"""
@spec cap(pid()) :: mode
def cap(sequence) when is_pid(sequence) do
@spec cap(GenServer.server()) :: mode
def cap(sequence) do
GenServer.call(sequence, :cap)
end
@doc """
Adds a range of block numbers to the end of sequence.
Adds a range of block numbers to the end of the sequence.
"""
@spec queue(pid(), Range.t()) :: :ok
def queue(sequence, _first.._last = range) when is_pid(sequence) do
@spec queue(GenServer.server(), Range.t()) :: :ok | {:error, String.t()}
def queue(sequence, _first.._last = range) do
GenServer.call(sequence, {:queue, range})
end
@doc """
Adds a range of block numbers to the front of the sequence.
"""
@spec queue_front(GenServer.server(), Range.t()) :: :ok | {:error, String.t()}
def queue_front(sequence, _first.._last = range) do
GenServer.call(sequence, {:queue_front, range})
end
@doc """
Pops the next block range from the sequence.
"""
@spec pop(pid()) :: Range.t() | :halt
def pop(sequence) when is_pid(sequence) do
@spec pop(GenServer.server()) :: Range.t() | :halt
def pop(sequence) do
GenServer.call(sequence, :pop)
end
@@ -143,6 +167,17 @@
end
end
@spec handle_call({:queue_front, Range.t()}, GenServer.from(), t()) :: {:reply, :ok | {:error, String.t()}, t()}
def handle_call({:queue_front, _first.._last = range}, _from, %__MODULE__{queue: queue, step: step} = state) do
case queue_chunked_range(queue, step, range, :front) do
{:ok, updated_queue} ->
{:reply, :ok, %__MODULE__{state | queue: updated_queue}}
{:error, _} = error ->
{:reply, error, state}
end
end
@spec handle_call(:pop, GenServer.from(), t()) :: {:reply, Range.t() | :halt, t()}
def handle_call(:pop, _from, %__MODULE__{queue: queue, current: current, step: step} = state) do
{reply, new_state} =
@@ -164,18 +199,26 @@
{:reply, reply, new_state}
end
@spec queue_chunked_range(:queue.queue(Range.t()), step, Range.t()) ::
@spec queue_chunked_range(:queue.queue(Range.t()), step, Range.t(), edge()) ::
{:ok, :queue.queue(Range.t())} | {:error, reason :: String.t()}
defp queue_chunked_range(queue, step, _.._ = range) when is_integer(step) do
with {:error, [reason]} <- queue_chunked_ranges(queue, step, [range]) do
defp queue_chunked_range(queue, step, _.._ = range, edge \\ :back)
when is_integer(step) and edge in [:back, :front] do
with {:error, [reason]} <- queue_chunked_ranges(queue, step, [range], edge) do
{:error, reason}
end
end
@spec queue_chunked_range(:queue.queue(Range.t()), step, [Range.t()]) ::
@spec queue_chunked_ranges(:queue.queue(Range.t()), step, [Range.t()], edge()) ::
{:ok, :queue.queue(Range.t())} | {:error, reasons :: [String.t()]}
defp queue_chunked_ranges(queue, step, ranges) when is_integer(step) and is_list(ranges) do
reduce_chunked_ranges(ranges, step, queue, &:queue.in/2)
defp queue_chunked_ranges(queue, step, ranges, edge \\ :back)
when is_integer(step) and is_list(ranges) and edge in [:back, :front] do
reducer =
case edge do
:back -> &:queue.in/2
:front -> &:queue.in_r/2
end
reduce_chunked_ranges(ranges, step, queue, reducer)
end
defp reduce_chunked_ranges(ranges, step, initial, reducer)
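The effect of `queue_front/2` on an infinite sequence, sketched to mirror the new tests below:

{:ok, sequence} = Indexer.Sequence.start_link(first: 5, step: 1)
:ok = Indexer.Sequence.queue_front(sequence, 3..4)
4..4 = Indexer.Sequence.pop(sequence) # front-queued chunks pop first
3..3 = Indexer.Sequence.pop(sequence)
5..5 = Indexer.Sequence.pop(sequence) # then the infinite sequence resumes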

@@ -0,0 +1,126 @@
defmodule Indexer.TokenTransfer.Parser do
@moduledoc """
Helper functions for transforming data for ERC-20 and ERC-721 token transfers.
"""
require Logger
alias ABI.TypeDecoder
alias Explorer.Chain.TokenTransfer
@doc """
Returns a list of token transfers given a list of logs.
"""
def parse(logs) do
initial_acc = %{tokens: [], token_transfers: []}
logs
|> Enum.filter(&(&1.first_topic == unquote(TokenTransfer.constant())))
|> Enum.reduce(initial_acc, &do_parse/2)
end
defp do_parse(log, %{tokens: tokens, token_transfers: token_transfers} = acc) do
{token, token_transfer} = parse_params(log)
%{
tokens: [token | tokens],
token_transfers: [token_transfer | token_transfers]
}
rescue
_ in [FunctionClauseError, MatchError] ->
Logger.error(fn -> "Unknown token transfer format: #{inspect(log)}" end)
acc
end
# ERC-20 token transfer
defp parse_params(%{second_topic: second_topic, third_topic: third_topic, fourth_topic: nil} = log)
when not is_nil(second_topic) and not is_nil(third_topic) do
[amount] = decode_data(log.data, [{:uint, 256}])
token_transfer = %{
amount: Decimal.new(amount || 0),
block_number: log.block_number,
log_index: log.index,
from_address_hash: truncate_address_hash(log.second_topic),
to_address_hash: truncate_address_hash(log.third_topic),
token_contract_address_hash: log.address_hash,
transaction_hash: log.transaction_hash,
token_type: "ERC-20"
}
token = %{
contract_address_hash: log.address_hash,
type: "ERC-20"
}
{token, token_transfer}
end
# ERC-721 token transfer with topics as addresses
defp parse_params(%{second_topic: second_topic, third_topic: third_topic, fourth_topic: fourth_topic} = log)
when not is_nil(second_topic) and not is_nil(third_topic) and not is_nil(fourth_topic) do
[token_id] = decode_data(fourth_topic, [{:uint, 256}])
token_transfer = %{
block_number: log.block_number,
log_index: log.index,
from_address_hash: truncate_address_hash(log.second_topic),
to_address_hash: truncate_address_hash(log.third_topic),
token_contract_address_hash: log.address_hash,
token_id: token_id || 0,
transaction_hash: log.transaction_hash,
token_type: "ERC-721"
}
token = %{
contract_address_hash: log.address_hash,
type: "ERC-721"
}
{token, token_transfer}
end
# ERC-721 token transfer with info in data field instead of in log topics
defp parse_params(%{second_topic: nil, third_topic: nil, fourth_topic: nil, data: data} = log)
when not is_nil(data) do
[from_address_hash, to_address_hash, token_id] = decode_data(data, [:address, :address, {:uint, 256}])
token_transfer = %{
block_number: log.block_number,
log_index: log.index,
from_address_hash: encode_address_hash(from_address_hash),
to_address_hash: encode_address_hash(to_address_hash),
token_contract_address_hash: log.address_hash,
token_id: token_id,
transaction_hash: log.transaction_hash,
token_type: "ERC-721"
}
token = %{
contract_address_hash: log.address_hash,
type: "ERC-721"
}
{token, token_transfer}
end
defp truncate_address_hash(nil), do: "0x0000000000000000000000000000000000000000"
defp truncate_address_hash("0x000000000000000000000000" <> truncated_hash) do
"0x#{truncated_hash}"
end
defp encode_address_hash(binary) do
"0x" <> Base.encode16(binary, case: :lower)
end
defp decode_data("0x", types) do
for _ <- types, do: nil
end
defp decode_data("0x" <> encoded_data, types) do
encoded_data
|> Base.decode16!(case: :mixed)
|> TypeDecoder.decode_raw(types)
end
end
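A sketch of what the parser consumes and returns for a single ERC-20 Transfer log, assuming `TokenTransfer.constant/0` is the keccak-256 topic of Transfer(address,address,uint256); all hash values below are placeholders:

log = %{
  first_topic: "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
  second_topic: "0x0000000000000000000000005a0b54d5dc17e0aadc383d2db43b0a0d3e029c4c",
  third_topic: "0x000000000000000000000000fbb1b73c4f0bda4f67dca266ce6ef42f520fbb98",
  fourth_topic: nil,
  data: "0x0000000000000000000000000000000000000000000000000de0b6b3a7640000",
  address_hash: "0x6748f50f686bfbca6fe8ad62b22228b87f31ff2b",
  block_number: 100,
  index: 1,
  transaction_hash: "0x53bc44674d47..."
}

%{tokens: [token], token_transfers: [transfer]} = Indexer.TokenTransfer.Parser.parse([log])
# token => %{contract_address_hash: "0x6748...", type: "ERC-20"}
# transfer.amount => Decimal.new(1_000_000_000_000_000_000)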

@@ -0,0 +1,44 @@
defmodule Indexer.TokenTransfer.Uncataloged.Supervisor do
@moduledoc """
Supervises the processes that ensure uncataloged token transfers get queued for indexing.
"""
use Supervisor
alias Indexer.TokenTransfer.Uncataloged.Worker
def child_spec([]) do
child_spec([[]])
end
def child_spec([init_arguments]) do
child_spec([init_arguments, [name: __MODULE__]])
end
def child_spec([_init_arguments, _gen_server_options] = start_link_arguments) do
spec = %{
id: __MODULE__,
start: {__MODULE__, :start_link, start_link_arguments},
restart: :transient,
type: :supervisor
}
Supervisor.child_spec(spec, [])
end
def start_link(init_arguments, gen_server_options \\ []) do
Supervisor.start_link(__MODULE__, init_arguments, gen_server_options)
end
@impl Supervisor
def init(_) do
children = [
{Worker, [[supervisor: self()], [name: Worker]]},
{Task.Supervisor, name: Indexer.TokenTransfer.Uncataloged.TaskSupervisor}
]
opts = [strategy: :one_for_all]
Supervisor.init(children, opts)
end
end
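A standalone start sketch; in practice this supervisor is started from Indexer.Application as shown above:

{:ok, sup} = Indexer.TokenTransfer.Uncataloged.Supervisor.start_link([])
# One-for-all children: the Worker that scans and enqueues, plus a
# Task.Supervisor used for the asynchronous enqueue call.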

@@ -0,0 +1,90 @@
defmodule Indexer.TokenTransfer.Uncataloged.Worker do
@moduledoc """
Catalogs token transfer logs missing an accompanying token transfer record.
Missed token transfers happen due to formats that weren't supported at the time
they were parsed during main indexing. Updating the parser and rebooting will allow
this process to properly catalog those missed token transfers.
"""
use GenServer
alias Explorer.Chain
alias Indexer.Block.Catchup.Fetcher
alias Indexer.TokenTransfer.Uncataloged
def child_spec([init_arguments]) do
child_spec([init_arguments, []])
end
def child_spec([_init_arguments, _gen_server_options] = start_link_arguments) do
spec = %{
id: __MODULE__,
start: {__MODULE__, :start_link, start_link_arguments},
restart: :transient,
type: :worker
}
Supervisor.child_spec(spec, [])
end
def start_link(init_arguments, gen_server_options \\ []) do
GenServer.start_link(__MODULE__, init_arguments, gen_server_options)
end
def init(opts) do
sup_pid = Keyword.fetch!(opts, :supervisor)
retry_interval = Keyword.get(opts, :retry_interval, 10_000)
send(self(), :scan)
state = %{
block_numbers: [],
retry_interval: retry_interval,
sup_pid: sup_pid,
task_ref: nil
}
{:ok, state}
end
def handle_info(:scan, state) do
{:ok, block_numbers} = Chain.uncataloged_token_transfer_block_numbers()
case block_numbers do
[] ->
Supervisor.stop(state.sup_pid, :normal)
{:noreply, state}
block_numbers ->
Process.send_after(self(), :enqueue_blocks, state.retry_interval)
{:noreply, %{state | block_numbers: block_numbers}}
end
end
def handle_info(:enqueue_blocks, %{block_numbers: block_numbers} = state) do
%Task{ref: ref} = async_enqueue(block_numbers)
{:noreply, %{state | task_ref: ref}}
end
def handle_info({ref, :ok}, %{task_ref: ref, sup_pid: sup_pid} = state) do
Process.demonitor(ref, [:flush])
Supervisor.stop(sup_pid, :normal)
{:stop, :shutdown, state}
end
def handle_info({ref, {:error, :queue_unavailable}}, %{task_ref: ref, retry_interval: millis} = state) do
Process.demonitor(ref, [:flush])
Process.send_after(self(), :enqueue_blocks, millis)
{:noreply, %{state | task_ref: nil}}
end
def handle_info({:DOWN, ref, :process, _, _}, %{task_ref: ref, retry_interval: millis} = state) do
Process.send_after(self(), :enqueue_blocks, millis)
{:noreply, %{state | task_ref: nil}}
end
defp async_enqueue(block_numbers) do
Task.Supervisor.async_nolink(Uncataloged.TaskSupervisor, Fetcher, :enqueue, [block_numbers])
end
end
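For context on the three result-handling `handle_info/2` clauses above: `Task.Supervisor.async_nolink/4` delivers the task result as a plain `{ref, result}` message, and the caller also receives a `:DOWN` monitor message unless it demonitors with `:flush`, which is what the success and error clauses do. Sketch:

%Task{ref: ref} =
  Task.Supervisor.async_nolink(Uncataloged.TaskSupervisor, Fetcher, :enqueue, [block_numbers])

# Success arrives as {^ref, :ok} or {^ref, {:error, :queue_unavailable}};
# an abnormal exit arrives only as {:DOWN, ^ref, :process, pid, reason}.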

@@ -1,126 +1,9 @@
defmodule Indexer.TokenTransfers do
@moduledoc """
Helper functions for transforming data for ERC-20 and ERC-721 token transfers.
Context for working with token transfers.
"""
require Logger
alias Indexer.TokenTransfer.Parser
alias ABI.TypeDecoder
alias Explorer.Chain.TokenTransfer
@doc """
Returns a list of token transfers given a list of logs.
"""
def from_log_params(logs) do
initial_acc = %{tokens: [], token_transfers: []}
logs
|> Enum.filter(&(&1.first_topic == unquote(TokenTransfer.constant())))
|> Enum.reduce(initial_acc, &do_from_log_params/2)
end
defp do_from_log_params(log, %{tokens: tokens, token_transfers: token_transfers} = acc) do
{token, token_transfer} = parse_params(log)
%{
tokens: [token | tokens],
token_transfers: [token_transfer | token_transfers]
}
rescue
_ in [FunctionClauseError, MatchError] ->
Logger.error(fn -> "Unknown token transfer format: #{inspect(log)}" end)
acc
end
# ERC-20 token transfer
defp parse_params(%{second_topic: second_topic, third_topic: third_topic, fourth_topic: nil} = log)
when not is_nil(second_topic) and not is_nil(third_topic) do
[amount] = decode_data(log.data, [{:uint, 256}])
token_transfer = %{
amount: Decimal.new(amount || 0),
block_number: log.block_number,
log_index: log.index,
from_address_hash: truncate_address_hash(log.second_topic),
to_address_hash: truncate_address_hash(log.third_topic),
token_contract_address_hash: log.address_hash,
transaction_hash: log.transaction_hash,
token_type: "ERC-20"
}
token = %{
contract_address_hash: log.address_hash,
type: "ERC-20"
}
{token, token_transfer}
end
# ERC-721 token transfer with topics as addresses
defp parse_params(%{second_topic: second_topic, third_topic: third_topic, fourth_topic: fourth_topic} = log)
when not is_nil(second_topic) and not is_nil(third_topic) and not is_nil(fourth_topic) do
[token_id] = decode_data(fourth_topic, [{:uint, 256}])
token_transfer = %{
block_number: log.block_number,
log_index: log.index,
from_address_hash: truncate_address_hash(log.second_topic),
to_address_hash: truncate_address_hash(log.third_topic),
token_contract_address_hash: log.address_hash,
token_id: token_id || 0,
transaction_hash: log.transaction_hash,
token_type: "ERC-721"
}
token = %{
contract_address_hash: log.address_hash,
type: "ERC-721"
}
{token, token_transfer}
end
# ERC-721 token transfer with info in data field instead of in log topics
defp parse_params(%{second_topic: nil, third_topic: nil, fourth_topic: nil, data: data} = log)
when not is_nil(data) do
[from_address_hash, to_address_hash, token_id] = decode_data(data, [:address, :address, {:uint, 256}])
token_transfer = %{
block_number: log.block_number,
log_index: log.index,
from_address_hash: encode_address_hash(from_address_hash),
to_address_hash: encode_address_hash(to_address_hash),
token_contract_address_hash: log.address_hash,
token_id: token_id,
transaction_hash: log.transaction_hash,
token_type: "ERC-721"
}
token = %{
contract_address_hash: log.address_hash,
type: "ERC-721"
}
{token, token_transfer}
end
defp truncate_address_hash(nil), do: "0x0000000000000000000000000000000000000000"
defp truncate_address_hash("0x000000000000000000000000" <> truncated_hash) do
"0x#{truncated_hash}"
end
defp encode_address_hash(binary) do
"0x" <> Base.encode16(binary, case: :lower)
end
defp decode_data("0x", types) do
for _ <- types, do: nil
end
defp decode_data("0x" <> encoded_data, types) do
encoded_data
|> Base.decode16!(case: :mixed)
|> TypeDecoder.decode_raw(types)
end
defdelegate parse(items), to: Parser
end

@@ -116,6 +116,45 @@ defmodule Indexer.SequenceTest do
end
end
describe "queue_front/2" do
test "with finite mode range is chunked" do
{:ok, pid} = Sequence.start_link(ranges: [1..0], step: -1)
assert Sequence.pop(pid) == 1..1
assert Sequence.pop(pid) == 0..0
assert Sequence.queue_front(pid, 1..0) == :ok
assert Sequence.pop(pid) == 0..0
assert Sequence.pop(pid) == 1..1
assert Sequence.pop(pid) == :halt
assert Sequence.pop(pid) == :halt
end
test "with finite mode with range in wrong direction returns error" do
{:ok, ascending} = Sequence.start_link(first: 0, step: 1)
assert Sequence.queue_front(ascending, 1..0) == {:error, "Range (1..0) direction is opposite step (1) direction"}
{:ok, descending} = Sequence.start_link(ranges: [1..0], step: -1)
assert Sequence.queue_front(descending, 0..1) ==
{:error, "Range (0..1) direction is opposite step (-1) direction"}
end
test "with infinite mode range is chunked and is returned prior to calculated ranges" do
{:ok, pid} = Sequence.start_link(first: 5, step: 1)
assert :ok = Sequence.queue_front(pid, 3..4)
assert Sequence.pop(pid) == 4..4
assert Sequence.pop(pid) == 3..3
# infinite sequence takes over
assert Sequence.pop(pid) == 5..5
assert Sequence.pop(pid) == 6..6
end
end
describe "cap/1" do
test "returns previous mode" do
{:ok, pid} = Sequence.start_link(first: 5, step: 1)

@@ -1,12 +1,12 @@
defmodule Indexer.TokenTransfersTest do
defmodule Indexer.TokenTransfer.ParserTest do
use ExUnit.Case
import ExUnit.CaptureLog
alias Indexer.TokenTransfers
alias Indexer.TokenTransfer.Parser
describe "from_log_params/2" do
test "from_log_params/2 parses logs for tokens and token transfers" do
describe "parse/1" do
test "parse/1 parses logs for tokens and token transfers" do
[log_1, _log_2, log_3] =
logs = [
%{
@@ -82,7 +82,7 @@ defmodule Indexer.TokenTransfersTest do
]
}
assert TokenTransfers.from_log_params(logs) == expected
assert Parser.parse(logs) == expected
end
test "parses ERC-721 transfer with addresses in data field" do
@@ -121,7 +121,7 @@ defmodule Indexer.TokenTransfersTest do
]
}
assert TokenTransfers.from_log_params([log]) == expected
assert Parser.parse([log]) == expected
end
test "logs error with unrecognized token transfer format" do
@@ -138,7 +138,7 @@ defmodule Indexer.TokenTransfersTest do
type: "mined"
}
error = capture_log(fn -> %{tokens: [], token_transfers: []} = TokenTransfers.from_log_params([log]) end)
error = capture_log(fn -> %{tokens: [], token_transfers: []} = Parser.parse([log]) end)
assert error =~ ~r"unknown token transfer"i
end
end

@@ -0,0 +1,76 @@
defmodule Indexer.TokenTransfer.Uncataloged.WorkerTest do
use Explorer.DataCase
alias Indexer.TokenTransfer.Uncataloged.{Worker, TaskSupervisor}
describe "start_link/1" do
test "starts the worker" do
assert {:ok, _pid} = Worker.start_link(supervisor: self())
end
end
describe "init/1" do
test "sends message to self" do
pid = self()
assert {:ok, %{task_ref: nil, block_numbers: [], sup_pid: ^pid}} = Worker.init(supervisor: self())
assert_received :scan
end
end
describe "handle_info with :scan" do
test "sends shutdown to supervisor" do
state = %{task_ref: nil, block_numbers: [], sup_pid: self()}
Task.async(fn -> Worker.handle_info(:scan, state) end)
assert_receive {_, _, {:terminate, :normal}}
end
test "sends message to self when uncataloged token transfers are found" do
log = insert(:token_transfer_log)
block_number = log.transaction.block_number
expected_state = %{task_ref: nil, block_numbers: [block_number], retry_interval: 1}
state = %{task_ref: nil, block_numbers: [], retry_interval: 1}
assert {:noreply, ^expected_state} = Worker.handle_info(:scan, state)
assert_receive :enqueue_blocks
end
end
describe "handle_info with :enqueue_blocks" do
test "starts a task" do
task_sup_pid = start_supervised!({Task.Supervisor, name: TaskSupervisor})
state = %{task_ref: nil, block_numbers: [1]}
assert {:noreply, new_state} = Worker.handle_info(:enqueue_blocks, state)
assert is_reference(new_state.task_ref)
stop_supervised(task_sup_pid)
end
end
describe "handle_info with task ref tuple" do
test "sends shutdown to supervisor on success" do
ref = Process.monitor(self())
state = %{task_ref: ref, block_numbers: [], sup_pid: self()}
Task.async(fn -> assert Worker.handle_info({ref, :ok}, state) end)
assert_receive {_, _, {:terminate, :normal}}
end
test "sends message to self to try again on failure" do
ref = Process.monitor(self())
state = %{task_ref: ref, block_numbers: [1], sup_pid: self(), retry_interval: 1}
expected_state = %{state | task_ref: nil}
assert {:noreply, ^expected_state} = Worker.handle_info({ref, {:error, :queue_unavailable}}, state)
assert_receive :enqueue_blocks
end
end
describe "handle_info with failed task" do
test "sends message to self to try again" do
ref = Process.monitor(self())
state = %{task_ref: ref, block_numbers: [1], sup_pid: self(), retry_interval: 1}
assert {:noreply, %{task_ref: nil}} = Worker.handle_info({:DOWN, ref, :process, self(), :EXIT}, state)
assert_receive :enqueue_blocks
end
end
end