parent 0673e7fd07
commit b9318e09d7

@@ -0,0 +1,83 @@
defmodule Explorer.Chain.Token.InstanceTest do
  use Explorer.DataCase

  alias Explorer.Repo
  alias Explorer.Chain.Token.Instance

describe "stream_not_inserted_token_instances/2" do |
||||
test "reduces with given reducer and accumulator for ERC-721 token" do |
||||
      token_contract_address = insert(:contract_address)
      token = insert(:token, contract_address: token_contract_address, type: "ERC-721")

      transaction =
        :transaction
        |> insert()
        |> with_block(insert(:block, number: 1))

      token_transfer =
        insert(
          :token_transfer,
          block_number: 1000,
          to_address: build(:address),
          transaction: transaction,
          token_contract_address: token_contract_address,
          token: token,
          token_ids: [11]
        )

      assert [result] = 5 |> Instance.not_inserted_token_instances_query() |> Repo.all()
      assert result.token_id == List.first(token_transfer.token_ids)
      assert result.contract_address_hash == token_transfer.token_contract_address_hash
    end

    test "does not fetch token transfers without token_ids" do
      token_contract_address = insert(:contract_address)
      token = insert(:token, contract_address: token_contract_address, type: "ERC-721")

      transaction =
        :transaction
        |> insert()
        |> with_block(insert(:block, number: 1))

      insert(
        :token_transfer,
        block_number: 1000,
        to_address: build(:address),
        transaction: transaction,
        token_contract_address: token_contract_address,
        token: token,
        token_ids: nil
      )

      assert [] = 5 |> Instance.not_inserted_token_instances_query() |> Repo.all()
    end

test "do not fetch records with token instances" do |
||||
      token_contract_address = insert(:contract_address)
      token = insert(:token, contract_address: token_contract_address, type: "ERC-721")

      transaction =
        :transaction
        |> insert()
        |> with_block(insert(:block, number: 1))

      token_transfer =
        insert(
          :token_transfer,
          block_number: 1000,
          to_address: build(:address),
          transaction: transaction,
          token_contract_address: token_contract_address,
          token: token,
          token_ids: [11]
        )

      insert(:token_instance,
        token_id: List.first(token_transfer.token_ids),
        token_contract_address_hash: token_transfer.token_contract_address_hash
      )

      assert [] = 5 |> Instance.not_inserted_token_instances_query() |> Repo.all()
    end
  end
end
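
For orientation: the tests above assume that Instance.not_inserted_token_instances_query/1 selects token transfers that carry token_ids but have no matching row in token_instances, returning maps with token_id and contract_address_hash, capped by the given limit. The sketch below only illustrates that shape and is not the actual Blockscout implementation; the schema and field names come from the tests, while the join and select details are assumptions.

defmodule NotInsertedQuerySketch do
  import Ecto.Query

  alias Explorer.Chain.Token.Instance
  alias Explorer.Chain.TokenTransfer

  # Illustrative only: transfers without token_ids and transfers whose ids already
  # have an Instance row are excluded, mirroring the three test cases above.
  def not_inserted_token_instances_query(limit) do
    from(tt in TokenTransfer,
      left_join: instance in Instance,
      on:
        instance.token_contract_address_hash == tt.token_contract_address_hash and
          fragment("? = ANY(?)", instance.token_id, tt.token_ids),
      where: not is_nil(tt.token_ids) and is_nil(instance.token_id),
      select: %{
        contract_address_hash: tt.token_contract_address_hash,
        # unnest expands the token_ids array into one row per token id
        token_id: fragment("unnest(?)", tt.token_ids)
      },
      limit: ^limit
    )
  end
end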
@@ -1,58 +1,50 @@
 defmodule Indexer.Fetcher.TokenInstance.LegacySanitize do
   @moduledoc """
-    This fetcher is stands for creating token instances which wasn't inserted yet and index meta for them. Legacy is because now we token instances inserted on block import and this fetcher is only for historical and unfetched for some reasons data
+    This fetcher creates token instances that were not inserted yet and indexes metadata for them.
+    It is called legacy because token instances are now inserted on block import; this fetcher only backfills historical data that was left unfetched for some reason.
""" |
||||
|
||||
use Indexer.Fetcher, restart: :permanent |
||||
use Spandex.Decorators |
||||
use GenServer, restart: :transient |
||||
|
||||
import Indexer.Fetcher.TokenInstance.Helper |
||||
|
||||
alias Explorer.Chain |
||||
alias Indexer.BufferedTask |
||||
|
||||
@behaviour BufferedTask |
||||
alias Explorer.Chain.Token.Instance |
||||
alias Explorer.Repo |
||||
|
||||
@default_max_batch_size 10 |
||||
@default_max_concurrency 10 |
||||
@doc false |
||||
def child_spec([init_options, gen_server_options]) do |
||||
merged_init_opts = |
||||
defaults() |
||||
|> Keyword.merge(init_options) |
||||
|> Keyword.merge(state: []) |
||||
import Indexer.Fetcher.TokenInstance.Helper |
||||
|
||||
Supervisor.child_spec({BufferedTask, [{__MODULE__, merged_init_opts}, gen_server_options]}, id: __MODULE__) |
||||
def start_link(_) do |
||||
concurrency = Application.get_env(:indexer, __MODULE__)[:concurrency] |
||||
batch_size = Application.get_env(:indexer, __MODULE__)[:batch_size] |
||||
GenServer.start_link(__MODULE__, %{concurrency: concurrency, batch_size: batch_size}, name: __MODULE__) |
||||
end |
||||
|
||||
@impl BufferedTask |
||||
def init(initial_acc, reducer, _) do |
||||
{:ok, acc} = |
||||
Chain.stream_not_inserted_token_instances(initial_acc, fn data, acc -> |
||||
reducer.(data, acc) |
||||
end) |
||||
@impl true |
||||
def init(opts) do |
||||
GenServer.cast(__MODULE__, :backfill) |
||||
|
||||
acc |
||||
{:ok, opts} |
||||
end |
||||
|
||||
@impl BufferedTask |
||||
def run(token_instances, _) when is_list(token_instances) do |
||||
token_instances |
||||
|> Enum.filter(fn %{contract_address_hash: hash, token_id: token_id} -> |
||||
not Chain.token_instance_exists?(token_id, hash) |
||||
end) |
||||
|> batch_fetch_instances() |
||||
|
||||
:ok |
||||
@impl true |
||||
def handle_cast(:backfill, %{concurrency: concurrency, batch_size: batch_size} = state) do |
||||
instances_to_fetch = |
||||
(concurrency * batch_size) |
||||
|> Instance.not_inserted_token_instances_query() |
||||
|> Repo.all() |
||||
|
||||
if Enum.empty?(instances_to_fetch) do |
||||
{:stop, :normal, state} |
||||
else |
||||
instances_to_fetch |
||||
|> Enum.uniq() |
||||
|> Enum.chunk_every(batch_size) |
||||
|> Enum.map(&process_batch/1) |
||||
|> Task.await_many(:infinity) |
||||
|
||||
GenServer.cast(__MODULE__, :backfill) |
||||
|
||||
{:noreply, state} |
||||
end |
||||
end |
||||
|
||||
defp defaults do |
||||
[ |
||||
flush_interval: :infinity, |
||||
max_concurrency: Application.get_env(:indexer, __MODULE__)[:concurrency] || @default_max_concurrency, |
||||
max_batch_size: Application.get_env(:indexer, __MODULE__)[:batch_size] || @default_max_batch_size, |
||||
poll: false, |
||||
task_supervisor: __MODULE__.TaskSupervisor |
||||
] |
||||
end |
||||
defp process_batch(batch), do: Task.async(fn -> batch_fetch_instances(batch) end) |
||||
end |
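
Since the reworked module is a transient GenServer rather than a BufferedTask, it is expected to run its backfill to completion once and then stop. Below is a minimal sketch of how such a worker could be attached to a supervision tree; the supervisor module name is illustrative, not the actual Blockscout indexer supervisor.

defmodule Indexer.Example.BackfillSupervisor do
  # Hypothetical supervisor, shown only to illustrate the transient restart semantics.
  use Supervisor

  def start_link(init_arg) do
    Supervisor.start_link(__MODULE__, init_arg, name: __MODULE__)
  end

  @impl true
  def init(_init_arg) do
    children = [
      # child_spec/1 is generated by `use GenServer, restart: :transient`, so once
      # handle_cast(:backfill, ...) returns {:stop, :normal, state} the worker is
      # not restarted and the backfill is considered finished.
      {Indexer.Fetcher.TokenInstance.LegacySanitize, []}
    ]

    Supervisor.init(children, strategy: :one_for_one)
  end
end

The worker reads :concurrency and :batch_size for this module from the :indexer application environment in start_link/1, so both keys are assumed to be set in config.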