Add owner_address_hash and other fields to token_instances; Move toke… (#8386)
* Add owner_address_hash and other fields to token_instances; Move token instances' insert into synchronous block import stage * Add TokenInstanceOwnerAddressMigration process * Add TokenInstanceOwnerAddressMigration.Helper tests * Add envs to Makefile and .env file * Process review comments (branch: pull/8552/head)
parent
2e8d9060c5
commit
daed959d8f
@ -0,0 +1,106 @@ |
|||||||
|
defmodule Explorer.Chain.Import.Runner.TokenInstances do
  @moduledoc """
  Bulk imports `t:Explorer.Chain.TokenInstances.t/0`.
  """

  require Ecto.Query

  alias Ecto.{Changeset, Multi, Repo}
  alias Explorer.Chain.Import
  alias Explorer.Chain.Token.Instance, as: TokenInstance
  alias Explorer.Prometheus.Instrumenter

  import Ecto.Query, only: [from: 2]

  @behaviour Import.Runner

  # milliseconds
  @timeout 60_000

  @type imported :: [TokenInstance.t()]

  @impl Import.Runner
  def ecto_schema_module, do: TokenInstance

  @impl Import.Runner
  def option_key, do: :token_instances

  @impl Import.Runner
  def imported_table_row do
    %{
      value_type: "[#{ecto_schema_module()}.t()]",
      value_description: "List of `t:#{ecto_schema_module()}.t/0`s"
    }
  end

  # Adds a `:token_instances` step to the import `Multi`; the actual insert is
  # wrapped in a Prometheus instrumentation call for the :block_referencing stage.
  @impl Import.Runner
  def run(multi, changes_list, %{timestamps: timestamps} = options) do
    # Only :on_conflict and :timeout are honored from per-runner options;
    # :timeout falls back to @timeout, timestamps always come from the caller.
    insert_options =
      options
      |> Map.get(option_key(), %{})
      |> Map.take(~w(on_conflict timeout)a)
      |> Map.put_new(:timeout, @timeout)
      |> Map.put(:timestamps, timestamps)

    Multi.run(multi, :token_instances, fn repo, _ ->
      Instrumenter.block_import_stage_runner(
        fn -> insert(repo, changes_list, insert_options) end,
        :block_referencing,
        :token_instances,
        :token_instances
      )
    end)
  end

  @impl Import.Runner
  def timeout, do: @timeout

  @spec insert(Repo.t(), [map()], %{
          optional(:on_conflict) => Import.Runner.on_conflict(),
          required(:timeout) => timeout,
          required(:timestamps) => Import.timestamps()
        }) ::
          {:ok, [TokenInstance.t()]}
          | {:error, [Changeset.t()]}
  def insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do
    on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0)

    # Guarantee the same import order to avoid deadlocks
    ordered_changes_list = Enum.sort_by(changes_list, &{&1.token_contract_address_hash, &1.token_id})

    {:ok, _} =
      Import.insert_changes_list(
        repo,
        ordered_changes_list,
        conflict_target: [:token_contract_address_hash, :token_id],
        on_conflict: on_conflict,
        for: TokenInstance,
        returning: true,
        timeout: timeout,
        timestamps: timestamps
      )
  end

  # Default upsert behavior: keep the existing metadata/error, take the owner
  # fields from the incoming (EXCLUDED) row, but only when the incoming row has
  # owner info AND it is at least as recent as the stored one (greater block, or
  # same block with greater-or-equal log index), or the stored owner info is nil.
  defp default_on_conflict do
    from(
      token_instance in TokenInstance,
      update: [
        set: [
          # metadata/error are intentionally kept from the existing row
          metadata: token_instance.metadata,
          error: token_instance.error,
          owner_updated_at_block: fragment("EXCLUDED.owner_updated_at_block"),
          owner_updated_at_log_index: fragment("EXCLUDED.owner_updated_at_log_index"),
          owner_address_hash: fragment("EXCLUDED.owner_address_hash"),
          # preserve the earliest inserted_at and the latest updated_at
          inserted_at: fragment("LEAST(?, EXCLUDED.inserted_at)", token_instance.inserted_at),
          updated_at: fragment("GREATEST(?, EXCLUDED.updated_at)", token_instance.updated_at)
        ]
      ],
      where:
        fragment("EXCLUDED.owner_address_hash IS NOT NULL") and fragment("EXCLUDED.owner_updated_at_block IS NOT NULL") and
          (fragment("EXCLUDED.owner_updated_at_block > ?", token_instance.owner_updated_at_block) or
             (fragment("EXCLUDED.owner_updated_at_block = ?", token_instance.owner_updated_at_block) and
                fragment("EXCLUDED.owner_updated_at_log_index >= ?", token_instance.owner_updated_at_log_index)) or
             is_nil(token_instance.owner_updated_at_block) or is_nil(token_instance.owner_address_hash))
    )
  end
end
@ -0,0 +1,77 @@ |
|||||||
|
defmodule Explorer.TokenInstanceOwnerAddressMigration.Helper do
  @moduledoc """
  Auxiliary functions for TokenInstanceOwnerAddressMigration.{Worker and Supervisor}
  """

  import Ecto.Query,
    only: [
      from: 2
    ]

  alias Explorer.{Chain, Repo}
  alias Explorer.Chain.Token.Instance
  alias Explorer.Chain.{SmartContract, TokenTransfer}

  # Resolved once at compile time; used as the placeholder owner for instances
  # that have no consensus transfer in the database.
  {:ok, burn_address_hash} = Chain.string_to_address_hash(SmartContract.burn_address_hash_string())
  @burn_address_hash burn_address_hash

  @doc """
  Builds a query selecting up to `limit` ERC-721 token instances that still
  lack an `owner_address_hash`.
  """
  @spec filtered_token_instances_query(non_neg_integer()) :: Ecto.Query.t()
  def filtered_token_instances_query(limit) do
    from(ti in Instance,
      where: is_nil(ti.owner_address_hash),
      inner_join: t in assoc(ti, :token),
      where: t.type == "ERC-721",
      limit: ^limit,
      select: %{token_id: ti.token_id, token_contract_address_hash: ti.token_contract_address_hash}
    )
  end

  @doc """
  Derives owner params for every instance in `batch` from its most recent
  consensus transfer and imports the result via `Chain.import/1`.
  """
  @spec fetch_and_insert([map]) ::
          {:error, :timeout | [map]}
          | {:ok,
             %{
               :token_instances => [Instance.t()]
             }}
          | {:error, any, any, map}
  def fetch_and_insert(batch) do
    changes = Enum.map(batch, &owner_params_for_instance/1)

    Chain.import(%{token_instances: %{params: changes}})
  end

  # Looks up the latest consensus transfer carrying the given token id and maps
  # it to token-instance owner params; falls back to the burn address with -1
  # sentinel block/log values when no transfer exists.
  defp owner_params_for_instance(%{token_id: token_id, token_contract_address_hash: contract_hash}) do
    latest_transfer_query =
      from(tt in TokenTransfer.only_consensus_transfers_query(),
        where:
          tt.token_contract_address_hash == ^contract_hash and
            fragment("? @> ARRAY[?::decimal]", tt.token_ids, ^token_id),
        order_by: [desc: tt.block_number, desc: tt.log_index],
        limit: 1,
        select: %{
          token_contract_address_hash: tt.token_contract_address_hash,
          token_ids: tt.token_ids,
          to_address_hash: tt.to_address_hash,
          block_number: tt.block_number,
          log_index: tt.log_index
        }
      )

    latest_transfer =
      Repo.one(latest_transfer_query) ||
        %{to_address_hash: @burn_address_hash, block_number: -1, log_index: -1}

    %{
      token_contract_address_hash: contract_hash,
      token_id: token_id,
      token_type: "ERC-721",
      owner_address_hash: latest_transfer.to_address_hash,
      owner_updated_at_block: latest_transfer.block_number,
      owner_updated_at_log_index: latest_transfer.log_index
    }
  end

  @doc """
  Returns `true` when at least one ERC-721 token instance still lacks an owner.
  """
  @spec unfilled_token_instances_exists? :: boolean
  def unfilled_token_instances_exists? do
    Repo.exists?(filtered_token_instances_query(1))
  end
end
@ -0,0 +1,26 @@ |
|||||||
|
defmodule Explorer.TokenInstanceOwnerAddressMigration.Supervisor do
  @moduledoc """
  Supervisor for Explorer.TokenInstanceOwnerAddressMigration.Worker
  """

  use Supervisor

  alias Explorer.TokenInstanceOwnerAddressMigration.{Helper, Worker}

  def start_link(init_arg) do
    Supervisor.start_link(__MODULE__, init_arg, name: __MODULE__)
  end

  # Only supervise the worker while there is something left to migrate;
  # returning :ignore skips starting this subtree entirely.
  @impl true
  def init(_init_arg) do
    case Helper.unfilled_token_instances_exists?() do
      true ->
        worker_config = Application.get_env(:explorer, Explorer.TokenInstanceOwnerAddressMigration)

        Supervisor.init([{Worker, worker_config}], strategy: :one_for_one)

      false ->
        :ignore
    end
  end
end
@ -0,0 +1,51 @@ |
|||||||
|
defmodule Explorer.TokenInstanceOwnerAddressMigration.Worker do
  @moduledoc """
  GenServer that backfills `owner_address_hash`, `owner_updated_at_block` and
  `owner_updated_at_log_index` for ERC-721 token instances. It works as follows:

  1. Check whether any unprocessed NFT instances remain.
     - if yes, go to step 2
     - if no, shut down
  2. Fetch `concurrency * batch_size` token instances and process them in
     `concurrency` concurrent tasks.
  3. Go to step 1.
  """

  use GenServer, restart: :transient

  alias Explorer.Repo
  alias Explorer.TokenInstanceOwnerAddressMigration.Helper

  @doc """
  Starts the worker.

  `opts` is a keyword list that must contain `:concurrency` and `:batch_size`.
  Keys are read with `Keyword.fetch!/2` so any key order (and extra keys) is
  tolerated — the previous head pattern-matched the exact keyword order and
  crashed when the application environment supplied options in another order.
  """
  def start_link(opts) do
    state = %{
      concurrency: Keyword.fetch!(opts, :concurrency),
      batch_size: Keyword.fetch!(opts, :batch_size)
    }

    GenServer.start_link(__MODULE__, state, name: __MODULE__)
  end

  @impl true
  def init(state) do
    # Kick off the first pass asynchronously so init/1 returns quickly;
    # casting to self() does not depend on the registered name.
    GenServer.cast(self(), :check_necessity)

    {:ok, state}
  end

  @impl true
  def handle_cast(:check_necessity, state) do
    if Helper.unfilled_token_instances_exists?() do
      GenServer.cast(self(), :backfill)
      {:noreply, state}
    else
      # Nothing left to migrate: stop normally (restart: :transient means the
      # supervisor will not restart us on a :normal exit).
      {:stop, :normal, state}
    end
  end

  @impl true
  def handle_cast(:backfill, %{concurrency: concurrency, batch_size: batch_size} = state) do
    # Pull one round of work and fan it out over `concurrency` tasks of
    # `batch_size` instances each; wait for all of them before looping.
    (concurrency * batch_size)
    |> Helper.filtered_token_instances_query()
    |> Repo.all()
    |> Enum.chunk_every(batch_size)
    |> Enum.map(fn batch -> Task.async(fn -> Helper.fetch_and_insert(batch) end) end)
    |> Task.await_many(:infinity)

    # Loop: re-check whether more unfilled instances remain.
    GenServer.cast(self(), :check_necessity)

    {:noreply, state}
  end
end
@ -0,0 +1,13 @@ |
|||||||
|
# NOTE(review): the module name says "add token_ids to address_token_balances",
# but the migration actually adds owner columns to token_instances. The name is
# tied to the migration's file name, so it is only flagged here — confirm
# whether a rename (with matching file rename) is safe before changing it.
defmodule Explorer.Repo.Migrations.AddTokenIdsToAddressTokenBalances do
  use Ecto.Migration

  def change do
    alter table(:token_instances) do
      # Owner-tracking columns; nullable so existing rows stay valid until the
      # TokenInstanceOwnerAddressMigration process backfills them.
      add(:owner_address_hash, :bytea, null: true)
      add(:owner_updated_at_block, :bigint, null: true)
      add(:owner_updated_at_log_index, :integer, null: true)
    end

    # Supports owner-based lookups of token instances.
    create(index(:token_instances, [:owner_address_hash]))
  end
end
@ -0,0 +1,121 @@ |
|||||||
|
defmodule Explorer.TokenInstanceOwnerAddressMigration.HelperTest do
  use Explorer.DataCase

  alias Explorer.{Chain, Repo}
  alias Explorer.Chain.Token.Instance
  alias Explorer.TokenInstanceOwnerAddressMigration.Helper

  # The zero/burn address that Helper uses as the placeholder owner when an
  # instance has no consensus transfer in the database.
  {:ok, burn_address_hash} = Chain.string_to_address_hash("0x0000000000000000000000000000000000000000")
  @burn_address_hash burn_address_hash

  describe "fetch_and_insert/2" do
    test "successfully update owner of single token instance" do
      token_address = insert(:contract_address)
      insert(:token, contract_address: token_address, type: "ERC-721")

      instance = insert(:token_instance, token_contract_address_hash: token_address.hash)

      transaction =
        :transaction
        |> insert()
        |> with_block()

      # Single transfer of the instance's token id; its receiver should become
      # the recorded owner.
      tt_1 =
        insert(:token_transfer,
          token_ids: [instance.token_id],
          transaction: transaction,
          token_contract_address: token_address
        )

      Helper.fetch_and_insert([
        %{token_id: instance.token_id, token_contract_address_hash: instance.token_contract_address_hash}
      ])

      owner_address = tt_1.to_address_hash
      block_number = tt_1.block_number
      log_index = tt_1.log_index

      # Owner fields must match the (only) transfer.
      assert %Instance{
               owner_address_hash: ^owner_address,
               owner_updated_at_block: ^block_number,
               owner_updated_at_log_index: ^log_index
             } =
               Repo.get_by(Instance,
                 token_id: instance.token_id,
                 token_contract_address_hash: instance.token_contract_address_hash
               )
    end

    test "put placeholder value if tt absent in db" do
      instance = insert(:token_instance)

      Helper.fetch_and_insert([
        %{token_id: instance.token_id, token_contract_address_hash: instance.token_contract_address_hash}
      ])

      # No transfers exist for this instance: expect the burn-address owner and
      # the -1 block/log_index sentinels.
      assert %Instance{
               owner_address_hash: @burn_address_hash,
               owner_updated_at_block: -1,
               owner_updated_at_log_index: -1
             } =
               Repo.get_by(Instance,
                 token_id: instance.token_id,
                 token_contract_address_hash: instance.token_contract_address_hash
               )
    end

    test "update owners of token instances batch" do
      # Build 6 instances, each with 6 transactions x 6 transfers (36 transfers);
      # each element of `instances` carries the expected owner params taken from
      # the transfer with the maximal {block_number, log_index}.
      instances =
        for _ <- 0..5 do
          token_address = insert(:contract_address)
          insert(:token, contract_address: token_address, type: "ERC-721")

          instance = insert(:token_instance, token_contract_address_hash: token_address.hash)

          tt =
            for _ <- 0..5 do
              transaction =
                :transaction
                |> insert()
                |> with_block()

              for _ <- 0..5 do
                insert(:token_transfer,
                  token_ids: [instance.token_id],
                  transaction: transaction,
                  token_contract_address: token_address
                )
              end
            end
            |> Enum.concat()
            |> Enum.max_by(fn tt -> {tt.block_number, tt.log_index} end)

          %{
            token_id: instance.token_id,
            token_contract_address_hash: instance.token_contract_address_hash,
            owner_address_hash: tt.to_address_hash,
            owner_updated_at_block: tt.block_number,
            owner_updated_at_log_index: tt.log_index
          }
        end

      Helper.fetch_and_insert(instances)

      # Every instance must now carry the owner params of its latest transfer.
      for ti <- instances do
        owner_address = ti.owner_address_hash
        block_number = ti.owner_updated_at_block
        log_index = ti.owner_updated_at_log_index

        assert %Instance{
                 owner_address_hash: ^owner_address,
                 owner_updated_at_block: ^block_number,
                 owner_updated_at_log_index: ^log_index
               } =
                 Repo.get_by(Instance,
                   token_id: ti.token_id,
                   token_contract_address_hash: ti.token_contract_address_hash
                 )
      end
    end
  end
end
@ -0,0 +1,58 @@ |
|||||||
|
defmodule Indexer.Fetcher.TokenInstance.LegacySanitize do
  @moduledoc """
  Creates token instances that were not inserted yet and indexes their metadata.

  "Legacy" because token instances are now inserted during block import; this
  fetcher only handles historical data and instances that were left unfetched
  for some reason.
  """

  use Indexer.Fetcher, restart: :permanent
  use Spandex.Decorators

  import Indexer.Fetcher.TokenInstance.Helper

  alias Explorer.Chain
  alias Indexer.BufferedTask

  @behaviour BufferedTask

  @default_max_batch_size 10
  @default_max_concurrency 10

  @doc false
  def child_spec([init_options, gen_server_options]) do
    # Caller-supplied options win over defaults; the accumulator state always
    # starts empty.
    merged_init_opts =
      defaults()
      |> Keyword.merge(init_options)
      |> Keyword.merge(state: [])

    Supervisor.child_spec({BufferedTask, [{__MODULE__, merged_init_opts}, gen_server_options]}, id: __MODULE__)
  end

  # Streams every token instance that is referenced elsewhere but missing from
  # the token_instances table into the BufferedTask queue.
  @impl BufferedTask
  def init(initial_acc, reducer, _) do
    {:ok, acc} =
      Chain.stream_not_inserted_token_instances(initial_acc, fn data, acc ->
        reducer.(data, acc)
      end)

    acc
  end

  # Drops instances that have been inserted in the meantime, then fetches the
  # remaining ones via the shared batch_fetch_instances/1 helper.
  @impl BufferedTask
  def run(token_instances, _) when is_list(token_instances) do
    token_instances
    |> Enum.reject(fn %{contract_address_hash: hash, token_id: token_id} ->
      Chain.token_instance_exists?(token_id, hash)
    end)
    |> batch_fetch_instances()

    :ok
  end

  # Default BufferedTask options; concurrency/batch size are runtime-configurable.
  defp defaults do
    [
      flush_interval: :infinity,
      max_concurrency: Application.get_env(:indexer, __MODULE__)[:concurrency] || @default_max_concurrency,
      max_batch_size: Application.get_env(:indexer, __MODULE__)[:batch_size] || @default_max_batch_size,
      poll: false,
      task_supervisor: __MODULE__.TaskSupervisor
    ]
  end
end
@ -0,0 +1,88 @@ |
|||||||
|
defmodule Indexer.Transform.TokenInstances do
  @moduledoc """
  Module extracts token instances from token transfers
  """

  @doc """
  Extracts token-instance params from import options containing
  `:token_transfers_params`, deduplicated per {contract, token_id}.
  """
  def params_set(%{} = import_options) do
    Enum.reduce(import_options, %{}, &reducer/2)
  end

  # NOTE(review): there is no catch-all reducer clause, so import_options with
  # any key other than :token_transfers_params would raise FunctionClauseError —
  # presumably callers pass only that key; confirm before adding more options.
  defp reducer({:token_transfers_params, transfers}, initial) when is_list(transfers) do
    transfers
    |> Enum.reduce(initial, fn transfer, acc ->
      case transfer do
        %{
          block_number: block_number,
          from_address_hash: from_address_hash,
          to_address_hash: to_address_hash,
          token_contract_address_hash: contract_hash,
          token_ids: [_ | _]
        }
        when is_integer(block_number) and
               is_binary(from_address_hash) and
               is_binary(to_address_hash) and is_binary(contract_hash) ->
          transfer_to_instances(transfer, acc)

        _ ->
          acc
      end
    end)
    |> Map.values()
  end

  # Single-id ERC-721 transfer: the receiver becomes the candidate owner; when
  # the same instance was already seen, keep whichever params come from the
  # later {block, log_index}.
  defp transfer_to_instances(
         %{
           token_type: "ERC-721" = token_type,
           to_address_hash: to_address_hash,
           token_ids: [token_id],
           token_contract_address_hash: contract_hash,
           block_number: block_number,
           log_index: log_index
         },
         acc
       ) do
    candidate = %{
      token_contract_address_hash: contract_hash,
      token_id: token_id,
      token_type: token_type,
      owner_address_hash: to_address_hash,
      owner_updated_at_block: block_number,
      owner_updated_at_log_index: log_index
    }

    Map.update(acc, {contract_hash, token_id}, candidate, fn existing ->
      Enum.max_by(
        [candidate, existing],
        fn %{owner_updated_at_block: block, owner_updated_at_log_index: index} -> {block, index} end
      )
    end)
  end

  # Any other multi-id transfer: emit ownerless params per id, hard-coded as
  # "ERC-1155" (this also catches an ERC-721 transfer with several ids —
  # presumably such transfers only occur for ERC-1155; confirm with callers).
  defp transfer_to_instances(
         %{
           token_type: _token_type,
           token_ids: [_ | _] = token_ids,
           token_contract_address_hash: contract_hash
         },
         acc
       ) do
    Enum.into(token_ids, acc, fn id ->
      {{contract_hash, id},
       %{
         token_contract_address_hash: contract_hash,
         token_id: id,
         token_type: "ERC-1155"
       }}
    end)
  end
end
Loading…
Reference in new issue